1
/*
2
 * Hurl (https://hurl.dev)
3
 * Copyright (C) 2025 Orange
4
 *
5
 * Licensed under the Apache License, Version 2.0 (the "License");
6
 * you may not use this file except in compliance with the License.
7
 * You may obtain a copy of the License at
8
 *
9
 *          http://www.apache.org/licenses/LICENSE-2.0
10
 *
11
 * Unless required by applicable law or agreed to in writing, software
12
 * distributed under the License is distributed on an "AS IS" BASIS,
13
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
 * See the License for the specific language governing permissions and
15
 * limitations under the License.
16
 *
17
 */
18
use hurl_core::ast::{
19
    Assert, Base64, Body, BooleanOption, Bytes, Capture, CertificateAttributeName, Comment, Cookie,
20
    CookieAttribute, CookiePath, CountOption, DurationOption, Entry, EntryOption, Expr, ExprKind,
21
    File, FileParam, FileValue, Filter, FilterValue, Function, GraphQl, GraphQlVariables, Hex,
22
    HurlFile, JsonListElement, JsonObjectElement, JsonValue, KeyValue, LineTerminator, Method,
23
    MultilineString, MultilineStringAttribute, MultilineStringKind, MultipartParam, NaturalOption,
24
    OptionKind, Placeholder, Predicate, PredicateFunc, PredicateFuncValue, PredicateValue, Query,
25
    QueryValue, Regex, RegexValue, Request, Response, Section, SectionValue, Status, StatusValue,
26
    Template, TemplateElement, Variable, VariableDefinition, VariableValue, Version, Whitespace,
27
    I64, U64,
28
};
29
use hurl_core::typing::{Count, Duration, ToSource};
30

            
31
/// A lexical token produced when tokenizing a Hurl AST node.
///
/// Tokens carry their rendered text so consumers (formatters, highlighters)
/// can emit the source back verbatim, classified by kind.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
    Method(String),
    Version(String),
    Status(String),
    SectionHeader(String),
    QueryType(String),
    PredicateType(String),
    FilterType(String),
    Not(String),
    Keyword(String),

    // Primitives
    Whitespace(String),
    Comment(String),
    Value(String),
    Colon(String),
    StringDelimiter(String),
    Boolean(String),
    Number(String),
    String(String),
    CodeDelimiter(String),
    CodeVariable(String),
    Lang(String),
    Unit(String),
}
57

            
58
/// Converts an AST node into a flat sequence of [`Token`]s.
pub trait Tokenizable {
    /// Returns the tokens of this node, in source order.
    fn tokenize(&self) -> Vec<Token>;
}
61

            
62
impl Tokenizable for HurlFile {
63
81
    fn tokenize(&self) -> Vec<Token> {
64
81
        let mut tokens: Vec<Token> = vec![];
65
291
        tokens.append(&mut self.entries.iter().flat_map(|e| e.tokenize()).collect());
66
81
        tokens.append(
67
81
            &mut self
68
81
                .line_terminators
69
81
                .iter()
70
89
                .flat_map(|e| e.tokenize())
71
81
                .collect(),
72
81
        );
73
81
        tokens
74
    }
75
}
76

            
77
impl Tokenizable for Entry {
78
264
    fn tokenize(&self) -> Vec<Token> {
79
264
        let mut tokens: Vec<Token> = vec![];
80
264
        tokens.append(&mut self.request.tokenize());
81
264
        if let Some(response) = &self.response {
82
108
            tokens.append(&mut response.tokenize());
83
        }
84
264
        tokens
85
    }
86
}
87

            
88
impl Tokenizable for Request {
89
264
    fn tokenize(&self) -> Vec<Token> {
90
264
        let mut tokens: Vec<Token> = vec![];
91
264
        tokens.append(
92
264
            &mut self
93
264
                .line_terminators
94
264
                .iter()
95
321
                .flat_map(|e| e.tokenize())
96
264
                .collect(),
97
264
        );
98
264
        tokens.append(&mut self.space0.tokenize());
99
264
        tokens.append(&mut self.method.tokenize());
100
264
        tokens.append(&mut self.space1.tokenize());
101
264
        tokens.append(&mut self.url.tokenize());
102
264
        tokens.append(&mut self.line_terminator0.tokenize());
103
289
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
104
286
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
105
264
        if let Some(body) = &self.body {
106
51
            tokens.append(&mut body.tokenize());
107
        }
108
264
        tokens
109
    }
110
}
111

            
112
impl Tokenizable for Method {
    /// An HTTP method renders as a single `Method` token.
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::Method(self.to_string())]
    }
}
117

            
118
impl Tokenizable for Response {
119
108
    fn tokenize(&self) -> Vec<Token> {
120
108
        let mut tokens: Vec<Token> = vec![];
121
108
        tokens.append(
122
108
            &mut self
123
108
                .line_terminators
124
108
                .iter()
125
110
                .flat_map(|e| e.tokenize())
126
108
                .collect(),
127
108
        );
128
108
        tokens.append(&mut self.space0.tokenize());
129
108
        tokens.append(&mut self.version.tokenize());
130
108
        tokens.append(&mut self.space1.tokenize());
131
108
        tokens.append(&mut self.status.tokenize());
132
108
        tokens.append(&mut self.line_terminator0.tokenize());
133
113
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
134
127
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
135
108
        if let Some(body) = self.clone().body {
136
45
            tokens.append(&mut body.tokenize());
137
        }
138
108
        tokens
139
    }
140
}
141

            
142
impl Tokenizable for Status {
143
108
    fn tokenize(&self) -> Vec<Token> {
144
108
        let mut tokens: Vec<Token> = vec![];
145
108
        match self.value.clone() {
146
3
            StatusValue::Any => tokens.push(Token::Status("*".to_string())),
147
105
            StatusValue::Specific(v) => tokens.push(Token::Status(v.to_string())),
148
        }
149
108
        tokens
150
    }
151
}
152

            
153
impl Tokenizable for Version {
    /// An HTTP version renders as a single `Version` token.
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::Version(self.value.to_string())]
    }
}
158

            
159
impl Tokenizable for Body {
160
96
    fn tokenize(&self) -> Vec<Token> {
161
96
        let mut tokens: Vec<Token> = vec![];
162
96
        tokens.append(
163
96
            &mut self
164
96
                .line_terminators
165
96
                .iter()
166
101
                .flat_map(|e| e.tokenize())
167
96
                .collect(),
168
96
        );
169
96
        tokens.append(&mut self.space0.tokenize());
170
96
        tokens.append(&mut self.value.tokenize());
171
96
        tokens.append(&mut self.line_terminator0.tokenize());
172
96
        tokens
173
    }
174
}
175

            
176
impl Tokenizable for Bytes {
177
96
    fn tokenize(&self) -> Vec<Token> {
178
96
        let mut tokens: Vec<Token> = vec![];
179
96
        match self {
180
6
            Bytes::Json(value) => tokens.append(&mut value.tokenize()),
181
3
            Bytes::Xml(value) => tokens.push(Token::String(value.to_string())),
182
39
            Bytes::MultilineString(value) => tokens.append(&mut value.tokenize()),
183
15
            Bytes::OnelineString(value) => tokens.append(&mut value.tokenize()),
184
12
            Bytes::Base64(value) => tokens.append(&mut value.tokenize()),
185
9
            Bytes::Hex(value) => tokens.append(&mut value.tokenize()),
186
12
            Bytes::File(value) => tokens.append(&mut value.tokenize()),
187
        }
188
96
        tokens
189
    }
190
}
191

            
192
impl Tokenizable for Section {
193
123
    fn tokenize(&self) -> Vec<Token> {
194
123
        let mut tokens: Vec<Token> = vec![];
195
123
        tokens.append(
196
123
            &mut self
197
123
                .line_terminators
198
123
                .iter()
199
133
                .flat_map(|e| e.tokenize())
200
123
                .collect(),
201
123
        );
202
123
        tokens.append(&mut self.space0.tokenize());
203
123
        tokens.push(Token::SectionHeader(format!("[{}]", self.identifier())));
204
123
        tokens.append(&mut self.line_terminator0.tokenize());
205
123
        tokens.append(&mut self.value.tokenize());
206
123
        tokens
207
    }
208
}
209

            
210
impl Tokenizable for SectionValue {
211
123
    fn tokenize(&self) -> Vec<Token> {
212
123
        let mut tokens: Vec<Token> = vec![];
213
123
        match self {
214
45
            SectionValue::Asserts(items) => {
215
360
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
216
            }
217
12
            SectionValue::QueryParams(items, _) => {
218
22
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
219
            }
220
3
            SectionValue::BasicAuth(item) => {
221
3
                if let Some(kv) = item {
222
3
                    tokens.append(&mut kv.tokenize());
223
                }
224
            }
225
6
            SectionValue::FormParams(items, _) => {
226
14
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
227
            }
228
6
            SectionValue::MultipartFormData(items, _) => {
229
11
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
230
            }
231
9
            SectionValue::Cookies(items) => {
232
12
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
233
            }
234
12
            SectionValue::Captures(items) => {
235
22
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
236
            }
237
30
            SectionValue::Options(items) => {
238
289
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
239
            }
240
        }
241
123
        tokens
242
    }
243
}
244

            
245
impl Tokenizable for Base64 {
246
15
    fn tokenize(&self) -> Vec<Token> {
247
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("base64,"))];
248
15
        tokens.append(&mut self.space0.tokenize());
249
15
        tokens.push(Token::String(self.source.to_string()));
250
15
        tokens.append(&mut self.space1.tokenize());
251
15
        tokens.push(Token::Keyword(String::from(";")));
252
15
        tokens
253
    }
254
}
255

            
256
impl Tokenizable for Hex {
257
9
    fn tokenize(&self) -> Vec<Token> {
258
9
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("hex,"))];
259
9
        tokens.append(&mut self.space0.tokenize());
260
9
        tokens.push(Token::String(self.source.to_string()));
261
9
        tokens.append(&mut self.space1.tokenize());
262
9
        tokens.push(Token::Keyword(String::from(";")));
263
9
        tokens
264
    }
265
}
266

            
267
impl Tokenizable for File {
268
15
    fn tokenize(&self) -> Vec<Token> {
269
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("file,"))];
270
15
        tokens.append(&mut self.space0.tokenize());
271
15
        tokens.append(&mut self.filename.tokenize());
272
15
        tokens.append(&mut self.space1.tokenize());
273
15
        tokens.push(Token::Keyword(String::from(";")));
274
15
        tokens
275
    }
276
}
277

            
278
impl Tokenizable for KeyValue {
279
126
    fn tokenize(&self) -> Vec<Token> {
280
126
        let mut tokens: Vec<Token> = vec![];
281
126
        tokens.append(
282
126
            &mut self
283
126
                .line_terminators
284
126
                .iter()
285
127
                .flat_map(|e| e.tokenize())
286
126
                .collect(),
287
126
        );
288
126
        tokens.append(&mut self.space0.tokenize());
289
126
        tokens.append(&mut self.key.tokenize());
290
126
        tokens.append(&mut self.space1.tokenize());
291
126
        tokens.push(Token::Colon(String::from(":")));
292
126
        tokens.append(&mut self.space2.tokenize());
293
126
        tokens.append(&mut self.value.tokenize());
294
126
        tokens.append(&mut self.line_terminator0.tokenize());
295
126
        tokens
296
    }
297
}
298

            
299
impl Tokenizable for MultipartParam {
300
9
    fn tokenize(&self) -> Vec<Token> {
301
9
        match self {
302
3
            MultipartParam::Param(key_value) => key_value.tokenize(),
303
6
            MultipartParam::FileParam(file_param) => file_param.tokenize(),
304
        }
305
    }
306
}
307

            
308
impl Tokenizable for FileParam {
309
6
    fn tokenize(&self) -> Vec<Token> {
310
6
        let mut tokens: Vec<Token> = vec![];
311
6
        tokens.append(&mut self.space0.tokenize());
312
6
        tokens.append(&mut self.key.tokenize());
313
6
        tokens.append(&mut self.space1.tokenize());
314
6
        tokens.push(Token::Colon(String::from(":")));
315
6
        tokens.append(&mut self.space2.tokenize());
316
6
        tokens.append(&mut self.value.tokenize());
317
6
        tokens.append(&mut self.line_terminator0.tokenize());
318
6
        tokens
319
    }
320
}
321

            
322
impl Tokenizable for FileValue {
323
6
    fn tokenize(&self) -> Vec<Token> {
324
6
        let mut tokens: Vec<Token> = vec![Token::Keyword("file,".to_string())];
325
6
        tokens.append(&mut self.space0.tokenize());
326
6
        tokens.append(&mut self.filename.tokenize());
327
6
        tokens.append(&mut self.space1.tokenize());
328
6
        tokens.push(Token::Keyword(";".to_string()));
329
6
        tokens.append(&mut self.space2.tokenize());
330
6
        if let Some(content_type) = &self.content_type {
331
3
            tokens.push(Token::String(content_type.to_string()));
332
        }
333
6
        tokens
334
    }
335
}
336

            
337
impl Tokenizable for Cookie {
338
9
    fn tokenize(&self) -> Vec<Token> {
339
9
        let mut tokens: Vec<Token> = vec![];
340
9
        tokens.append(
341
9
            &mut self
342
9
                .line_terminators
343
9
                .iter()
344
9
                .flat_map(|e| e.tokenize())
345
9
                .collect(),
346
9
        );
347
9
        tokens.append(&mut self.space0.tokenize());
348
9
        tokens.append(&mut self.name.tokenize());
349
9
        tokens.append(&mut self.space1.tokenize());
350
9
        tokens.push(Token::Colon(String::from(":")));
351
9
        tokens.append(&mut self.space2.tokenize());
352
9
        tokens.append(&mut self.value.tokenize());
353
9
        tokens.append(&mut self.line_terminator0.tokenize());
354
9
        tokens
355
    }
356
}
357

            
358
impl Tokenizable for Capture {
359
18
    fn tokenize(&self) -> Vec<Token> {
360
18
        let mut tokens: Vec<Token> = vec![];
361
18
        tokens.append(
362
18
            &mut self
363
18
                .line_terminators
364
18
                .iter()
365
18
                .flat_map(|e| e.tokenize())
366
18
                .collect(),
367
18
        );
368
18
        tokens.append(&mut self.space0.tokenize());
369
18
        tokens.append(&mut self.name.tokenize());
370
18
        tokens.append(&mut self.space1.tokenize());
371
18
        tokens.push(Token::Colon(String::from(":")));
372
18
        tokens.append(&mut self.space2.tokenize());
373
18
        tokens.append(&mut self.query.tokenize());
374
21
        for (space, filter) in &self.filters {
375
3
            tokens.append(&mut space.tokenize());
376
3
            tokens.append(&mut filter.tokenize());
377
        }
378
18
        tokens.append(&mut self.space3.tokenize());
379
18
        if self.redact {
380
6
            tokens.push(Token::Keyword(String::from("redact")));
381
        }
382
18
        tokens.append(&mut self.line_terminator0.tokenize());
383
18
        tokens
384
    }
385
}
386

            
387
impl Tokenizable for Assert {
388
345
    fn tokenize(&self) -> Vec<Token> {
389
345
        let mut tokens: Vec<Token> = vec![];
390
345
        tokens.append(
391
345
            &mut self
392
345
                .line_terminators
393
345
                .iter()
394
345
                .flat_map(|e| e.tokenize())
395
345
                .collect(),
396
345
        );
397
345
        tokens.append(&mut self.space0.tokenize());
398
345
        tokens.append(&mut self.query.tokenize());
399
450
        for (space, filter) in &self.filters {
400
105
            tokens.append(&mut space.tokenize());
401
105
            tokens.append(&mut filter.tokenize());
402
        }
403
345
        tokens.append(&mut self.space1.tokenize());
404
345
        // TODO reconvert back your first predicate for jsonpath
405
345
        // so that you can use your firstX predicate for other query
406
345
        tokens.append(&mut self.predicate.tokenize());
407
345
        tokens.append(&mut self.line_terminator0.tokenize());
408
345
        tokens
409
    }
410
}
411

            
412
impl Tokenizable for Query {
    /// A query tokenizes as its inner value.
    fn tokenize(&self) -> Vec<Token> {
        self.value.tokenize()
    }
}
417

            
418
impl Tokenizable for QueryValue {
419
363
    fn tokenize(&self) -> Vec<Token> {
420
363
        let mut tokens = vec![];
421
363
        let token = Token::QueryType(self.identifier().to_string());
422
363
        tokens.push(token);
423
363

            
424
363
        match self {
425
12
            QueryValue::Header { space0, name } => {
426
12
                tokens.append(&mut space0.tokenize());
427
12
                tokens.append(&mut name.tokenize());
428
            }
429
6
            QueryValue::Cookie { space0, expr } => {
430
6
                tokens.append(&mut space0.tokenize());
431
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
432
6
                tokens.append(&mut expr.tokenize());
433
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
434
            }
435
3
            QueryValue::Xpath { space0, expr } => {
436
3
                tokens.append(&mut space0.tokenize());
437
3
                tokens.append(&mut expr.tokenize());
438
            }
439
216
            QueryValue::Jsonpath { space0, expr } => {
440
216
                tokens.append(&mut space0.tokenize());
441
216
                tokens.append(&mut expr.tokenize());
442
            }
443
3
            QueryValue::Regex { space0, value } => {
444
3
                tokens.append(&mut space0.tokenize());
445
3
                tokens.append(&mut value.tokenize());
446
            }
447
9
            QueryValue::Variable { space0, name } => {
448
9
                tokens.append(&mut space0.tokenize());
449
9
                tokens.append(&mut name.tokenize());
450
            }
451
            QueryValue::Certificate {
452
30
                space0,
453
30
                attribute_name: field,
454
30
            } => {
455
30
                tokens.append(&mut space0.tokenize());
456
30
                tokens.append(&mut field.tokenize());
457
            }
458
84
            _ => {}
459
        }
460
363
        tokens
461
    }
462
}
463

            
464
impl Tokenizable for RegexValue {
465
21
    fn tokenize(&self) -> Vec<Token> {
466
21
        match self {
467
18
            RegexValue::Template(template) => template.tokenize(),
468
3
            RegexValue::Regex(regex) => regex.tokenize(),
469
        }
470
    }
471
}
472

            
473
impl Tokenizable for CookiePath {
474
6
    fn tokenize(&self) -> Vec<Token> {
475
6
        let mut tokens: Vec<Token> = vec![];
476
6
        tokens.append(&mut self.name.tokenize());
477
6
        if let Some(attribute) = self.attribute.clone() {
478
3
            tokens.append(&mut attribute.tokenize());
479
        }
480
6
        tokens
481
    }
482
}
483

            
484
impl Tokenizable for CookieAttribute {
485
3
    fn tokenize(&self) -> Vec<Token> {
486
3
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter("[".to_string())];
487
3
        tokens.append(&mut self.space0.tokenize());
488
3
        tokens.push(Token::String(self.name.value()));
489
3
        tokens.append(&mut self.space1.tokenize());
490
3
        tokens.push(Token::CodeDelimiter("]".to_string()));
491
3
        tokens
492
    }
493
}
494

            
495
impl Tokenizable for CertificateAttributeName {
496
30
    fn tokenize(&self) -> Vec<Token> {
497
30
        vec![
498
30
            Token::StringDelimiter("\"".to_string()),
499
30
            Token::String(self.identifier().to_string()),
500
30
            Token::StringDelimiter("\"".to_string()),
501
30
        ]
502
    }
503
}
504

            
505
impl Tokenizable for Predicate {
506
345
    fn tokenize(&self) -> Vec<Token> {
507
345
        let mut tokens: Vec<Token> = vec![];
508
345
        if self.not {
509
3
            tokens.push(Token::Not(String::from("not")));
510
3
            tokens.append(&mut self.space0.tokenize());
511
        }
512
345
        tokens.append(&mut self.predicate_func.tokenize());
513
345
        tokens
514
    }
515
}
516

            
517
impl Tokenizable for PredicateFunc {
    /// A predicate function tokenizes as its inner value.
    fn tokenize(&self) -> Vec<Token> {
        self.value.tokenize()
    }
}
522

            
523
impl Tokenizable for PredicateFuncValue {
524
345
    fn tokenize(&self) -> Vec<Token> {
525
345
        let mut tokens: Vec<Token> = vec![];
526
345
        let name = self.identifier().to_string();
527
345
        match self {
528
225
            PredicateFuncValue::Equal { space0, value, .. } => {
529
225
                tokens.push(Token::PredicateType(name));
530
225
                tokens.append(&mut space0.tokenize());
531
225
                tokens.append(&mut value.tokenize());
532
            }
533
9
            PredicateFuncValue::NotEqual { space0, value, .. } => {
534
9
                tokens.push(Token::PredicateType(name));
535
9
                tokens.append(&mut space0.tokenize());
536
9
                tokens.append(&mut value.tokenize());
537
            }
538
9
            PredicateFuncValue::GreaterThan { space0, value, .. } => {
539
9
                tokens.push(Token::PredicateType(name));
540
9
                tokens.append(&mut space0.tokenize());
541
9
                tokens.append(&mut value.tokenize());
542
            }
543
3
            PredicateFuncValue::GreaterThanOrEqual { space0, value, .. } => {
544
3
                tokens.push(Token::PredicateType(name));
545
3
                tokens.append(&mut space0.tokenize());
546
3
                tokens.append(&mut value.tokenize());
547
            }
548
12
            PredicateFuncValue::LessThan { space0, value, .. } => {
549
12
                tokens.push(Token::PredicateType(name));
550
12
                tokens.append(&mut space0.tokenize());
551
12
                tokens.append(&mut value.tokenize());
552
            }
553
3
            PredicateFuncValue::LessThanOrEqual { space0, value, .. } => {
554
3
                tokens.push(Token::PredicateType(name));
555
3
                tokens.append(&mut space0.tokenize());
556
3
                tokens.append(&mut value.tokenize());
557
            }
558
9
            PredicateFuncValue::StartWith { space0, value } => {
559
9
                tokens.push(Token::PredicateType(name));
560
9
                tokens.append(&mut space0.tokenize());
561
9
                tokens.append(&mut value.tokenize());
562
            }
563
6
            PredicateFuncValue::EndWith { space0, value } => {
564
6
                tokens.push(Token::PredicateType(name));
565
6
                tokens.append(&mut space0.tokenize());
566
6
                tokens.append(&mut value.tokenize());
567
            }
568
9
            PredicateFuncValue::Contain { space0, value } => {
569
9
                tokens.push(Token::PredicateType(name));
570
9
                tokens.append(&mut space0.tokenize());
571
9
                tokens.append(&mut value.tokenize());
572
            }
573
3
            PredicateFuncValue::Include { space0, value } => {
574
3
                tokens.push(Token::PredicateType(name));
575
3
                tokens.append(&mut space0.tokenize());
576
3
                tokens.append(&mut value.tokenize());
577
            }
578
6
            PredicateFuncValue::Match { space0, value } => {
579
6
                tokens.push(Token::PredicateType(name));
580
6
                tokens.append(&mut space0.tokenize());
581
6
                tokens.append(&mut value.tokenize());
582
            }
583
3
            PredicateFuncValue::IsInteger => {
584
3
                tokens.push(Token::PredicateType(name));
585
            }
586
3
            PredicateFuncValue::IsFloat => {
587
3
                tokens.push(Token::PredicateType(name));
588
            }
589
3
            PredicateFuncValue::IsBoolean => {
590
3
                tokens.push(Token::PredicateType(name));
591
            }
592
3
            PredicateFuncValue::IsString => {
593
3
                tokens.push(Token::PredicateType(name));
594
            }
595
3
            PredicateFuncValue::IsCollection => {
596
3
                tokens.push(Token::PredicateType(name));
597
            }
598
9
            PredicateFuncValue::IsDate => {
599
9
                tokens.push(Token::PredicateType(name));
600
            }
601
3
            PredicateFuncValue::IsIsoDate => {
602
3
                tokens.push(Token::PredicateType(name));
603
            }
604
12
            PredicateFuncValue::Exist => {
605
12
                tokens.push(Token::PredicateType(name));
606
            }
607
3
            PredicateFuncValue::IsEmpty => {
608
3
                tokens.push(Token::PredicateType(name));
609
            }
610
3
            PredicateFuncValue::IsNumber => {
611
3
                tokens.push(Token::PredicateType(name));
612
            }
613
3
            PredicateFuncValue::IsIpv4 => {
614
3
                tokens.push(Token::PredicateType(name));
615
            }
616
3
            PredicateFuncValue::IsIpv6 => {
617
3
                tokens.push(Token::PredicateType(name));
618
            }
619
        }
620
345
        tokens
621
    }
622
}
623

            
624
impl Tokenizable for PredicateValue {
625
294
    fn tokenize(&self) -> Vec<Token> {
626
294
        match self {
627
123
            PredicateValue::String(value) => value.tokenize(),
628
18
            PredicateValue::MultilineString(value) => value.tokenize(),
629
3
            PredicateValue::Bool(value) => vec![Token::Boolean(value.to_string())],
630
3
            PredicateValue::Null => vec![Token::Keyword("null".to_string())],
631
108
            PredicateValue::Number(value) => vec![Token::Number(value.to_source().to_string())],
632
3
            PredicateValue::File(value) => value.tokenize(),
633
27
            PredicateValue::Hex(value) => vec![Token::String(value.to_string())],
634
3
            PredicateValue::Base64(value) => value.tokenize(),
635
3
            PredicateValue::Placeholder(value) => value.tokenize(),
636
3
            PredicateValue::Regex(value) => value.tokenize(),
637
        }
638
    }
639
}
640

            
641
impl Tokenizable for MultilineString {
642
57
    fn tokenize(&self) -> Vec<Token> {
643
57
        let mut tokens: Vec<Token> = vec![Token::StringDelimiter("```".to_string())];
644
57
        tokens.push(Token::Lang(self.lang().to_string()));
645
57
        for (i, attribute) in self.attributes.iter().enumerate() {
646
12
            if i > 0 || !self.lang().is_empty() {
647
3
                tokens.push(Token::StringDelimiter(",".to_string()));
648
            }
649
12
            tokens.append(&mut attribute.tokenize());
650
        }
651
57
        tokens.append(&mut self.space.tokenize());
652
57
        tokens.append(&mut self.newline.tokenize());
653
57
        match &self.kind {
654
27
            MultilineStringKind::Text(value)
655
12
            | MultilineStringKind::Json(value)
656
48
            | MultilineStringKind::Xml(value) => tokens.append(&mut value.tokenize()),
657
9
            MultilineStringKind::GraphQl(graphql) => tokens.append(&mut graphql.tokenize()),
658
        }
659
57
        tokens.push(Token::StringDelimiter("```".to_string()));
660
57
        tokens
661
    }
662
}
663

            
664
impl Tokenizable for MultilineStringAttribute {
665
12
    fn tokenize(&self) -> Vec<Token> {
666
12
        match self {
667
9
            MultilineStringAttribute::Escape => vec![Token::String("escape".to_string())],
668
3
            MultilineStringAttribute::NoVariable => vec![Token::String("novariable".to_string())],
669
        }
670
    }
671
}
672

            
673
impl Tokenizable for GraphQl {
674
9
    fn tokenize(&self) -> Vec<Token> {
675
9
        let mut tokens: Vec<Token> = vec![];
676
9
        tokens.append(&mut self.value.tokenize());
677
9
        if let Some(vars) = &self.variables {
678
            tokens.append(&mut vars.tokenize());
679
        }
680
9
        tokens
681
    }
682
}
683

            
684
impl Tokenizable for GraphQlVariables {
685
    fn tokenize(&self) -> Vec<Token> {
686
        let mut tokens: Vec<Token> = vec![];
687
        tokens.push(Token::String("variables".to_string()));
688
        tokens.append(&mut self.space.tokenize());
689
        tokens.append(&mut self.value.tokenize());
690
        tokens.append(&mut self.whitespace.tokenize());
691
        tokens
692
    }
693
}
694

            
695
impl Tokenizable for Template {
696
1191
    fn tokenize(&self) -> Vec<Token> {
697
1191
        let mut tokens: Vec<Token> = vec![];
698
1191
        if let Some(d) = self.delimiter {
699
459
            tokens.push(Token::StringDelimiter(d.to_string()));
700
        }
701
2424
        for element in &self.elements {
702
1233
            tokens.append(&mut element.tokenize());
703
        }
704
1191
        if let Some(d) = self.delimiter {
705
459
            tokens.push(Token::StringDelimiter(d.to_string()));
706
        }
707
1191
        tokens
708
    }
709
}
710

            
711
impl Tokenizable for TemplateElement {
712
1233
    fn tokenize(&self) -> Vec<Token> {
713
1233
        match self {
714
1137
            TemplateElement::String { source, .. } => {
715
1137
                vec![Token::String(source.to_string())]
716
            }
717
96
            TemplateElement::Placeholder(value) => {
718
96
                let mut tokens: Vec<Token> = vec![];
719
96
                tokens.append(&mut value.tokenize());
720
96
                tokens
721
            }
722
        }
723
    }
724
}
725

            
726
impl Tokenizable for Placeholder {
727
171
    fn tokenize(&self) -> Vec<Token> {
728
171
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter(String::from("{{"))];
729
171
        tokens.append(&mut self.space0.tokenize());
730
171
        tokens.append(&mut self.expr.tokenize());
731
171
        tokens.append(&mut self.space1.tokenize());
732
171
        tokens.push(Token::CodeDelimiter(String::from("}}")));
733
171
        tokens
734
    }
735
}
736

            
737
impl Tokenizable for Expr {
    /// An expression tokenizes as its kind (variable or function call).
    fn tokenize(&self) -> Vec<Token> {
        self.kind.tokenize()
    }
}
742

            
743
impl Tokenizable for ExprKind {
744
171
    fn tokenize(&self) -> Vec<Token> {
745
171
        match self {
746
165
            ExprKind::Variable(variable) => variable.tokenize(),
747
6
            ExprKind::Function(function) => function.tokenize(),
748
        }
749
    }
750
}
751

            
752
impl Tokenizable for Variable {
    /// A variable reference renders as a single `CodeVariable` token.
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::CodeVariable(self.name.clone())]
    }
}
757

            
758
impl Tokenizable for Function {
759
6
    fn tokenize(&self) -> Vec<Token> {
760
6
        match self {
761
3
            Function::NewDate => vec![Token::CodeVariable("newDate".to_string())],
762
3
            Function::NewUuid => vec![Token::CodeVariable("newUuid".to_string())],
763
        }
764
    }
765
}
766

            
767
impl Tokenizable for Regex {
768
6
    fn tokenize(&self) -> Vec<Token> {
769
6
        let s = str::replace(self.inner.as_str(), "/", "\\/");
770
6
        vec![Token::String(format!("/{s}/"))]
771
    }
772
}
773

            
774
impl Tokenizable for LineTerminator {
775
1626
    fn tokenize(&self) -> Vec<Token> {
776
1626
        let mut tokens: Vec<Token> = vec![];
777
1626
        tokens.append(&mut self.space0.tokenize());
778
1626
        if let Some(comment) = &self.comment {
779
270
            tokens.append(&mut comment.tokenize());
780
        }
781
1626
        tokens.append(&mut self.newline.tokenize());
782
1626
        tokens
783
    }
784
}
785

            
786
impl Tokenizable for Whitespace {
787
7599
    fn tokenize(&self) -> Vec<Token> {
788
7599
        let mut tokens: Vec<Token> = vec![];
789
7599
        if !self.value.is_empty() {
790
3843
            tokens.push(Token::Whitespace(self.value.clone()));
791
        }
792
7599
        tokens
793
    }
794
}
795

            
796
impl Tokenizable for Comment {
797
270
    fn tokenize(&self) -> Vec<Token> {
798
270
        vec![Token::Comment(format!("#{}", self.value.clone()))]
799
    }
800
}
801

            
802
impl Tokenizable for JsonValue {
803
102
    fn tokenize(&self) -> Vec<Token> {
804
102
        let mut tokens: Vec<Token> = vec![];
805
102
        match self {
806
18
            JsonValue::String(s) => {
807
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
808
18
                tokens.append(&mut s.tokenize());
809
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
810
            }
811
45
            JsonValue::Number(value) => {
812
45
                tokens.push(Token::Number(value.to_string()));
813
            }
814
3
            JsonValue::Boolean(value) => {
815
3
                tokens.push(Token::Boolean(value.to_string()));
816
            }
817
15
            JsonValue::List { space0, elements } => {
818
15
                tokens.push(Token::CodeDelimiter("[".to_string()));
819
15
                tokens.push(Token::Whitespace(space0.clone()));
820
51
                for (i, element) in elements.iter().enumerate() {
821
51
                    if i > 0 {
822
39
                        tokens.push(Token::CodeDelimiter(",".to_string()));
823
                    }
824
51
                    tokens.append(&mut element.tokenize());
825
                }
826
15
                tokens.push(Token::CodeDelimiter("]".to_string()));
827
            }
828
15
            JsonValue::Object { space0, elements } => {
829
15
                tokens.push(Token::CodeDelimiter("{".to_string()));
830
15
                tokens.push(Token::Whitespace(space0.clone()));
831
45
                for (i, element) in elements.iter().enumerate() {
832
45
                    if i > 0 {
833
33
                        tokens.push(Token::CodeDelimiter(",".to_string()));
834
                    }
835
45
                    tokens.append(&mut element.tokenize());
836
                }
837
15
                tokens.push(Token::CodeDelimiter("}".to_string()));
838
            }
839
3
            JsonValue::Null => {
840
3
                tokens.push(Token::Keyword("null".to_string()));
841
            }
842
3
            JsonValue::Placeholder(exp) => {
843
3
                tokens.append(&mut exp.tokenize());
844
            }
845
        }
846
102
        tokens
847
    }
848
}
849

            
850
impl Tokenizable for JsonListElement {
851
51
    fn tokenize(&self) -> Vec<Token> {
852
51
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
853
51
        tokens.append(&mut self.value.tokenize());
854
51
        tokens.push(Token::Whitespace(self.space1.clone()));
855
51
        tokens
856
    }
857
}
858

            
859
impl Tokenizable for JsonObjectElement {
860
45
    fn tokenize(&self) -> Vec<Token> {
861
45
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
862
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
863
45
        tokens.push(Token::String(self.name.to_string()));
864
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
865
45
        tokens.push(Token::Whitespace(self.space1.clone()));
866
45
        tokens.push(Token::CodeDelimiter(":".to_string()));
867
45
        tokens.push(Token::Whitespace(self.space2.clone()));
868
45
        tokens.append(&mut self.value.tokenize());
869
45
        tokens.push(Token::Whitespace(self.space3.clone()));
870
45
        tokens
871
    }
872
}
873

            
874
impl Tokenizable for EntryOption {
875
279
    fn tokenize(&self) -> Vec<Token> {
876
279
        let mut tokens: Vec<Token> = vec![];
877
279
        tokens.append(
878
279
            &mut self
879
279
                .line_terminators
880
279
                .iter()
881
280
                .flat_map(|e| e.tokenize())
882
279
                .collect(),
883
279
        );
884
279
        tokens.append(&mut self.space0.tokenize());
885
279
        tokens.push(Token::String(self.kind.identifier().to_string()));
886
279
        tokens.append(&mut self.space1.tokenize());
887
279
        tokens.push(Token::Colon(String::from(":")));
888
279
        tokens.append(&mut self.space2.tokenize());
889
279
        tokens.append(&mut self.kind.tokenize());
890
279
        tokens.append(&mut self.line_terminator0.tokenize());
891
279
        tokens
892
    }
893
}
894

            
895
impl Tokenizable for OptionKind {
896
279
    fn tokenize(&self) -> Vec<Token> {
897
279
        match self {
898
6
            OptionKind::AwsSigV4(value) => value.tokenize(),
899
6
            OptionKind::CaCertificate(filename) => filename.tokenize(),
900
9
            OptionKind::ClientCert(filename) => filename.tokenize(),
901
6
            OptionKind::ClientKey(filename) => filename.tokenize(),
902
6
            OptionKind::Compressed(value) => value.tokenize(),
903
6
            OptionKind::ConnectTo(value) => value.tokenize(),
904
6
            OptionKind::ConnectTimeout(value) => value.tokenize(),
905
12
            OptionKind::Delay(value) => value.tokenize(),
906
9
            OptionKind::FollowLocation(value) => value.tokenize(),
907
6
            OptionKind::FollowLocationTrusted(value) => value.tokenize(),
908
6
            OptionKind::Header(value) => value.tokenize(),
909
6
            OptionKind::Http10(value) => value.tokenize(),
910
6
            OptionKind::Http11(value) => value.tokenize(),
911
6
            OptionKind::Http2(value) => value.tokenize(),
912
6
            OptionKind::Http3(value) => value.tokenize(),
913
9
            OptionKind::Insecure(value) => value.tokenize(),
914
6
            OptionKind::IpV4(value) => value.tokenize(),
915
6
            OptionKind::IpV6(value) => value.tokenize(),
916
6
            OptionKind::LimitRate(value) => value.tokenize(),
917
6
            OptionKind::MaxRedirect(value) => value.tokenize(),
918
6
            OptionKind::NetRc(value) => value.tokenize(),
919
6
            OptionKind::NetRcFile(filename) => filename.tokenize(),
920
6
            OptionKind::NetRcOptional(value) => value.tokenize(),
921
6
            OptionKind::Output(filename) => filename.tokenize(),
922
6
            OptionKind::PathAsIs(value) => value.tokenize(),
923
6
            OptionKind::Proxy(value) => value.tokenize(),
924
9
            OptionKind::Repeat(value) => value.tokenize(),
925
6
            OptionKind::Resolve(value) => value.tokenize(),
926
15
            OptionKind::Retry(value) => value.tokenize(),
927
12
            OptionKind::RetryInterval(value) => value.tokenize(),
928
6
            OptionKind::Skip(value) => value.tokenize(),
929
6
            OptionKind::UnixSocket(value) => value.tokenize(),
930
6
            OptionKind::User(value) => value.tokenize(),
931
27
            OptionKind::Variable(value) => value.tokenize(),
932
15
            OptionKind::Verbose(value) => value.tokenize(),
933
6
            OptionKind::VeryVerbose(value) => value.tokenize(),
934
        }
935
    }
936
}
937

            
938
impl Tokenizable for BooleanOption {
939
111
    fn tokenize(&self) -> Vec<Token> {
940
111
        match self {
941
63
            BooleanOption::Literal(value) => vec![Token::Boolean(value.to_string())],
942
48
            BooleanOption::Placeholder(expr) => expr.tokenize(),
943
        }
944
    }
945
}
946

            
947
impl Tokenizable for NaturalOption {
948
6
    fn tokenize(&self) -> Vec<Token> {
949
6
        match self {
950
3
            NaturalOption::Literal(value) => value.tokenize(),
951
3
            NaturalOption::Placeholder(expr) => expr.tokenize(),
952
        }
953
    }
954
}
955

            
956
impl Tokenizable for U64 {
957
3
    fn tokenize(&self) -> Vec<Token> {
958
3
        vec![Token::Number(self.to_source().to_string())]
959
    }
960
}
961

            
962
impl Tokenizable for I64 {
963
    fn tokenize(&self) -> Vec<Token> {
964
        vec![Token::Number(self.to_source().to_string())]
965
    }
966
}
967

            
968
impl Tokenizable for CountOption {
969
30
    fn tokenize(&self) -> Vec<Token> {
970
30
        match self {
971
21
            CountOption::Literal(retry) => retry.tokenize(),
972
9
            CountOption::Placeholder(expr) => expr.tokenize(),
973
        }
974
    }
975
}
976

            
977
impl Tokenizable for Count {
978
21
    fn tokenize(&self) -> Vec<Token> {
979
21
        match self {
980
15
            Count::Finite(n) => vec![Token::Number(n.to_string())],
981
6
            Count::Infinite => vec![Token::Number("-1".to_string())],
982
        }
983
    }
984
}
985

            
986
impl Tokenizable for DurationOption {
987
30
    fn tokenize(&self) -> Vec<Token> {
988
30
        match self {
989
21
            DurationOption::Literal(value) => value.tokenize(),
990
9
            DurationOption::Placeholder(expr) => expr.tokenize(),
991
        }
992
    }
993
}
994

            
995
impl Tokenizable for Duration {
996
21
    fn tokenize(&self) -> Vec<Token> {
997
21
        let mut tokens = vec![Token::Number(self.value.to_source().to_string())];
998
21
        if let Some(unit) = self.unit {
999
21
            tokens.push(Token::Unit(unit.to_string()));
        }
21
        tokens
    }
}
impl Tokenizable for VariableDefinition {
27
    fn tokenize(&self) -> Vec<Token> {
27
        let mut tokens: Vec<Token> = vec![Token::String(self.name.clone())];
27
        tokens.append(&mut self.space0.tokenize());
27
        tokens.push(Token::Keyword("=".to_string()));
27
        tokens.append(&mut self.space1.tokenize());
27
        tokens.append(&mut self.value.tokenize());
27
        tokens
    }
}
impl Tokenizable for VariableValue {
27
    fn tokenize(&self) -> Vec<Token> {
27
        match self {
3
            VariableValue::Null => vec![Token::Keyword("null".to_string())],
3
            VariableValue::Bool(v) => vec![Token::Boolean(v.to_string())],
6
            VariableValue::Number(v) => vec![Token::Number(v.to_source().to_string())],
15
            VariableValue::String(v) => v.tokenize(),
        }
    }
}
impl Tokenizable for Filter {
108
    fn tokenize(&self) -> Vec<Token> {
108
        let mut tokens = vec![Token::FilterType(self.value.identifier().to_string())];
108
        match &self.value {
6
            FilterValue::Decode { space0, encoding } => {
6
                tokens.append(&mut space0.tokenize());
6
                tokens.append(&mut encoding.tokenize());
            }
9
            FilterValue::Format { space0, fmt } => {
9
                tokens.append(&mut space0.tokenize());
9
                tokens.append(&mut fmt.tokenize());
            }
3
            FilterValue::JsonPath { space0, expr } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
            }
3
            FilterValue::Nth { space0, n } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.push(Token::Number(n.to_source().to_string()));
            }
3
            FilterValue::Regex { space0, value } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut value.tokenize());
            }
            FilterValue::Replace {
15
                space0,
15
                old_value,
15
                space1,
15
                new_value,
15
            } => {
15
                tokens.append(&mut space0.tokenize());
15
                tokens.append(&mut old_value.tokenize());
15
                tokens.append(&mut space1.tokenize());
15
                tokens.append(&mut new_value.tokenize());
            }
3
            FilterValue::Split { space0, sep } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut sep.tokenize());
            }
3
            FilterValue::ToDate { space0, fmt } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut fmt.tokenize());
            }
3
            FilterValue::UrlQueryParam { space0, param } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut param.tokenize());
            }
3
            FilterValue::XPath { space0, expr } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
            }
57
            _ => {}
        }
108
        tokens
    }
}