1
/*
2
 * Hurl (https://hurl.dev)
3
 * Copyright (C) 2025 Orange
4
 *
5
 * Licensed under the Apache License, Version 2.0 (the "License");
6
 * you may not use this file except in compliance with the License.
7
 * You may obtain a copy of the License at
8
 *
9
 *          http://www.apache.org/licenses/LICENSE-2.0
10
 *
11
 * Unless required by applicable law or agreed to in writing, software
12
 * distributed under the License is distributed on an "AS IS" BASIS,
13
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
 * See the License for the specific language governing permissions and
15
 * limitations under the License.
16
 *
17
 */
18
use hurl_core::ast::{
19
    Assert, Base64, Body, BooleanOption, Bytes, Capture, CertificateAttributeName, Comment, Cookie,
20
    CookieAttribute, CookiePath, CountOption, DurationOption, Entry, EntryOption, Expr, ExprKind,
21
    File, FileParam, FileValue, Filter, FilterValue, Function, GraphQl, GraphQlVariables, Hex,
22
    HurlFile, JsonListElement, JsonObjectElement, JsonValue, KeyValue, LineTerminator, Method,
23
    MultilineString, MultilineStringAttribute, MultilineStringKind, MultipartParam, NaturalOption,
24
    OptionKind, Placeholder, Predicate, PredicateFunc, PredicateFuncValue, PredicateValue, Query,
25
    QueryValue, Regex, RegexValue, Request, Response, Section, SectionValue, Status, StatusValue,
26
    Template, TemplateElement, Text, Variable, VariableDefinition, VariableValue, Version,
27
    Whitespace, I64, U64,
28
};
29
use hurl_core::typing::{Count, Duration, ToSource};
30

            
31
/// A lexical token of a Hurl file, carrying its original source text.
///
/// Tokens are produced by [`Tokenizable::tokenize`]; concatenating the
/// payloads of a token stream reproduces the source, while the variant
/// tells a renderer which style (color) to apply to each fragment.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
    Method(String),
    Version(String),
    Status(String),
    SectionHeader(String),
    QueryType(String),
    PredicateType(String),
    FilterType(String),
    Not(String),
    Keyword(String),

    // Primitives
    Whitespace(String),
    Comment(String),
    Value(String),
    Colon(String),
    StringDelimiter(String),
    Boolean(String),
    Number(String),
    String(String),
    CodeDelimiter(String),
    CodeVariable(String),
    Lang(String),
    Unit(String),
}
57

            
58
/// AST nodes that can be flattened into a sequence of [`Token`]s.
pub trait Tokenizable {
    /// Returns the tokens representing this node's source text, in order.
    fn tokenize(&self) -> Vec<Token>;
}
61

            
62
impl Tokenizable for HurlFile {
63
81
    fn tokenize(&self) -> Vec<Token> {
64
81
        let mut tokens: Vec<Token> = vec![];
65
270
        tokens.append(&mut self.entries.iter().flat_map(|e| e.tokenize()).collect());
66
81
        tokens.append(
67
81
            &mut self
68
81
                .line_terminators
69
81
                .iter()
70
89
                .flat_map(|e| e.tokenize())
71
81
                .collect(),
72
81
        );
73
81
        tokens
74
    }
75
}
76

            
77
impl Tokenizable for Entry {
78
243
    fn tokenize(&self) -> Vec<Token> {
79
243
        let mut tokens: Vec<Token> = vec![];
80
243
        tokens.append(&mut self.request.tokenize());
81
243
        if let Some(response) = &self.response {
82
105
            tokens.append(&mut response.tokenize());
83
        }
84
243
        tokens
85
    }
86
}
87

            
88
impl Tokenizable for Request {
89
243
    fn tokenize(&self) -> Vec<Token> {
90
243
        let mut tokens: Vec<Token> = vec![];
91
243
        tokens.append(
92
243
            &mut self
93
243
                .line_terminators
94
243
                .iter()
95
292
                .flat_map(|e| e.tokenize())
96
243
                .collect(),
97
243
        );
98
243
        tokens.append(&mut self.space0.tokenize());
99
243
        tokens.append(&mut self.method.tokenize());
100
243
        tokens.append(&mut self.space1.tokenize());
101
243
        tokens.append(&mut self.url.tokenize());
102
243
        tokens.append(&mut self.line_terminator0.tokenize());
103
268
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
104
263
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
105
243
        if let Some(body) = &self.body {
106
45
            tokens.append(&mut body.tokenize());
107
        }
108
243
        tokens
109
    }
110
}
111

            
112
impl Tokenizable for Method {
113
243
    fn tokenize(&self) -> Vec<Token> {
114
243
        vec![Token::Method(self.to_string())]
115
    }
116
}
117

            
118
impl Tokenizable for Response {
119
105
    fn tokenize(&self) -> Vec<Token> {
120
105
        let mut tokens: Vec<Token> = vec![];
121
105
        tokens.append(
122
105
            &mut self
123
105
                .line_terminators
124
105
                .iter()
125
110
                .flat_map(|e| e.tokenize())
126
105
                .collect(),
127
105
        );
128
105
        tokens.append(&mut self.space0.tokenize());
129
105
        tokens.append(&mut self.version.tokenize());
130
105
        tokens.append(&mut self.space1.tokenize());
131
105
        tokens.append(&mut self.status.tokenize());
132
105
        tokens.append(&mut self.line_terminator0.tokenize());
133
110
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
134
123
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
135
105
        if let Some(body) = self.clone().body {
136
45
            tokens.append(&mut body.tokenize());
137
        }
138
105
        tokens
139
    }
140
}
141

            
142
impl Tokenizable for Status {
143
105
    fn tokenize(&self) -> Vec<Token> {
144
105
        let mut tokens: Vec<Token> = vec![];
145
105
        match self.value.clone() {
146
3
            StatusValue::Any => tokens.push(Token::Status("*".to_string())),
147
102
            StatusValue::Specific(v) => tokens.push(Token::Status(v.to_string())),
148
        }
149
105
        tokens
150
    }
151
}
152

            
153
impl Tokenizable for Version {
154
105
    fn tokenize(&self) -> Vec<Token> {
155
105
        vec![Token::Version(self.value.to_string())]
156
    }
157
}
158

            
159
impl Tokenizable for Body {
160
90
    fn tokenize(&self) -> Vec<Token> {
161
90
        let mut tokens: Vec<Token> = vec![];
162
90
        tokens.append(
163
90
            &mut self
164
90
                .line_terminators
165
90
                .iter()
166
95
                .flat_map(|e| e.tokenize())
167
90
                .collect(),
168
90
        );
169
90
        tokens.append(&mut self.space0.tokenize());
170
90
        tokens.append(&mut self.value.tokenize());
171
90
        tokens.append(&mut self.line_terminator0.tokenize());
172
90
        tokens
173
    }
174
}
175

            
176
impl Tokenizable for Bytes {
177
90
    fn tokenize(&self) -> Vec<Token> {
178
90
        let mut tokens: Vec<Token> = vec![];
179
90
        match self {
180
6
            Bytes::Json(value) => tokens.append(&mut value.tokenize()),
181
3
            Bytes::Xml(value) => tokens.push(Token::String(value.to_string())),
182
33
            Bytes::MultilineString(value) => tokens.append(&mut value.tokenize()),
183
15
            Bytes::OnelineString(value) => tokens.append(&mut value.tokenize()),
184
12
            Bytes::Base64(value) => tokens.append(&mut value.tokenize()),
185
9
            Bytes::Hex(value) => tokens.append(&mut value.tokenize()),
186
12
            Bytes::File(value) => tokens.append(&mut value.tokenize()),
187
        }
188
90
        tokens
189
    }
190
}
191

            
192
impl Tokenizable for Section {
193
114
    fn tokenize(&self) -> Vec<Token> {
194
114
        let mut tokens: Vec<Token> = vec![];
195
114
        tokens.append(
196
114
            &mut self
197
114
                .line_terminators
198
114
                .iter()
199
124
                .flat_map(|e| e.tokenize())
200
114
                .collect(),
201
114
        );
202
114
        tokens.append(&mut self.space0.tokenize());
203
114
        tokens.push(Token::SectionHeader(format!("[{}]", self.identifier())));
204
114
        tokens.append(&mut self.line_terminator0.tokenize());
205
114
        tokens.append(&mut self.value.tokenize());
206
114
        tokens
207
    }
208
}
209

            
210
impl Tokenizable for SectionValue {
211
114
    fn tokenize(&self) -> Vec<Token> {
212
114
        let mut tokens: Vec<Token> = vec![];
213
114
        match self {
214
42
            SectionValue::Asserts(items) => {
215
344
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
216
            }
217
12
            SectionValue::QueryParams(items, _) => {
218
22
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
219
            }
220
3
            SectionValue::BasicAuth(item) => {
221
3
                if let Some(kv) = item {
222
3
                    tokens.append(&mut kv.tokenize());
223
                }
224
            }
225
6
            SectionValue::FormParams(items, _) => {
226
14
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
227
            }
228
6
            SectionValue::MultipartFormData(items, _) => {
229
11
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
230
            }
231
9
            SectionValue::Cookies(items) => {
232
12
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
233
            }
234
12
            SectionValue::Captures(items) => {
235
16
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
236
            }
237
24
            SectionValue::Options(items) => {
238
281
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
239
            }
240
        }
241
114
        tokens
242
    }
243
}
244

            
245
impl Tokenizable for Base64 {
246
15
    fn tokenize(&self) -> Vec<Token> {
247
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("base64,"))];
248
15
        tokens.append(&mut self.space0.tokenize());
249
15
        tokens.push(Token::String(self.source.to_string()));
250
15
        tokens.append(&mut self.space1.tokenize());
251
15
        tokens.push(Token::Keyword(String::from(";")));
252
15
        tokens
253
    }
254
}
255

            
256
impl Tokenizable for Hex {
257
9
    fn tokenize(&self) -> Vec<Token> {
258
9
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("hex,"))];
259
9
        tokens.append(&mut self.space0.tokenize());
260
9
        tokens.push(Token::String(self.source.to_string()));
261
9
        tokens.append(&mut self.space1.tokenize());
262
9
        tokens.push(Token::Keyword(String::from(";")));
263
9
        tokens
264
    }
265
}
266

            
267
impl Tokenizable for File {
268
15
    fn tokenize(&self) -> Vec<Token> {
269
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("file,"))];
270
15
        tokens.append(&mut self.space0.tokenize());
271
15
        tokens.append(&mut self.filename.tokenize());
272
15
        tokens.append(&mut self.space1.tokenize());
273
15
        tokens.push(Token::Keyword(String::from(";")));
274
15
        tokens
275
    }
276
}
277

            
278
impl Tokenizable for KeyValue {
279
126
    fn tokenize(&self) -> Vec<Token> {
280
126
        let mut tokens: Vec<Token> = vec![];
281
126
        tokens.append(
282
126
            &mut self
283
126
                .line_terminators
284
126
                .iter()
285
127
                .flat_map(|e| e.tokenize())
286
126
                .collect(),
287
126
        );
288
126
        tokens.append(&mut self.space0.tokenize());
289
126
        tokens.append(&mut self.key.tokenize());
290
126
        tokens.append(&mut self.space1.tokenize());
291
126
        tokens.push(Token::Colon(String::from(":")));
292
126
        tokens.append(&mut self.space2.tokenize());
293
126
        tokens.append(&mut self.value.tokenize());
294
126
        tokens.append(&mut self.line_terminator0.tokenize());
295
126
        tokens
296
    }
297
}
298

            
299
impl Tokenizable for MultipartParam {
300
9
    fn tokenize(&self) -> Vec<Token> {
301
9
        match self {
302
3
            MultipartParam::Param(key_value) => key_value.tokenize(),
303
6
            MultipartParam::FileParam(file_param) => file_param.tokenize(),
304
        }
305
    }
306
}
307

            
308
impl Tokenizable for FileParam {
309
6
    fn tokenize(&self) -> Vec<Token> {
310
6
        let mut tokens: Vec<Token> = vec![];
311
6
        tokens.append(&mut self.space0.tokenize());
312
6
        tokens.append(&mut self.key.tokenize());
313
6
        tokens.append(&mut self.space1.tokenize());
314
6
        tokens.push(Token::Colon(String::from(":")));
315
6
        tokens.append(&mut self.space2.tokenize());
316
6
        tokens.append(&mut self.value.tokenize());
317
6
        tokens.append(&mut self.line_terminator0.tokenize());
318
6
        tokens
319
    }
320
}
321

            
322
impl Tokenizable for FileValue {
323
6
    fn tokenize(&self) -> Vec<Token> {
324
6
        let mut tokens: Vec<Token> = vec![Token::Keyword("file,".to_string())];
325
6
        tokens.append(&mut self.space0.tokenize());
326
6
        tokens.append(&mut self.filename.tokenize());
327
6
        tokens.append(&mut self.space1.tokenize());
328
6
        tokens.push(Token::Keyword(";".to_string()));
329
6
        tokens.append(&mut self.space2.tokenize());
330
6
        if let Some(content_type) = self.content_type.clone() {
331
3
            tokens.push(Token::String(content_type));
332
        }
333
6
        tokens
334
    }
335
}
336

            
337
impl Tokenizable for Cookie {
338
9
    fn tokenize(&self) -> Vec<Token> {
339
9
        let mut tokens: Vec<Token> = vec![];
340
9
        tokens.append(
341
9
            &mut self
342
9
                .line_terminators
343
9
                .iter()
344
9
                .flat_map(|e| e.tokenize())
345
9
                .collect(),
346
9
        );
347
9
        tokens.append(&mut self.space0.tokenize());
348
9
        tokens.append(&mut self.name.tokenize());
349
9
        tokens.append(&mut self.space1.tokenize());
350
9
        tokens.push(Token::Colon(String::from(":")));
351
9
        tokens.append(&mut self.space2.tokenize());
352
9
        tokens.append(&mut self.value.tokenize());
353
9
        tokens.append(&mut self.line_terminator0.tokenize());
354
9
        tokens
355
    }
356
}
357

            
358
impl Tokenizable for Capture {
359
12
    fn tokenize(&self) -> Vec<Token> {
360
12
        let mut tokens: Vec<Token> = vec![];
361
12
        tokens.append(
362
12
            &mut self
363
12
                .line_terminators
364
12
                .iter()
365
12
                .flat_map(|e| e.tokenize())
366
12
                .collect(),
367
12
        );
368
12
        tokens.append(&mut self.space0.tokenize());
369
12
        tokens.append(&mut self.name.tokenize());
370
12
        tokens.append(&mut self.space1.tokenize());
371
12
        tokens.push(Token::Colon(String::from(":")));
372
12
        tokens.append(&mut self.space2.tokenize());
373
12
        tokens.append(&mut self.query.tokenize());
374
15
        for (space, filter) in &self.filters {
375
3
            tokens.append(&mut space.tokenize());
376
3
            tokens.append(&mut filter.tokenize());
377
        }
378
12
        if self.redact {
379
3
            tokens.append(&mut self.space3.tokenize());
380
3
            tokens.push(Token::Keyword(String::from("redact")));
381
        }
382
12
        tokens.append(&mut self.line_terminator0.tokenize());
383
12
        tokens
384
    }
385
}
386

            
387
impl Tokenizable for Assert {
388
330
    fn tokenize(&self) -> Vec<Token> {
389
330
        let mut tokens: Vec<Token> = vec![];
390
330
        tokens.append(
391
330
            &mut self
392
330
                .line_terminators
393
330
                .iter()
394
330
                .flat_map(|e| e.tokenize())
395
330
                .collect(),
396
330
        );
397
330
        tokens.append(&mut self.space0.tokenize());
398
330
        tokens.append(&mut self.query.tokenize());
399
423
        for (space, filter) in &self.filters {
400
93
            tokens.append(&mut space.tokenize());
401
93
            tokens.append(&mut filter.tokenize());
402
        }
403
330
        tokens.append(&mut self.space1.tokenize());
404
330
        // TODO reconvert back your first predicate for jsonpath
405
330
        // so that you can use your firstX predicate for other query
406
330
        tokens.append(&mut self.predicate.tokenize());
407
330
        tokens.append(&mut self.line_terminator0.tokenize());
408
330
        tokens
409
    }
410
}
411

            
412
impl Tokenizable for Query {
413
342
    fn tokenize(&self) -> Vec<Token> {
414
342
        self.value.tokenize()
415
    }
416
}
417

            
418
impl Tokenizable for QueryValue {
419
342
    fn tokenize(&self) -> Vec<Token> {
420
342
        let mut tokens = vec![];
421
342
        let token = Token::QueryType(self.identifier().to_string());
422
342
        tokens.push(token);
423
342

            
424
342
        match self {
425
12
            QueryValue::Header { space0, name } => {
426
12
                tokens.append(&mut space0.tokenize());
427
12
                tokens.append(&mut name.tokenize());
428
            }
429
6
            QueryValue::Cookie { space0, expr } => {
430
6
                tokens.append(&mut space0.tokenize());
431
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
432
6
                tokens.append(&mut expr.tokenize());
433
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
434
            }
435
3
            QueryValue::Xpath { space0, expr } => {
436
3
                tokens.append(&mut space0.tokenize());
437
3
                tokens.append(&mut expr.tokenize());
438
            }
439
201
            QueryValue::Jsonpath { space0, expr } => {
440
201
                tokens.append(&mut space0.tokenize());
441
201
                tokens.append(&mut expr.tokenize());
442
            }
443
3
            QueryValue::Regex { space0, value } => {
444
3
                tokens.append(&mut space0.tokenize());
445
3
                tokens.append(&mut value.tokenize());
446
            }
447
9
            QueryValue::Variable { space0, name } => {
448
9
                tokens.append(&mut space0.tokenize());
449
9
                tokens.append(&mut name.tokenize());
450
            }
451
            QueryValue::Certificate {
452
30
                space0,
453
30
                attribute_name: field,
454
30
            } => {
455
30
                tokens.append(&mut space0.tokenize());
456
30
                tokens.append(&mut field.tokenize());
457
            }
458
78
            _ => {}
459
        }
460
342
        tokens
461
    }
462
}
463

            
464
impl Tokenizable for RegexValue {
465
21
    fn tokenize(&self) -> Vec<Token> {
466
21
        match self {
467
18
            RegexValue::Template(template) => template.tokenize(),
468
3
            RegexValue::Regex(regex) => regex.tokenize(),
469
        }
470
    }
471
}
472

            
473
impl Tokenizable for CookiePath {
474
6
    fn tokenize(&self) -> Vec<Token> {
475
6
        let mut tokens: Vec<Token> = vec![];
476
6
        tokens.append(&mut self.name.tokenize());
477
6
        if let Some(attribute) = self.attribute.clone() {
478
3
            tokens.append(&mut attribute.tokenize());
479
        }
480
6
        tokens
481
    }
482
}
483

            
484
impl Tokenizable for CookieAttribute {
485
3
    fn tokenize(&self) -> Vec<Token> {
486
3
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter("[".to_string())];
487
3
        tokens.append(&mut self.space0.tokenize());
488
3
        tokens.push(Token::String(self.name.value()));
489
3
        tokens.append(&mut self.space1.tokenize());
490
3
        tokens.push(Token::CodeDelimiter("]".to_string()));
491
3
        tokens
492
    }
493
}
494

            
495
impl Tokenizable for CertificateAttributeName {
496
30
    fn tokenize(&self) -> Vec<Token> {
497
30
        vec![
498
30
            Token::StringDelimiter("\"".to_string()),
499
30
            Token::String(self.identifier().to_string()),
500
30
            Token::StringDelimiter("\"".to_string()),
501
30
        ]
502
    }
503
}
504

            
505
impl Tokenizable for Predicate {
506
330
    fn tokenize(&self) -> Vec<Token> {
507
330
        let mut tokens: Vec<Token> = vec![];
508
330
        if self.not {
509
3
            tokens.push(Token::Not(String::from("not")));
510
3
            tokens.append(&mut self.space0.tokenize());
511
        }
512
330
        tokens.append(&mut self.predicate_func.tokenize());
513
330
        tokens
514
    }
515
}
516

            
517
impl Tokenizable for PredicateFunc {
518
330
    fn tokenize(&self) -> Vec<Token> {
519
330
        self.value.tokenize()
520
    }
521
}
522

            
523
impl Tokenizable for PredicateFuncValue {
524
330
    fn tokenize(&self) -> Vec<Token> {
525
330
        let mut tokens: Vec<Token> = vec![];
526
330
        let name = self.identifier().to_string();
527
330
        match self {
528
210
            PredicateFuncValue::Equal { space0, value, .. } => {
529
210
                tokens.push(Token::PredicateType(name));
530
210
                tokens.append(&mut space0.tokenize());
531
210
                tokens.append(&mut value.tokenize());
532
            }
533
9
            PredicateFuncValue::NotEqual { space0, value, .. } => {
534
9
                tokens.push(Token::PredicateType(name));
535
9
                tokens.append(&mut space0.tokenize());
536
9
                tokens.append(&mut value.tokenize());
537
            }
538
9
            PredicateFuncValue::GreaterThan { space0, value, .. } => {
539
9
                tokens.push(Token::PredicateType(name));
540
9
                tokens.append(&mut space0.tokenize());
541
9
                tokens.append(&mut value.tokenize());
542
            }
543
3
            PredicateFuncValue::GreaterThanOrEqual { space0, value, .. } => {
544
3
                tokens.push(Token::PredicateType(name));
545
3
                tokens.append(&mut space0.tokenize());
546
3
                tokens.append(&mut value.tokenize());
547
            }
548
12
            PredicateFuncValue::LessThan { space0, value, .. } => {
549
12
                tokens.push(Token::PredicateType(name));
550
12
                tokens.append(&mut space0.tokenize());
551
12
                tokens.append(&mut value.tokenize());
552
            }
553
3
            PredicateFuncValue::LessThanOrEqual { space0, value, .. } => {
554
3
                tokens.push(Token::PredicateType(name));
555
3
                tokens.append(&mut space0.tokenize());
556
3
                tokens.append(&mut value.tokenize());
557
            }
558
9
            PredicateFuncValue::StartWith { space0, value } => {
559
9
                tokens.push(Token::PredicateType(name));
560
9
                tokens.append(&mut space0.tokenize());
561
9
                tokens.append(&mut value.tokenize());
562
            }
563
6
            PredicateFuncValue::EndWith { space0, value } => {
564
6
                tokens.push(Token::PredicateType(name));
565
6
                tokens.append(&mut space0.tokenize());
566
6
                tokens.append(&mut value.tokenize());
567
            }
568
9
            PredicateFuncValue::Contain { space0, value } => {
569
9
                tokens.push(Token::PredicateType(name));
570
9
                tokens.append(&mut space0.tokenize());
571
9
                tokens.append(&mut value.tokenize());
572
            }
573
3
            PredicateFuncValue::Include { space0, value } => {
574
3
                tokens.push(Token::PredicateType(name));
575
3
                tokens.append(&mut space0.tokenize());
576
3
                tokens.append(&mut value.tokenize());
577
            }
578
6
            PredicateFuncValue::Match { space0, value } => {
579
6
                tokens.push(Token::PredicateType(name));
580
6
                tokens.append(&mut space0.tokenize());
581
6
                tokens.append(&mut value.tokenize());
582
            }
583
3
            PredicateFuncValue::IsInteger => {
584
3
                tokens.push(Token::PredicateType(name));
585
            }
586
3
            PredicateFuncValue::IsFloat => {
587
3
                tokens.push(Token::PredicateType(name));
588
            }
589
3
            PredicateFuncValue::IsBoolean => {
590
3
                tokens.push(Token::PredicateType(name));
591
            }
592
3
            PredicateFuncValue::IsString => {
593
3
                tokens.push(Token::PredicateType(name));
594
            }
595
3
            PredicateFuncValue::IsCollection => {
596
3
                tokens.push(Token::PredicateType(name));
597
            }
598
9
            PredicateFuncValue::IsDate => {
599
9
                tokens.push(Token::PredicateType(name));
600
            }
601
3
            PredicateFuncValue::IsIsoDate => {
602
3
                tokens.push(Token::PredicateType(name));
603
            }
604
12
            PredicateFuncValue::Exist => {
605
12
                tokens.push(Token::PredicateType(name));
606
            }
607
3
            PredicateFuncValue::IsEmpty => {
608
3
                tokens.push(Token::PredicateType(name));
609
            }
610
3
            PredicateFuncValue::IsNumber => {
611
3
                tokens.push(Token::PredicateType(name));
612
            }
613
3
            PredicateFuncValue::IsIpv4 => {
614
3
                tokens.push(Token::PredicateType(name));
615
            }
616
3
            PredicateFuncValue::IsIpv6 => {
617
3
                tokens.push(Token::PredicateType(name));
618
            }
619
        }
620
330
        tokens
621
    }
622
}
623

            
624
impl Tokenizable for PredicateValue {
625
279
    fn tokenize(&self) -> Vec<Token> {
626
279
        match self {
627
114
            PredicateValue::String(value) => value.tokenize(),
628
15
            PredicateValue::MultilineString(value) => value.tokenize(),
629
3
            PredicateValue::Bool(value) => vec![Token::Boolean(value.to_string())],
630
3
            PredicateValue::Null => vec![Token::Keyword("null".to_string())],
631
108
            PredicateValue::Number(value) => vec![Token::Number(value.to_source().to_string())],
632
3
            PredicateValue::File(value) => value.tokenize(),
633
24
            PredicateValue::Hex(value) => vec![Token::String(value.to_string())],
634
3
            PredicateValue::Base64(value) => value.tokenize(),
635
3
            PredicateValue::Placeholder(value) => value.tokenize(),
636
3
            PredicateValue::Regex(value) => value.tokenize(),
637
        }
638
    }
639
}
640

            
641
impl Tokenizable for MultilineString {
642
48
    fn tokenize(&self) -> Vec<Token> {
643
48
        let mut tokens: Vec<Token> = vec![Token::StringDelimiter("```".to_string())];
644
48
        tokens.push(Token::Lang(self.lang().to_string()));
645
48
        for (i, attribute) in self.attributes.iter().enumerate() {
646
3
            if i > 0 || !self.lang().is_empty() {
647
                tokens.push(Token::StringDelimiter(",".to_string()));
648
            }
649
3
            tokens.append(&mut attribute.tokenize());
650
        }
651
48
        match self {
652
            MultilineString {
653
21
                kind: MultilineStringKind::Text(text),
654
                ..
655
            }
656
            | MultilineString {
657
9
                kind: MultilineStringKind::Json(text),
658
                ..
659
            }
660
            | MultilineString {
661
9
                kind: MultilineStringKind::Xml(text),
662
                ..
663
39
            } => tokens.append(&mut text.tokenize()),
664
            MultilineString {
665
9
                kind: MultilineStringKind::GraphQl(graphql),
666
9
                ..
667
9
            } => tokens.append(&mut graphql.tokenize()),
668
        }
669
48
        tokens.push(Token::StringDelimiter("```".to_string()));
670
48
        tokens
671
    }
672
}
673

            
674
impl Tokenizable for MultilineStringAttribute {
675
3
    fn tokenize(&self) -> Vec<Token> {
676
3
        match self {
677
3
            MultilineStringAttribute::Escape => vec![Token::String("escape".to_string())],
678
            MultilineStringAttribute::NoVariable => vec![Token::String("novariable".to_string())],
679
        }
680
    }
681
}
682

            
683
impl Tokenizable for Text {
684
39
    fn tokenize(&self) -> Vec<Token> {
685
39
        let mut tokens: Vec<Token> = vec![];
686
39
        tokens.append(&mut self.space.tokenize());
687
39
        tokens.append(&mut self.newline.tokenize());
688
39
        tokens.append(&mut self.value.tokenize());
689
39
        tokens
690
    }
691
}
692

            
693
impl Tokenizable for GraphQl {
694
9
    fn tokenize(&self) -> Vec<Token> {
695
9
        let mut tokens: Vec<Token> = vec![];
696
9
        tokens.append(&mut self.space.tokenize());
697
9
        tokens.append(&mut self.newline.tokenize());
698
9
        tokens.append(&mut self.value.tokenize());
699
9
        if let Some(vars) = &self.variables {
700
            tokens.append(&mut vars.tokenize());
701
        }
702
9
        tokens
703
    }
704
}
705

            
706
impl Tokenizable for GraphQlVariables {
707
    fn tokenize(&self) -> Vec<Token> {
708
        let mut tokens: Vec<Token> = vec![];
709
        tokens.push(Token::String("variables".to_string()));
710
        tokens.append(&mut self.space.tokenize());
711
        tokens.append(&mut self.value.tokenize());
712
        tokens.append(&mut self.whitespace.tokenize());
713
        tokens
714
    }
715
}
716

            
717
impl Tokenizable for Template {
718
1128
    fn tokenize(&self) -> Vec<Token> {
719
1128
        let mut tokens: Vec<Token> = vec![];
720
1128
        if let Some(d) = self.delimiter {
721
432
            tokens.push(Token::StringDelimiter(d.to_string()));
722
        }
723
2256
        for element in &self.elements {
724
1128
            tokens.append(&mut element.tokenize());
725
        }
726
1128
        if let Some(d) = self.delimiter {
727
432
            tokens.push(Token::StringDelimiter(d.to_string()));
728
        }
729
1128
        tokens
730
    }
731
}
732

            
733
impl Tokenizable for TemplateElement {
734
1128
    fn tokenize(&self) -> Vec<Token> {
735
1128
        match self {
736
1059
            TemplateElement::String { source, .. } => {
737
1059
                vec![Token::String(source.to_string())]
738
            }
739
69
            TemplateElement::Placeholder(value) => {
740
69
                let mut tokens: Vec<Token> = vec![];
741
69
                tokens.append(&mut value.tokenize());
742
69
                tokens
743
            }
744
        }
745
    }
746
}
747

            
748
impl Tokenizable for Placeholder {
749
144
    fn tokenize(&self) -> Vec<Token> {
750
144
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter(String::from("{{"))];
751
144
        tokens.append(&mut self.space0.tokenize());
752
144
        tokens.append(&mut self.expr.tokenize());
753
144
        tokens.append(&mut self.space1.tokenize());
754
144
        tokens.push(Token::CodeDelimiter(String::from("}}")));
755
144
        tokens
756
    }
757
}
758

            
759
impl Tokenizable for Expr {
760
144
    fn tokenize(&self) -> Vec<Token> {
761
144
        self.kind.tokenize()
762
    }
763
}
764

            
765
impl Tokenizable for ExprKind {
766
144
    fn tokenize(&self) -> Vec<Token> {
767
144
        match self {
768
138
            ExprKind::Variable(variable) => variable.tokenize(),
769
6
            ExprKind::Function(function) => function.tokenize(),
770
        }
771
    }
772
}
773

            
774
impl Tokenizable for Variable {
775
138
    fn tokenize(&self) -> Vec<Token> {
776
138
        vec![Token::CodeVariable(self.name.clone())]
777
    }
778
}
779

            
780
impl Tokenizable for Function {
781
6
    fn tokenize(&self) -> Vec<Token> {
782
6
        match self {
783
3
            Function::NewDate => vec![Token::CodeVariable("newDate".to_string())],
784
3
            Function::NewUuid => vec![Token::CodeVariable("newUuid".to_string())],
785
        }
786
    }
787
}
788

            
789
impl Tokenizable for Regex {
790
6
    fn tokenize(&self) -> Vec<Token> {
791
6
        let s = str::replace(self.inner.as_str(), "/", "\\/");
792
6
        vec![Token::String(format!("/{s}/"))]
793
    }
794
}
795

            
796
impl Tokenizable for LineTerminator {
797
1545
    fn tokenize(&self) -> Vec<Token> {
798
1545
        let mut tokens: Vec<Token> = vec![];
799
1545
        tokens.append(&mut self.space0.tokenize());
800
1545
        if let Some(comment) = &self.comment {
801
243
            tokens.append(&mut comment.tokenize());
802
        }
803
1545
        tokens.append(&mut self.newline.tokenize());
804
1545
        tokens
805
    }
806
}
807

            
808
impl Tokenizable for Whitespace {
809
7176
    fn tokenize(&self) -> Vec<Token> {
810
7176
        let mut tokens: Vec<Token> = vec![];
811
7176
        if !self.value.is_empty() {
812
3594
            tokens.push(Token::Whitespace(self.value.clone()));
813
        }
814
7176
        tokens
815
    }
816
}
817

            
818
impl Tokenizable for Comment {
819
243
    fn tokenize(&self) -> Vec<Token> {
820
243
        vec![Token::Comment(format!("#{}", self.value.clone()))]
821
    }
822
}
823

            
824
impl Tokenizable for JsonValue {
825
102
    fn tokenize(&self) -> Vec<Token> {
826
102
        let mut tokens: Vec<Token> = vec![];
827
102
        match self {
828
18
            JsonValue::String(s) => {
829
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
830
18
                tokens.append(&mut s.tokenize());
831
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
832
            }
833
45
            JsonValue::Number(value) => {
834
45
                tokens.push(Token::Number(value.to_string()));
835
            }
836
3
            JsonValue::Boolean(value) => {
837
3
                tokens.push(Token::Boolean(value.to_string()));
838
            }
839
15
            JsonValue::List { space0, elements } => {
840
15
                tokens.push(Token::CodeDelimiter("[".to_string()));
841
15
                tokens.push(Token::Whitespace(space0.clone()));
842
51
                for (i, element) in elements.iter().enumerate() {
843
51
                    if i > 0 {
844
39
                        tokens.push(Token::CodeDelimiter(",".to_string()));
845
                    }
846
51
                    tokens.append(&mut element.tokenize());
847
                }
848
15
                tokens.push(Token::CodeDelimiter("]".to_string()));
849
            }
850
15
            JsonValue::Object { space0, elements } => {
851
15
                tokens.push(Token::CodeDelimiter("{".to_string()));
852
15
                tokens.push(Token::Whitespace(space0.clone()));
853
45
                for (i, element) in elements.iter().enumerate() {
854
45
                    if i > 0 {
855
33
                        tokens.push(Token::CodeDelimiter(",".to_string()));
856
                    }
857
45
                    tokens.append(&mut element.tokenize());
858
                }
859
15
                tokens.push(Token::CodeDelimiter("}".to_string()));
860
            }
861
3
            JsonValue::Null => {
862
3
                tokens.push(Token::Keyword("null".to_string()));
863
            }
864
3
            JsonValue::Placeholder(exp) => {
865
3
                tokens.append(&mut exp.tokenize());
866
            }
867
        }
868
102
        tokens
869
    }
870
}
871

            
872
impl Tokenizable for JsonListElement {
873
51
    fn tokenize(&self) -> Vec<Token> {
874
51
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
875
51
        tokens.append(&mut self.value.tokenize());
876
51
        tokens.push(Token::Whitespace(self.space1.clone()));
877
51
        tokens
878
    }
879
}
880

            
881
impl Tokenizable for JsonObjectElement {
882
45
    fn tokenize(&self) -> Vec<Token> {
883
45
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
884
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
885
45
        tokens.push(Token::String(self.name.to_string()));
886
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
887
45
        tokens.push(Token::Whitespace(self.space1.clone()));
888
45
        tokens.push(Token::CodeDelimiter(":".to_string()));
889
45
        tokens.push(Token::Whitespace(self.space2.clone()));
890
45
        tokens.append(&mut self.value.tokenize());
891
45
        tokens.push(Token::Whitespace(self.space3.clone()));
892
45
        tokens
893
    }
894
}
895

            
896
impl Tokenizable for EntryOption {
897
273
    fn tokenize(&self) -> Vec<Token> {
898
273
        let mut tokens: Vec<Token> = vec![];
899
273
        tokens.append(
900
273
            &mut self
901
273
                .line_terminators
902
273
                .iter()
903
274
                .flat_map(|e| e.tokenize())
904
273
                .collect(),
905
273
        );
906
273
        tokens.append(&mut self.space0.tokenize());
907
273
        tokens.push(Token::String(self.kind.identifier().to_string()));
908
273
        tokens.append(&mut self.space1.tokenize());
909
273
        tokens.push(Token::Colon(String::from(":")));
910
273
        tokens.append(&mut self.space2.tokenize());
911
273
        tokens.append(&mut self.kind.tokenize());
912
273
        tokens.append(&mut self.line_terminator0.tokenize());
913
273
        tokens
914
    }
915
}
916

            
917
impl Tokenizable for OptionKind {
918
273
    fn tokenize(&self) -> Vec<Token> {
919
273
        match self {
920
6
            OptionKind::AwsSigV4(value) => value.tokenize(),
921
6
            OptionKind::CaCertificate(filename) => filename.tokenize(),
922
9
            OptionKind::ClientCert(filename) => filename.tokenize(),
923
6
            OptionKind::ClientKey(filename) => filename.tokenize(),
924
6
            OptionKind::Compressed(value) => value.tokenize(),
925
6
            OptionKind::ConnectTo(value) => value.tokenize(),
926
6
            OptionKind::ConnectTimeout(value) => value.tokenize(),
927
12
            OptionKind::Delay(value) => value.tokenize(),
928
9
            OptionKind::FollowLocation(value) => value.tokenize(),
929
6
            OptionKind::FollowLocationTrusted(value) => value.tokenize(),
930
6
            OptionKind::Header(value) => value.tokenize(),
931
6
            OptionKind::Http10(value) => value.tokenize(),
932
6
            OptionKind::Http11(value) => value.tokenize(),
933
6
            OptionKind::Http2(value) => value.tokenize(),
934
6
            OptionKind::Http3(value) => value.tokenize(),
935
9
            OptionKind::Insecure(value) => value.tokenize(),
936
6
            OptionKind::IpV4(value) => value.tokenize(),
937
6
            OptionKind::IpV6(value) => value.tokenize(),
938
6
            OptionKind::LimitRate(value) => value.tokenize(),
939
6
            OptionKind::MaxRedirect(value) => value.tokenize(),
940
6
            OptionKind::NetRc(value) => value.tokenize(),
941
6
            OptionKind::NetRcFile(filename) => filename.tokenize(),
942
6
            OptionKind::NetRcOptional(value) => value.tokenize(),
943
6
            OptionKind::Output(filename) => filename.tokenize(),
944
6
            OptionKind::PathAsIs(value) => value.tokenize(),
945
6
            OptionKind::Proxy(value) => value.tokenize(),
946
9
            OptionKind::Repeat(value) => value.tokenize(),
947
6
            OptionKind::Resolve(value) => value.tokenize(),
948
15
            OptionKind::Retry(value) => value.tokenize(),
949
12
            OptionKind::RetryInterval(value) => value.tokenize(),
950
6
            OptionKind::Skip(value) => value.tokenize(),
951
6
            OptionKind::UnixSocket(value) => value.tokenize(),
952
6
            OptionKind::User(value) => value.tokenize(),
953
27
            OptionKind::Variable(value) => value.tokenize(),
954
9
            OptionKind::Verbose(value) => value.tokenize(),
955
6
            OptionKind::VeryVerbose(value) => value.tokenize(),
956
        }
957
    }
958
}
959

            
960
impl Tokenizable for BooleanOption {
961
105
    fn tokenize(&self) -> Vec<Token> {
962
105
        match self {
963
57
            BooleanOption::Literal(value) => vec![Token::Boolean(value.to_string())],
964
48
            BooleanOption::Placeholder(expr) => expr.tokenize(),
965
        }
966
    }
967
}
968

            
969
impl Tokenizable for NaturalOption {
970
6
    fn tokenize(&self) -> Vec<Token> {
971
6
        match self {
972
3
            NaturalOption::Literal(value) => value.tokenize(),
973
3
            NaturalOption::Placeholder(expr) => expr.tokenize(),
974
        }
975
    }
976
}
977

            
978
impl Tokenizable for U64 {
979
3
    fn tokenize(&self) -> Vec<Token> {
980
3
        vec![Token::Number(self.to_source().to_string())]
981
    }
982
}
983

            
984
impl Tokenizable for I64 {
985
    fn tokenize(&self) -> Vec<Token> {
986
        vec![Token::Number(self.to_source().to_string())]
987
    }
988
}
989

            
990
impl Tokenizable for CountOption {
991
30
    fn tokenize(&self) -> Vec<Token> {
992
30
        match self {
993
21
            CountOption::Literal(retry) => retry.tokenize(),
994
9
            CountOption::Placeholder(expr) => expr.tokenize(),
995
        }
996
    }
997
}
998

            
999
impl Tokenizable for Count {
21
    fn tokenize(&self) -> Vec<Token> {
21
        match self {
15
            Count::Finite(n) => vec![Token::Number(n.to_string())],
6
            Count::Infinite => vec![Token::Number("-1".to_string())],
        }
    }
}
impl Tokenizable for DurationOption {
30
    fn tokenize(&self) -> Vec<Token> {
30
        match self {
21
            DurationOption::Literal(value) => value.tokenize(),
9
            DurationOption::Placeholder(expr) => expr.tokenize(),
        }
    }
}
impl Tokenizable for Duration {
21
    fn tokenize(&self) -> Vec<Token> {
21
        let mut tokens = vec![Token::Number(self.value.to_source().to_string())];
21
        if let Some(unit) = self.unit {
21
            tokens.push(Token::Unit(unit.to_string()));
        }
21
        tokens
    }
}
impl Tokenizable for VariableDefinition {
27
    fn tokenize(&self) -> Vec<Token> {
27
        let mut tokens: Vec<Token> = vec![Token::String(self.name.clone())];
27
        tokens.append(&mut self.space0.tokenize());
27
        tokens.push(Token::Keyword("=".to_string()));
27
        tokens.append(&mut self.space1.tokenize());
27
        tokens.append(&mut self.value.tokenize());
27
        tokens
    }
}
impl Tokenizable for VariableValue {
27
    fn tokenize(&self) -> Vec<Token> {
27
        match self {
3
            VariableValue::Null => vec![Token::Keyword("null".to_string())],
3
            VariableValue::Bool(v) => vec![Token::Boolean(v.to_string())],
6
            VariableValue::Number(v) => vec![Token::Number(v.to_source().to_string())],
15
            VariableValue::String(v) => v.tokenize(),
        }
    }
}
impl Tokenizable for Filter {
96
    fn tokenize(&self) -> Vec<Token> {
96
        let mut tokens = vec![Token::FilterType(self.value.identifier().to_string())];
96
        match &self.value {
6
            FilterValue::Decode { space0, encoding } => {
6
                tokens.append(&mut space0.tokenize());
6
                tokens.append(&mut encoding.tokenize());
            }
9
            FilterValue::Format { space0, fmt } => {
9
                tokens.append(&mut space0.tokenize());
9
                tokens.append(&mut fmt.tokenize());
            }
3
            FilterValue::JsonPath { space0, expr } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
            }
3
            FilterValue::Nth { space0, n } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.push(Token::Number(n.to_source().to_string()));
            }
3
            FilterValue::Regex { space0, value } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut value.tokenize());
            }
            FilterValue::Replace {
15
                space0,
15
                old_value,
15
                space1,
15
                new_value,
15
            } => {
15
                tokens.append(&mut space0.tokenize());
15
                tokens.append(&mut old_value.tokenize());
15
                tokens.append(&mut space1.tokenize());
15
                tokens.append(&mut new_value.tokenize());
            }
3
            FilterValue::Split { space0, sep } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut sep.tokenize());
            }
3
            FilterValue::ToDate { space0, fmt } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut fmt.tokenize());
            }
3
            FilterValue::XPath { space0, expr } => {
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
            }
48
            _ => {}
        }
96
        tokens
    }
}