1
/*
2
 * Hurl (https://hurl.dev)
3
 * Copyright (C) 2024 Orange
4
 *
5
 * Licensed under the Apache License, Version 2.0 (the "License");
6
 * you may not use this file except in compliance with the License.
7
 * You may obtain a copy of the License at
8
 *
9
 *          http://www.apache.org/licenses/LICENSE-2.0
10
 *
11
 * Unless required by applicable law or agreed to in writing, software
12
 * distributed under the License is distributed on an "AS IS" BASIS,
13
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
 * See the License for the specific language governing permissions and
15
 * limitations under the License.
16
 *
17
 */
18
use hurl_core::ast::*;
19
use hurl_core::typing::{Count, Duration};
20

            
21
/// A lexical token of a Hurl file, tagged with its syntactic role.
///
/// Every variant carries the exact source text it was produced from, so a
/// token stream can be re-rendered (e.g. for syntax-aware output) without
/// losing information.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
    // Structural keywords of entries
    Method(String),          // HTTP method, e.g. `GET`
    Version(String),         // HTTP version, e.g. `HTTP/1.1`
    Status(String),          // status code or `*`
    SectionHeader(String),   // e.g. `[Asserts]`
    QueryType(String),       // e.g. `jsonpath`, `header`
    PredicateType(String),   // e.g. `==`, `contains`
    FilterType(String),      // filter keyword applied to a query
    Not(String),             // the `not` predicate prefix
    Keyword(String),         // other literal keywords, e.g. `null`, `file,`

    // Primitives
    Whitespace(String),      // runs of spaces/tabs (never empty)
    Comment(String),         // `#...` comment, including the `#`
    Value(String),
    Colon(String),           // the `:` separating key and value
    StringDelimiter(String), // quotes, ``` fences, template delimiters
    Boolean(String),
    Number(String),
    String(String),          // generic string content
    CodeDelimiter(String),   // `{{`, `}}`, `[`, `]`, quoting of expressions
    CodeVariable(String),    // variable name inside `{{ }}`
    Lang(String),            // multiline-string language tag
    Unit(String),            // duration/count unit suffix
}
47

            
48
/// Types that can be flattened into a sequence of [`Token`]s.
pub trait Tokenizable {
    /// Returns the tokens for this node, in source order.
    fn tokenize(&self) -> Vec<Token>;
}
51

            
52
impl Tokenizable for HurlFile {
53
69
    fn tokenize(&self) -> Vec<Token> {
54
69
        let mut tokens: Vec<Token> = vec![];
55
251
        tokens.append(&mut self.entries.iter().flat_map(|e| e.tokenize()).collect());
56
69
        tokens.append(
57
69
            &mut self
58
69
                .line_terminators
59
69
                .iter()
60
79
                .flat_map(|e| e.tokenize())
61
69
                .collect(),
62
69
        );
63
69
        tokens
64
    }
65
}
66

            
67
impl Tokenizable for Entry {
68
228
    fn tokenize(&self) -> Vec<Token> {
69
228
        let mut tokens: Vec<Token> = vec![];
70
228
        tokens.append(&mut self.request.tokenize());
71
228
        if let Some(response) = &self.response {
72
105
            tokens.append(&mut response.tokenize());
73
        }
74
228
        tokens
75
    }
76
}
77

            
78
impl Tokenizable for Request {
79
228
    fn tokenize(&self) -> Vec<Token> {
80
228
        let mut tokens: Vec<Token> = vec![];
81
228
        tokens.append(
82
228
            &mut self
83
228
                .line_terminators
84
228
                .iter()
85
278
                .flat_map(|e| e.tokenize())
86
228
                .collect(),
87
228
        );
88
228
        tokens.append(&mut self.space0.tokenize());
89
228
        tokens.append(&mut self.method.tokenize());
90
228
        tokens.append(&mut self.space1.tokenize());
91
228
        tokens.append(&mut self.url.tokenize());
92
228
        tokens.append(&mut self.line_terminator0.tokenize());
93
252
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
94
247
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
95
228
        if let Some(body) = &self.body {
96
45
            tokens.append(&mut body.tokenize());
97
        }
98
228
        tokens
99
    }
100
}
101

            
102
impl Tokenizable for Method {
103
228
    fn tokenize(&self) -> Vec<Token> {
104
228
        vec![Token::Method(self.to_string())]
105
    }
106
}
107

            
108
impl Tokenizable for Response {
109
105
    fn tokenize(&self) -> Vec<Token> {
110
105
        let mut tokens: Vec<Token> = vec![];
111
105
        tokens.append(
112
105
            &mut self
113
105
                .line_terminators
114
105
                .iter()
115
109
                .flat_map(|e| e.tokenize())
116
105
                .collect(),
117
105
        );
118
105
        tokens.append(&mut self.space0.tokenize());
119
105
        tokens.append(&mut self.version.tokenize());
120
105
        tokens.append(&mut self.space1.tokenize());
121
105
        tokens.append(&mut self.status.tokenize());
122
105
        tokens.append(&mut self.line_terminator0.tokenize());
123
110
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
124
122
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
125
105
        if let Some(body) = self.clone().body {
126
45
            tokens.append(&mut body.tokenize());
127
        }
128
105
        tokens
129
    }
130
}
131

            
132
impl Tokenizable for Status {
133
105
    fn tokenize(&self) -> Vec<Token> {
134
105
        let mut tokens: Vec<Token> = vec![];
135
105
        match self.value.clone() {
136
3
            StatusValue::Any => tokens.push(Token::Status("*".to_string())),
137
102
            StatusValue::Specific(v) => tokens.push(Token::Status(v.to_string())),
138
        }
139
105
        tokens
140
    }
141
}
142

            
143
impl Tokenizable for Version {
144
105
    fn tokenize(&self) -> Vec<Token> {
145
105
        vec![Token::Version(self.value.to_string())]
146
    }
147
}
148

            
149
impl Tokenizable for Body {
150
90
    fn tokenize(&self) -> Vec<Token> {
151
90
        let mut tokens: Vec<Token> = vec![];
152
90
        tokens.append(
153
90
            &mut self
154
90
                .line_terminators
155
90
                .iter()
156
95
                .flat_map(|e| e.tokenize())
157
90
                .collect(),
158
90
        );
159
90
        tokens.append(&mut self.space0.tokenize());
160
90
        tokens.append(&mut self.value.tokenize());
161
90
        tokens.append(&mut self.line_terminator0.tokenize());
162
90
        tokens
163
    }
164
}
165

            
166
impl Tokenizable for Bytes {
167
90
    fn tokenize(&self) -> Vec<Token> {
168
90
        let mut tokens: Vec<Token> = vec![];
169
90
        match self {
170
6
            Bytes::Json(value) => tokens.append(&mut value.tokenize()),
171
3
            Bytes::Xml(value) => tokens.push(Token::String(value.to_string())),
172
33
            Bytes::MultilineString(value) => tokens.append(&mut value.tokenize()),
173
15
            Bytes::OnelineString(value) => tokens.append(&mut value.tokenize()),
174
12
            Bytes::Base64(value) => tokens.append(&mut value.tokenize()),
175
9
            Bytes::Hex(value) => tokens.append(&mut value.tokenize()),
176
12
            Bytes::File(value) => tokens.append(&mut value.tokenize()),
177
        }
178
90
        tokens
179
    }
180
}
181

            
182
impl Tokenizable for Section {
183
108
    fn tokenize(&self) -> Vec<Token> {
184
108
        let mut tokens: Vec<Token> = vec![];
185
108
        tokens.append(
186
108
            &mut self
187
108
                .line_terminators
188
108
                .iter()
189
118
                .flat_map(|e| e.tokenize())
190
108
                .collect(),
191
108
        );
192
108
        tokens.append(&mut self.space0.tokenize());
193
108
        tokens.push(Token::SectionHeader(format!("[{}]", self.name())));
194
108
        tokens.append(&mut self.line_terminator0.tokenize());
195
108
        tokens.append(&mut self.value.tokenize());
196
108
        tokens
197
    }
198
}
199

            
200
impl Tokenizable for SectionValue {
201
108
    fn tokenize(&self) -> Vec<Token> {
202
108
        let mut tokens: Vec<Token> = vec![];
203
108
        match self {
204
42
            SectionValue::Asserts(items) => {
205
326
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
206
            }
207
9
            SectionValue::QueryParams(items, _) => {
208
15
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
209
            }
210
3
            SectionValue::BasicAuth(item) => {
211
3
                if let Some(kv) = item {
212
3
                    tokens.append(&mut kv.tokenize());
213
                }
214
            }
215
6
            SectionValue::FormParams(items, _) => {
216
14
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
217
            }
218
6
            SectionValue::MultipartFormData(items, _) => {
219
11
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
220
            }
221
9
            SectionValue::Cookies(items) => {
222
12
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
223
            }
224
9
            SectionValue::Captures(items) => {
225
11
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
226
            }
227
24
            SectionValue::Options(items) => {
228
260
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
229
            }
230
        }
231
108
        tokens
232
    }
233
}
234

            
235
impl Tokenizable for Base64 {
236
15
    fn tokenize(&self) -> Vec<Token> {
237
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("base64,"))];
238
15
        tokens.append(&mut self.space0.tokenize());
239
15
        tokens.push(Token::String(self.encoded.to_string()));
240
15
        tokens.append(&mut self.space1.tokenize());
241
15
        tokens.push(Token::Keyword(String::from(";")));
242
15
        tokens
243
    }
244
}
245

            
246
impl Tokenizable for Hex {
247
9
    fn tokenize(&self) -> Vec<Token> {
248
9
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("hex,"))];
249
9
        tokens.append(&mut self.space0.tokenize());
250
9
        tokens.push(Token::String(self.encoded.to_string()));
251
9
        tokens.append(&mut self.space1.tokenize());
252
9
        tokens.push(Token::Keyword(String::from(";")));
253
9
        tokens
254
    }
255
}
256

            
257
impl Tokenizable for File {
258
15
    fn tokenize(&self) -> Vec<Token> {
259
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("file,"))];
260
15
        tokens.append(&mut self.space0.tokenize());
261
15
        tokens.append(&mut self.filename.tokenize());
262
15
        tokens.append(&mut self.space1.tokenize());
263
15
        tokens.push(Token::Keyword(String::from(";")));
264
15
        tokens
265
    }
266
}
267

            
268
impl Tokenizable for KeyValue {
269
117
    fn tokenize(&self) -> Vec<Token> {
270
117
        let mut tokens: Vec<Token> = vec![];
271
117
        tokens.append(
272
117
            &mut self
273
117
                .line_terminators
274
117
                .iter()
275
118
                .flat_map(|e| e.tokenize())
276
117
                .collect(),
277
117
        );
278
117
        tokens.append(&mut self.space0.tokenize());
279
117
        tokens.append(&mut self.key.tokenize());
280
117
        tokens.append(&mut self.space1.tokenize());
281
117
        tokens.push(Token::Colon(String::from(":")));
282
117
        tokens.append(&mut self.space2.tokenize());
283
117
        tokens.append(&mut self.value.tokenize());
284
117
        tokens.append(&mut self.line_terminator0.tokenize());
285
117
        tokens
286
    }
287
}
288

            
289
impl Tokenizable for MultipartParam {
290
9
    fn tokenize(&self) -> Vec<Token> {
291
9
        match self {
292
3
            MultipartParam::Param(key_value) => key_value.tokenize(),
293
6
            MultipartParam::FileParam(file_param) => file_param.tokenize(),
294
        }
295
    }
296
}
297

            
298
impl Tokenizable for FileParam {
299
6
    fn tokenize(&self) -> Vec<Token> {
300
6
        let mut tokens: Vec<Token> = vec![];
301
6
        tokens.append(&mut self.space0.tokenize());
302
6
        tokens.append(&mut self.key.tokenize());
303
6
        tokens.append(&mut self.space1.tokenize());
304
6
        tokens.push(Token::Colon(String::from(":")));
305
6
        tokens.append(&mut self.space2.tokenize());
306
6
        tokens.append(&mut self.value.tokenize());
307
6
        tokens.append(&mut self.line_terminator0.tokenize());
308
6
        tokens
309
    }
310
}
311

            
312
impl Tokenizable for FileValue {
313
6
    fn tokenize(&self) -> Vec<Token> {
314
6
        let mut tokens: Vec<Token> = vec![Token::Keyword("file,".to_string())];
315
6
        tokens.append(&mut self.space0.tokenize());
316
6
        tokens.append(&mut self.filename.tokenize());
317
6
        tokens.append(&mut self.space1.tokenize());
318
6
        tokens.push(Token::Keyword(";".to_string()));
319
6
        tokens.append(&mut self.space2.tokenize());
320
6
        if let Some(content_type) = self.content_type.clone() {
321
3
            tokens.push(Token::String(content_type));
322
        }
323
6
        tokens
324
    }
325
}
326

            
327
impl Tokenizable for Cookie {
328
9
    fn tokenize(&self) -> Vec<Token> {
329
9
        let mut tokens: Vec<Token> = vec![];
330
9
        tokens.append(
331
9
            &mut self
332
9
                .line_terminators
333
9
                .iter()
334
9
                .flat_map(|e| e.tokenize())
335
9
                .collect(),
336
9
        );
337
9
        tokens.append(&mut self.space0.tokenize());
338
9
        tokens.append(&mut self.name.tokenize());
339
9
        tokens.append(&mut self.space1.tokenize());
340
9
        tokens.push(Token::Colon(String::from(":")));
341
9
        tokens.append(&mut self.space2.tokenize());
342
9
        tokens.append(&mut self.value.tokenize());
343
9
        tokens.append(&mut self.line_terminator0.tokenize());
344
9
        tokens
345
    }
346
}
347

            
348
impl Tokenizable for Capture {
349
6
    fn tokenize(&self) -> Vec<Token> {
350
6
        let mut tokens: Vec<Token> = vec![];
351
6
        tokens.append(
352
6
            &mut self
353
6
                .line_terminators
354
6
                .iter()
355
6
                .flat_map(|e| e.tokenize())
356
6
                .collect(),
357
6
        );
358
6
        tokens.append(&mut self.space0.tokenize());
359
6
        tokens.append(&mut self.name.tokenize());
360
6
        tokens.append(&mut self.space1.tokenize());
361
6
        tokens.push(Token::Colon(String::from(":")));
362
6
        tokens.append(&mut self.space2.tokenize());
363
6
        tokens.append(&mut self.query.tokenize());
364
9
        for (space, filter) in &self.filters {
365
3
            tokens.append(&mut space.tokenize());
366
3
            tokens.append(&mut filter.tokenize());
367
        }
368
6
        tokens.append(&mut self.line_terminator0.tokenize());
369
6
        tokens
370
    }
371
}
372

            
373
impl Tokenizable for Assert {
374
312
    fn tokenize(&self) -> Vec<Token> {
375
312
        let mut tokens: Vec<Token> = vec![];
376
312
        tokens.append(
377
312
            &mut self
378
312
                .line_terminators
379
312
                .iter()
380
312
                .flat_map(|e| e.tokenize())
381
312
                .collect(),
382
312
        );
383
312
        tokens.append(&mut self.space0.tokenize());
384
312
        tokens.append(&mut self.query.tokenize());
385
399
        for (space, filter) in &self.filters {
386
87
            tokens.append(&mut space.tokenize());
387
87
            tokens.append(&mut filter.tokenize());
388
        }
389
312
        tokens.append(&mut self.space1.tokenize());
390
312
        // TODO reconvert back your first predicate for jsonpath
391
312
        // so that you can use your firstX predicate for other query
392
312
        tokens.append(&mut self.predicate.tokenize());
393
312
        tokens.append(&mut self.line_terminator0.tokenize());
394
312
        tokens
395
    }
396
}
397

            
398
impl Tokenizable for Query {
399
318
    fn tokenize(&self) -> Vec<Token> {
400
318
        self.value.tokenize()
401
    }
402
}
403

            
404
impl Tokenizable for QueryValue {
405
318
    fn tokenize(&self) -> Vec<Token> {
406
318
        let mut tokens: Vec<Token> = vec![];
407
318
        match self.clone() {
408
6
            QueryValue::Status => tokens.push(Token::QueryType(String::from("status"))),
409
3
            QueryValue::Url => tokens.push(Token::QueryType(String::from("url"))),
410
12
            QueryValue::Header { space0, name } => {
411
12
                tokens.push(Token::QueryType(String::from("header")));
412
12
                tokens.append(&mut space0.tokenize());
413
12
                tokens.append(&mut name.tokenize());
414
            }
415
6
            QueryValue::Cookie { space0, expr } => {
416
6
                tokens.push(Token::QueryType(String::from("cookie")));
417
6
                tokens.append(&mut space0.tokenize());
418
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
419
6
                tokens.append(&mut expr.tokenize());
420
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
421
            }
422
30
            QueryValue::Body => tokens.push(Token::QueryType(String::from("body"))),
423
3
            QueryValue::Xpath { space0, expr } => {
424
3
                tokens.push(Token::QueryType(String::from("xpath")));
425
3
                tokens.append(&mut space0.tokenize());
426
3
                tokens.append(&mut expr.tokenize());
427
            }
428
189
            QueryValue::Jsonpath { space0, expr } => {
429
189
                tokens.push(Token::QueryType(String::from("jsonpath")));
430
189
                tokens.append(&mut space0.tokenize());
431
189
                tokens.append(&mut expr.tokenize());
432
            }
433
3
            QueryValue::Regex { space0, value } => {
434
3
                tokens.push(Token::QueryType(String::from("regex")));
435
3
                tokens.append(&mut space0.tokenize());
436
3
                tokens.append(&mut value.tokenize());
437
            }
438
9
            QueryValue::Variable { space0, name } => {
439
9
                tokens.push(Token::QueryType(String::from("variable")));
440
9
                tokens.append(&mut space0.tokenize());
441
9
                tokens.append(&mut name.tokenize());
442
            }
443
3
            QueryValue::Duration => tokens.push(Token::QueryType(String::from("duration"))),
444
18
            QueryValue::Bytes => tokens.push(Token::QueryType(String::from("bytes"))),
445
3
            QueryValue::Sha256 => tokens.push(Token::QueryType(String::from("sha256"))),
446
3
            QueryValue::Md5 => tokens.push(Token::QueryType(String::from("md5"))),
447
            QueryValue::Certificate {
448
30
                space0,
449
30
                attribute_name: field,
450
30
            } => {
451
30
                tokens.push(Token::QueryType(String::from("certificate")));
452
30
                tokens.append(&mut space0.tokenize());
453
30
                tokens.append(&mut field.tokenize());
454
            }
455
        }
456
318
        tokens
457
    }
458
}
459

            
460
impl Tokenizable for RegexValue {
461
21
    fn tokenize(&self) -> Vec<Token> {
462
21
        match self {
463
18
            RegexValue::Template(template) => template.tokenize(),
464
3
            RegexValue::Regex(regex) => regex.tokenize(),
465
        }
466
    }
467
}
468

            
469
impl Tokenizable for CookiePath {
470
6
    fn tokenize(&self) -> Vec<Token> {
471
6
        let mut tokens: Vec<Token> = vec![];
472
6
        tokens.append(&mut self.name.tokenize());
473
6
        if let Some(attribute) = self.attribute.clone() {
474
3
            tokens.append(&mut attribute.tokenize());
475
        }
476
6
        tokens
477
    }
478
}
479

            
480
impl Tokenizable for CookieAttribute {
481
3
    fn tokenize(&self) -> Vec<Token> {
482
3
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter("[".to_string())];
483
3
        tokens.append(&mut self.space0.tokenize());
484
3
        tokens.push(Token::String(self.name.value()));
485
3
        tokens.append(&mut self.space1.tokenize());
486
3
        tokens.push(Token::CodeDelimiter("]".to_string()));
487
3
        tokens
488
    }
489
}
490

            
491
impl Tokenizable for CertificateAttributeName {
492
30
    fn tokenize(&self) -> Vec<Token> {
493
30
        let value = match self {
494
3
            CertificateAttributeName::Subject => "Subject",
495
3
            CertificateAttributeName::Issuer => "Issuer",
496
9
            CertificateAttributeName::StartDate => "Start-Date",
497
12
            CertificateAttributeName::ExpireDate => "Expire-Date",
498
3
            CertificateAttributeName::SerialNumber => "Serial-Number",
499
        };
500
30
        vec![
501
30
            Token::StringDelimiter("\"".to_string()),
502
30
            Token::String(value.to_string()),
503
30
            Token::StringDelimiter("\"".to_string()),
504
30
        ]
505
    }
506
}
507

            
508
impl Tokenizable for Predicate {
509
312
    fn tokenize(&self) -> Vec<Token> {
510
312
        let mut tokens: Vec<Token> = vec![];
511
312
        if self.not {
512
3
            tokens.push(Token::Not(String::from("not")));
513
3
            tokens.append(&mut self.space0.tokenize());
514
        }
515
312
        tokens.append(&mut self.predicate_func.tokenize());
516
312
        tokens
517
    }
518
}
519

            
520
impl Tokenizable for PredicateFunc {
521
312
    fn tokenize(&self) -> Vec<Token> {
522
312
        self.value.tokenize()
523
    }
524
}
525

            
526
impl Tokenizable for PredicateFuncValue {
527
312
    fn tokenize(&self) -> Vec<Token> {
528
312
        let mut tokens: Vec<Token> = vec![];
529
312
        match self {
530
201
            PredicateFuncValue::Equal { space0, value, .. } => {
531
201
                tokens.push(Token::PredicateType(self.name()));
532
201
                tokens.append(&mut space0.tokenize());
533
201
                tokens.append(&mut value.tokenize());
534
            }
535
9
            PredicateFuncValue::NotEqual { space0, value, .. } => {
536
9
                tokens.push(Token::PredicateType(self.name()));
537
9
                tokens.append(&mut space0.tokenize());
538
9
                tokens.append(&mut value.tokenize());
539
            }
540
9
            PredicateFuncValue::GreaterThan { space0, value, .. } => {
541
9
                tokens.push(Token::PredicateType(self.name()));
542
9
                tokens.append(&mut space0.tokenize());
543
9
                tokens.append(&mut value.tokenize());
544
            }
545
3
            PredicateFuncValue::GreaterThanOrEqual { space0, value, .. } => {
546
3
                tokens.push(Token::PredicateType(self.name()));
547
3
                tokens.append(&mut space0.tokenize());
548
3
                tokens.append(&mut value.tokenize());
549
            }
550
12
            PredicateFuncValue::LessThan { space0, value, .. } => {
551
12
                tokens.push(Token::PredicateType(self.name()));
552
12
                tokens.append(&mut space0.tokenize());
553
12
                tokens.append(&mut value.tokenize());
554
            }
555
3
            PredicateFuncValue::LessThanOrEqual { space0, value, .. } => {
556
3
                tokens.push(Token::PredicateType(self.name()));
557
3
                tokens.append(&mut space0.tokenize());
558
3
                tokens.append(&mut value.tokenize());
559
            }
560
9
            PredicateFuncValue::StartWith { space0, value } => {
561
9
                tokens.push(Token::PredicateType(self.name()));
562
9
                tokens.append(&mut space0.tokenize());
563
9
                tokens.append(&mut value.tokenize());
564
            }
565
6
            PredicateFuncValue::EndWith { space0, value } => {
566
6
                tokens.push(Token::PredicateType(self.name()));
567
6
                tokens.append(&mut space0.tokenize());
568
6
                tokens.append(&mut value.tokenize());
569
            }
570
6
            PredicateFuncValue::Contain { space0, value } => {
571
6
                tokens.push(Token::PredicateType(self.name()));
572
6
                tokens.append(&mut space0.tokenize());
573
6
                tokens.append(&mut value.tokenize());
574
            }
575
3
            PredicateFuncValue::Include { space0, value } => {
576
3
                tokens.push(Token::PredicateType(self.name()));
577
3
                tokens.append(&mut space0.tokenize());
578
3
                tokens.append(&mut value.tokenize());
579
            }
580
6
            PredicateFuncValue::Match { space0, value } => {
581
6
                tokens.push(Token::PredicateType(self.name()));
582
6
                tokens.append(&mut space0.tokenize());
583
6
                tokens.append(&mut value.tokenize());
584
            }
585

            
586
3
            PredicateFuncValue::IsInteger => {
587
3
                tokens.push(Token::PredicateType(self.name()));
588
            }
589
3
            PredicateFuncValue::IsFloat => {
590
3
                tokens.push(Token::PredicateType(self.name()));
591
            }
592
3
            PredicateFuncValue::IsBoolean => {
593
3
                tokens.push(Token::PredicateType(self.name()));
594
            }
595
3
            PredicateFuncValue::IsString => {
596
3
                tokens.push(Token::PredicateType(self.name()));
597
            }
598
3
            PredicateFuncValue::IsCollection => {
599
3
                tokens.push(Token::PredicateType(self.name()));
600
            }
601
9
            PredicateFuncValue::IsDate => {
602
9
                tokens.push(Token::PredicateType(self.name()));
603
            }
604
3
            PredicateFuncValue::IsIsoDate => {
605
3
                tokens.push(Token::PredicateType(self.name()));
606
            }
607
12
            PredicateFuncValue::Exist => {
608
12
                tokens.push(Token::PredicateType(self.name()));
609
            }
610
3
            PredicateFuncValue::IsEmpty => {
611
3
                tokens.push(Token::PredicateType(self.name()));
612
            }
613
3
            PredicateFuncValue::IsNumber => {
614
3
                tokens.push(Token::PredicateType(self.name()));
615
            }
616
        }
617
312
        tokens
618
    }
619
}
620

            
621
impl Tokenizable for PredicateValue {
622
267
    fn tokenize(&self) -> Vec<Token> {
623
267
        match self {
624
105
            PredicateValue::String(value) => value.tokenize(),
625
15
            PredicateValue::MultilineString(value) => value.tokenize(),
626
3
            PredicateValue::Bool(value) => vec![Token::Boolean(value.to_string())],
627
3
            PredicateValue::Null => vec![Token::Keyword("null".to_string())],
628
108
            PredicateValue::Number(value) => vec![Token::Number(value.to_string())],
629
3
            PredicateValue::File(value) => value.tokenize(),
630
21
            PredicateValue::Hex(value) => vec![Token::String(value.to_string())],
631
3
            PredicateValue::Base64(value) => value.tokenize(),
632
3
            PredicateValue::Expression(value) => value.tokenize(),
633
3
            PredicateValue::Regex(value) => value.tokenize(),
634
        }
635
    }
636
}
637

            
638
impl Tokenizable for MultilineString {
639
48
    fn tokenize(&self) -> Vec<Token> {
640
48
        let mut tokens: Vec<Token> = vec![Token::StringDelimiter("```".to_string())];
641
48
        tokens.push(Token::Lang(self.lang().to_string()));
642
48
        for (i, attribute) in self.attributes.iter().enumerate() {
643
3
            if i > 0 || !self.lang().is_empty() {
644
                tokens.push(Token::StringDelimiter(",".to_string()));
645
            }
646
3
            tokens.append(&mut attribute.tokenize());
647
        }
648
48
        match self {
649
            MultilineString {
650
21
                kind: MultilineStringKind::Text(text),
651
                ..
652
            }
653
            | MultilineString {
654
9
                kind: MultilineStringKind::Json(text),
655
                ..
656
            }
657
            | MultilineString {
658
9
                kind: MultilineStringKind::Xml(text),
659
                ..
660
39
            } => tokens.append(&mut text.tokenize()),
661
            MultilineString {
662
9
                kind: MultilineStringKind::GraphQl(graphql),
663
9
                ..
664
9
            } => tokens.append(&mut graphql.tokenize()),
665
        }
666
48
        tokens.push(Token::StringDelimiter("```".to_string()));
667
48
        tokens
668
    }
669
}
670

            
671
impl Tokenizable for MultilineStringAttribute {
672
3
    fn tokenize(&self) -> Vec<Token> {
673
3
        match self {
674
3
            MultilineStringAttribute::Escape => vec![Token::String("escape".to_string())],
675
            MultilineStringAttribute::NoVariable => vec![Token::String("novariable".to_string())],
676
        }
677
    }
678
}
679

            
680
impl Tokenizable for Text {
681
39
    fn tokenize(&self) -> Vec<Token> {
682
39
        let mut tokens: Vec<Token> = vec![];
683
39
        tokens.append(&mut self.space.tokenize());
684
39
        tokens.append(&mut self.newline.tokenize());
685
39
        tokens.append(&mut self.value.tokenize());
686
39
        tokens
687
    }
688
}
689

            
690
impl Tokenizable for GraphQl {
691
9
    fn tokenize(&self) -> Vec<Token> {
692
9
        let mut tokens: Vec<Token> = vec![];
693
9
        tokens.append(&mut self.space.tokenize());
694
9
        tokens.append(&mut self.newline.tokenize());
695
9
        tokens.append(&mut self.value.tokenize());
696
9
        if let Some(vars) = &self.variables {
697
            tokens.append(&mut vars.tokenize());
698
        }
699
9
        tokens
700
    }
701
}
702

            
703
impl Tokenizable for GraphQlVariables {
704
    fn tokenize(&self) -> Vec<Token> {
705
        let mut tokens: Vec<Token> = vec![];
706
        tokens.push(Token::String("variables".to_string()));
707
        tokens.append(&mut self.space.tokenize());
708
        tokens.append(&mut self.value.tokenize());
709
        tokens.append(&mut self.whitespace.tokenize());
710
        tokens
711
    }
712
}
713

            
714
impl Tokenizable for EncodedString {
715
    fn tokenize(&self) -> Vec<Token> {
716
        let mut tokens: Vec<Token> = vec![];
717
        if self.quotes {
718
            tokens.push(Token::StringDelimiter(
719
                if self.quotes { "\"" } else { "" }.to_string(),
720
            ));
721
        }
722
        tokens.push(Token::String(self.encoded.clone()));
723

            
724
        if self.quotes {
725
            tokens.push(Token::StringDelimiter(
726
                if self.quotes { "\"" } else { "" }.to_string(),
727
            ));
728
        }
729
        tokens
730
    }
731
}
732

            
733
impl Tokenizable for Template {
734
1059
    fn tokenize(&self) -> Vec<Token> {
735
1059
        let mut tokens: Vec<Token> = vec![];
736
1059
        if let Some(d) = self.delimiter {
737
411
            tokens.push(Token::StringDelimiter(d.to_string()));
738
        }
739
2121
        for element in &self.elements {
740
1062
            tokens.append(&mut element.tokenize());
741
        }
742
1059
        if let Some(d) = self.delimiter {
743
411
            tokens.push(Token::StringDelimiter(d.to_string()));
744
        }
745
1059
        tokens
746
    }
747
}
748

            
749
impl Tokenizable for TemplateElement {
750
1062
    fn tokenize(&self) -> Vec<Token> {
751
1062
        match self {
752
1002
            TemplateElement::String { encoded, .. } => {
753
1002
                vec![Token::String(encoded.to_string())]
754
            }
755
60
            TemplateElement::Expression(value) => {
756
60
                let mut tokens: Vec<Token> = vec![];
757
60
                tokens.append(&mut value.tokenize());
758
60
                tokens
759
            }
760
        }
761
    }
762
}
763

            
764
impl Tokenizable for Expr {
765
129
    fn tokenize(&self) -> Vec<Token> {
766
129
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter(String::from("{{"))];
767
129
        tokens.append(&mut self.space0.tokenize());
768
129
        tokens.push(Token::CodeVariable(self.variable.name.clone()));
769
129
        tokens.append(&mut self.space1.tokenize());
770
129
        tokens.push(Token::CodeDelimiter(String::from("}}")));
771
129
        tokens
772
    }
773
}
774

            
775
impl Tokenizable for Regex {
776
6
    fn tokenize(&self) -> Vec<Token> {
777
6
        let s = str::replace(self.inner.as_str(), "/", "\\/");
778
6
        vec![Token::String(format!("/{s}/"))]
779
    }
780
}
781

            
782
impl Tokenizable for LineTerminator {
783
1476
    fn tokenize(&self) -> Vec<Token> {
784
1476
        let mut tokens: Vec<Token> = vec![];
785
1476
        tokens.append(&mut self.space0.tokenize());
786
1476
        if let Some(comment) = &self.comment {
787
231
            tokens.append(&mut comment.tokenize());
788
        }
789
1476
        tokens.append(&mut self.newline.tokenize());
790
1476
        tokens
791
    }
792
}
793

            
794
impl Tokenizable for Whitespace {
795
6789
    fn tokenize(&self) -> Vec<Token> {
796
6789
        let mut tokens: Vec<Token> = vec![];
797
6789
        if !self.value.is_empty() {
798
3411
            tokens.push(Token::Whitespace(self.value.clone()));
799
        }
800
6789
        tokens
801
    }
802
}
803

            
804
impl Tokenizable for Comment {
805
231
    fn tokenize(&self) -> Vec<Token> {
806
231
        vec![Token::Comment(format!("#{}", self.value.clone()))]
807
    }
808
}
809

            
810
impl Tokenizable for JsonValue {
811
102
    fn tokenize(&self) -> Vec<Token> {
812
102
        let mut tokens: Vec<Token> = vec![];
813
102
        match self {
814
18
            JsonValue::String(s) => {
815
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
816
18
                tokens.append(&mut s.tokenize());
817
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
818
            }
819
45
            JsonValue::Number(value) => {
820
45
                tokens.push(Token::Number(value.to_string()));
821
            }
822
3
            JsonValue::Boolean(value) => {
823
3
                tokens.push(Token::Boolean(value.to_string()));
824
            }
825
15
            JsonValue::List { space0, elements } => {
826
15
                tokens.push(Token::CodeDelimiter("[".to_string()));
827
15
                tokens.push(Token::Whitespace(space0.clone()));
828
51
                for (i, element) in elements.iter().enumerate() {
829
51
                    if i > 0 {
830
39
                        tokens.push(Token::CodeDelimiter(",".to_string()));
831
                    }
832
51
                    tokens.append(&mut element.tokenize());
833
                }
834
15
                tokens.push(Token::CodeDelimiter("]".to_string()));
835
            }
836
15
            JsonValue::Object { space0, elements } => {
837
15
                tokens.push(Token::CodeDelimiter("{".to_string()));
838
15
                tokens.push(Token::Whitespace(space0.clone()));
839
45
                for (i, element) in elements.iter().enumerate() {
840
45
                    if i > 0 {
841
33
                        tokens.push(Token::CodeDelimiter(",".to_string()));
842
                    }
843
45
                    tokens.append(&mut element.tokenize());
844
                }
845
15
                tokens.push(Token::CodeDelimiter("}".to_string()));
846
            }
847
3
            JsonValue::Null => {
848
3
                tokens.push(Token::Keyword("null".to_string()));
849
            }
850
3
            JsonValue::Expression(exp) => {
851
3
                tokens.append(&mut exp.tokenize());
852
            }
853
        }
854
102
        tokens
855
    }
856
}
857

            
858
impl Tokenizable for JsonListElement {
859
51
    fn tokenize(&self) -> Vec<Token> {
860
51
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
861
51
        tokens.append(&mut self.value.tokenize());
862
51
        tokens.push(Token::Whitespace(self.space1.clone()));
863
51
        tokens
864
    }
865
}
866

            
867
impl Tokenizable for JsonObjectElement {
868
45
    fn tokenize(&self) -> Vec<Token> {
869
45
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
870
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
871
45
        tokens.push(Token::String(self.name.to_string()));
872
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
873
45
        tokens.push(Token::Whitespace(self.space1.clone()));
874
45
        tokens.push(Token::CodeDelimiter(":".to_string()));
875
45
        tokens.push(Token::Whitespace(self.space2.clone()));
876
45
        tokens.append(&mut self.value.tokenize());
877
45
        tokens.push(Token::Whitespace(self.space3.clone()));
878
45
        tokens
879
    }
880
}
881
impl Tokenizable for EntryOption {
882
252
    fn tokenize(&self) -> Vec<Token> {
883
252
        let mut tokens: Vec<Token> = vec![];
884
252
        tokens.append(
885
252
            &mut self
886
252
                .line_terminators
887
252
                .iter()
888
253
                .flat_map(|e| e.tokenize())
889
252
                .collect(),
890
252
        );
891
252
        tokens.append(&mut self.space0.tokenize());
892
252
        tokens.push(Token::String(self.kind.name().to_string()));
893
252
        tokens.append(&mut self.space1.tokenize());
894
252
        tokens.push(Token::Colon(String::from(":")));
895
252
        tokens.append(&mut self.space2.tokenize());
896
252
        tokens.append(&mut self.kind.tokenize());
897
252
        tokens.append(&mut self.line_terminator0.tokenize());
898
252
        tokens
899
    }
900
}
901

            
902
/// Tokenizes the value part of an option line.
///
/// Every variant delegates to the `Tokenizable` impl of its payload (boolean,
/// count, duration, filename, template, ...); the option *name* itself is
/// emitted by `EntryOption::tokenize`, not here.
impl Tokenizable for OptionKind {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            OptionKind::AwsSigV4(value) => value.tokenize(),
            OptionKind::CaCertificate(filename) => filename.tokenize(),
            OptionKind::ClientCert(filename) => filename.tokenize(),
            OptionKind::ClientKey(filename) => filename.tokenize(),
            OptionKind::Compressed(value) => value.tokenize(),
            OptionKind::ConnectTo(value) => value.tokenize(),
            OptionKind::Delay(value) => value.tokenize(),
            OptionKind::FollowLocation(value) => value.tokenize(),
            OptionKind::FollowLocationTrusted(value) => value.tokenize(),
            OptionKind::Http10(value) => value.tokenize(),
            OptionKind::Http11(value) => value.tokenize(),
            OptionKind::Http2(value) => value.tokenize(),
            OptionKind::Http3(value) => value.tokenize(),
            OptionKind::Insecure(value) => value.tokenize(),
            OptionKind::IpV4(value) => value.tokenize(),
            OptionKind::IpV6(value) => value.tokenize(),
            OptionKind::MaxRedirect(value) => value.tokenize(),
            OptionKind::NetRc(value) => value.tokenize(),
            OptionKind::NetRcFile(filename) => filename.tokenize(),
            OptionKind::NetRcOptional(value) => value.tokenize(),
            OptionKind::Output(filename) => filename.tokenize(),
            OptionKind::PathAsIs(value) => value.tokenize(),
            OptionKind::Proxy(value) => value.tokenize(),
            OptionKind::Repeat(value) => value.tokenize(),
            OptionKind::Resolve(value) => value.tokenize(),
            OptionKind::Retry(value) => value.tokenize(),
            OptionKind::RetryInterval(value) => value.tokenize(),
            OptionKind::Skip(value) => value.tokenize(),
            OptionKind::UnixSocket(value) => value.tokenize(),
            OptionKind::User(value) => value.tokenize(),
            OptionKind::Variable(value) => value.tokenize(),
            OptionKind::Verbose(value) => value.tokenize(),
            OptionKind::VeryVerbose(value) => value.tokenize(),
        }
    }
}
941

            
942
impl Tokenizable for BooleanOption {
943
105
    fn tokenize(&self) -> Vec<Token> {
944
105
        match self {
945
57
            BooleanOption::Literal(value) => vec![Token::Boolean(value.to_string())],
946
48
            BooleanOption::Expression(expr) => expr.tokenize(),
947
        }
948
    }
949
}
950

            
951
impl Tokenizable for NaturalOption {
952
    fn tokenize(&self) -> Vec<Token> {
953
        match self {
954
            NaturalOption::Literal(value) => vec![Token::Number(value.to_string())],
955
            NaturalOption::Expression(expr) => expr.tokenize(),
956
        }
957
    }
958
}
959

            
960
impl Tokenizable for CountOption {
961
30
    fn tokenize(&self) -> Vec<Token> {
962
30
        match self {
963
21
            CountOption::Literal(retry) => retry.tokenize(),
964
9
            CountOption::Expression(expr) => expr.tokenize(),
965
        }
966
    }
967
}
968

            
969
impl Tokenizable for Count {
970
21
    fn tokenize(&self) -> Vec<Token> {
971
21
        match self {
972
15
            Count::Finite(n) => vec![Token::Number(n.to_string())],
973
6
            Count::Infinite => vec![Token::Number("-1".to_string())],
974
        }
975
    }
976
}
977

            
978
impl Tokenizable for DurationOption {
979
24
    fn tokenize(&self) -> Vec<Token> {
980
24
        match self {
981
18
            DurationOption::Literal(value) => value.tokenize(),
982
6
            DurationOption::Expression(expr) => expr.tokenize(),
983
        }
984
    }
985
}
986

            
987
impl Tokenizable for Duration {
988
18
    fn tokenize(&self) -> Vec<Token> {
989
18
        let mut tokens = vec![Token::Number(self.value.to_string())];
990
18
        if let Some(unit) = self.unit {
991
18
            tokens.push(Token::Unit(unit.to_string()));
992
        }
993
18
        tokens
994
    }
995
}
996

            
997
impl Tokenizable for VariableDefinition {
998
24
    fn tokenize(&self) -> Vec<Token> {
999
24
        let mut tokens: Vec<Token> = vec![Token::String(self.name.clone())];
24
        tokens.append(&mut self.space0.tokenize());
24
        tokens.push(Token::Keyword("=".to_string()));
24
        tokens.append(&mut self.space1.tokenize());
24
        tokens.append(&mut self.value.tokenize());
24
        tokens
    }
}
impl Tokenizable for VariableValue {
24
    fn tokenize(&self) -> Vec<Token> {
24
        match self {
3
            VariableValue::Null => vec![Token::Keyword("null".to_string())],
3
            VariableValue::Bool(v) => vec![Token::Boolean(v.to_string())],
6
            VariableValue::Number(v) => vec![Token::Number(v.to_string())],
12
            VariableValue::String(v) => v.tokenize(),
        }
    }
}
impl Tokenizable for Filter {
90
    fn tokenize(&self) -> Vec<Token> {
90
        match self.value.clone() {
15
            FilterValue::Count => vec![Token::FilterType(String::from("count"))],
3
            FilterValue::DaysAfterNow => vec![Token::FilterType(String::from("daysAfterNow"))],
6
            FilterValue::DaysBeforeNow => vec![Token::FilterType(String::from("daysBeforeNow"))],
6
            FilterValue::Decode { space0, encoding } => {
6
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("decode"))];
6
                tokens.append(&mut space0.tokenize());
6
                tokens.append(&mut encoding.tokenize());
6
                tokens
            }
9
            FilterValue::Format { space0, fmt } => {
9
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("format"))];
9
                tokens.append(&mut space0.tokenize());
9
                tokens.append(&mut fmt.tokenize());
9
                tokens
            }
3
            FilterValue::HtmlEscape => vec![Token::FilterType(String::from("htmlEscape"))],
            FilterValue::HtmlUnescape => {
3
                vec![Token::FilterType(String::from("htmlUnescape"))]
            }
3
            FilterValue::JsonPath { space0, expr } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("jsonpath"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
3
                tokens
            }
3
            FilterValue::Nth { space0, n } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("nth"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.push(Token::Number(n.to_string()));
3
                tokens
            }
3
            FilterValue::Regex { space0, value } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("regex"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut value.tokenize());
3
                tokens
            }
            FilterValue::Replace {
15
                space0,
15
                old_value,
15
                space1,
15
                new_value,
15
            } => {
15
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("replace"))];
15
                tokens.append(&mut space0.tokenize());
15
                tokens.append(&mut old_value.tokenize());
15
                tokens.append(&mut space1.tokenize());
15
                tokens.append(&mut new_value.tokenize());
15
                tokens
            }
3
            FilterValue::UrlEncode => vec![Token::FilterType(String::from("urlEncode"))],
3
            FilterValue::UrlDecode => vec![Token::FilterType(String::from("urlDecode"))],
3
            FilterValue::Split { space0, sep } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("split"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut sep.tokenize());
3
                tokens
            }
3
            FilterValue::ToDate { space0, fmt } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("toDate"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut fmt.tokenize());
3
                tokens
            }
3
            FilterValue::ToFloat => vec![Token::FilterType(String::from("toFloat"))],
3
            FilterValue::ToInt => vec![Token::FilterType(String::from("toInt"))],
3
            FilterValue::XPath { space0, expr } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("xpath"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
3
                tokens
            }
        }
    }
}