1
/*
2
 * Hurl (https://hurl.dev)
3
 * Copyright (C) 2024 Orange
4
 *
5
 * Licensed under the Apache License, Version 2.0 (the "License");
6
 * you may not use this file except in compliance with the License.
7
 * You may obtain a copy of the License at
8
 *
9
 *          http://www.apache.org/licenses/LICENSE-2.0
10
 *
11
 * Unless required by applicable law or agreed to in writing, software
12
 * distributed under the License is distributed on an "AS IS" BASIS,
13
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
 * See the License for the specific language governing permissions and
15
 * limitations under the License.
16
 *
17
 */
18
use hurl_core::ast::*;
19
use hurl_core::typing::{Count, Duration};
20

            
21
/// A lexical token produced by [`Tokenizable::tokenize`].
///
/// Each variant carries its textual form; the token kind is used downstream
/// (e.g. for syntax highlighting / pretty-printing) to decide how the text
/// is rendered.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
    // Structural tokens of a Hurl file
    Method(String),
    Version(String),
    Status(String),
    SectionHeader(String),
    QueryType(String),
    PredicateType(String),
    FilterType(String),
    Not(String),
    Keyword(String),

    // Primitives
    Whitespace(String),
    Comment(String),
    Value(String),
    Colon(String),
    StringDelimiter(String),
    Boolean(String),
    Number(String),
    String(String),
    CodeDelimiter(String),
    CodeVariable(String),
    Lang(String),
    Unit(String),
}
47

            
48
/// A type that can be flattened into a sequence of [`Token`]s, preserving
/// the lexical order of the original source.
pub trait Tokenizable {
    /// Returns the tokens for this AST node.
    fn tokenize(&self) -> Vec<Token>;
}
51

            
52
impl Tokenizable for HurlFile {
53
72
    fn tokenize(&self) -> Vec<Token> {
54
72
        let mut tokens: Vec<Token> = vec![];
55
258
        tokens.append(&mut self.entries.iter().flat_map(|e| e.tokenize()).collect());
56
72
        tokens.append(
57
72
            &mut self
58
72
                .line_terminators
59
72
                .iter()
60
80
                .flat_map(|e| e.tokenize())
61
72
                .collect(),
62
72
        );
63
72
        tokens
64
    }
65
}
66

            
67
impl Tokenizable for Entry {
68
234
    fn tokenize(&self) -> Vec<Token> {
69
234
        let mut tokens: Vec<Token> = vec![];
70
234
        tokens.append(&mut self.request.tokenize());
71
234
        if let Some(response) = &self.response {
72
105
            tokens.append(&mut response.tokenize());
73
        }
74
234
        tokens
75
    }
76
}
77

            
78
impl Tokenizable for Request {
79
234
    fn tokenize(&self) -> Vec<Token> {
80
234
        let mut tokens: Vec<Token> = vec![];
81
234
        tokens.append(
82
234
            &mut self
83
234
                .line_terminators
84
234
                .iter()
85
284
                .flat_map(|e| e.tokenize())
86
234
                .collect(),
87
234
        );
88
234
        tokens.append(&mut self.space0.tokenize());
89
234
        tokens.append(&mut self.method.tokenize());
90
234
        tokens.append(&mut self.space1.tokenize());
91
234
        tokens.append(&mut self.url.tokenize());
92
234
        tokens.append(&mut self.line_terminator0.tokenize());
93
258
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
94
254
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
95
234
        if let Some(body) = &self.body {
96
45
            tokens.append(&mut body.tokenize());
97
        }
98
234
        tokens
99
    }
100
}
101

            
102
impl Tokenizable for Method {
103
234
    fn tokenize(&self) -> Vec<Token> {
104
234
        vec![Token::Method(self.to_string())]
105
    }
106
}
107

            
108
impl Tokenizable for Response {
109
105
    fn tokenize(&self) -> Vec<Token> {
110
105
        let mut tokens: Vec<Token> = vec![];
111
105
        tokens.append(
112
105
            &mut self
113
105
                .line_terminators
114
105
                .iter()
115
109
                .flat_map(|e| e.tokenize())
116
105
                .collect(),
117
105
        );
118
105
        tokens.append(&mut self.space0.tokenize());
119
105
        tokens.append(&mut self.version.tokenize());
120
105
        tokens.append(&mut self.space1.tokenize());
121
105
        tokens.append(&mut self.status.tokenize());
122
105
        tokens.append(&mut self.line_terminator0.tokenize());
123
110
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
124
122
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
125
105
        if let Some(body) = self.clone().body {
126
45
            tokens.append(&mut body.tokenize());
127
        }
128
105
        tokens
129
    }
130
}
131

            
132
impl Tokenizable for Status {
133
105
    fn tokenize(&self) -> Vec<Token> {
134
105
        let mut tokens: Vec<Token> = vec![];
135
105
        match self.value.clone() {
136
3
            StatusValue::Any => tokens.push(Token::Status("*".to_string())),
137
102
            StatusValue::Specific(v) => tokens.push(Token::Status(v.to_string())),
138
        }
139
105
        tokens
140
    }
141
}
142

            
143
impl Tokenizable for Version {
144
105
    fn tokenize(&self) -> Vec<Token> {
145
105
        vec![Token::Version(self.value.to_string())]
146
    }
147
}
148

            
149
impl Tokenizable for Body {
150
90
    fn tokenize(&self) -> Vec<Token> {
151
90
        let mut tokens: Vec<Token> = vec![];
152
90
        tokens.append(
153
90
            &mut self
154
90
                .line_terminators
155
90
                .iter()
156
95
                .flat_map(|e| e.tokenize())
157
90
                .collect(),
158
90
        );
159
90
        tokens.append(&mut self.space0.tokenize());
160
90
        tokens.append(&mut self.value.tokenize());
161
90
        tokens.append(&mut self.line_terminator0.tokenize());
162
90
        tokens
163
    }
164
}
165

            
166
impl Tokenizable for Bytes {
167
90
    fn tokenize(&self) -> Vec<Token> {
168
90
        let mut tokens: Vec<Token> = vec![];
169
90
        match self {
170
6
            Bytes::Json(value) => tokens.append(&mut value.tokenize()),
171
3
            Bytes::Xml(value) => tokens.push(Token::String(value.to_string())),
172
33
            Bytes::MultilineString(value) => tokens.append(&mut value.tokenize()),
173
15
            Bytes::OnelineString(value) => tokens.append(&mut value.tokenize()),
174
12
            Bytes::Base64(value) => tokens.append(&mut value.tokenize()),
175
9
            Bytes::Hex(value) => tokens.append(&mut value.tokenize()),
176
12
            Bytes::File(value) => tokens.append(&mut value.tokenize()),
177
        }
178
90
        tokens
179
    }
180
}
181

            
182
impl Tokenizable for Section {
183
111
    fn tokenize(&self) -> Vec<Token> {
184
111
        let mut tokens: Vec<Token> = vec![];
185
111
        tokens.append(
186
111
            &mut self
187
111
                .line_terminators
188
111
                .iter()
189
121
                .flat_map(|e| e.tokenize())
190
111
                .collect(),
191
111
        );
192
111
        tokens.append(&mut self.space0.tokenize());
193
111
        tokens.push(Token::SectionHeader(format!("[{}]", self.name())));
194
111
        tokens.append(&mut self.line_terminator0.tokenize());
195
111
        tokens.append(&mut self.value.tokenize());
196
111
        tokens
197
    }
198
}
199

            
200
impl Tokenizable for SectionValue {
201
111
    fn tokenize(&self) -> Vec<Token> {
202
111
        let mut tokens: Vec<Token> = vec![];
203
111
        match self {
204
42
            SectionValue::Asserts(items) => {
205
326
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
206
            }
207
12
            SectionValue::QueryParams(items, _) => {
208
19
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
209
            }
210
3
            SectionValue::BasicAuth(item) => {
211
3
                if let Some(kv) = item {
212
3
                    tokens.append(&mut kv.tokenize());
213
                }
214
            }
215
6
            SectionValue::FormParams(items, _) => {
216
14
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
217
            }
218
6
            SectionValue::MultipartFormData(items, _) => {
219
11
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
220
            }
221
9
            SectionValue::Cookies(items) => {
222
12
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
223
            }
224
9
            SectionValue::Captures(items) => {
225
11
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
226
            }
227
24
            SectionValue::Options(items) => {
228
275
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
229
            }
230
        }
231
111
        tokens
232
    }
233
}
234

            
235
impl Tokenizable for Base64 {
236
15
    fn tokenize(&self) -> Vec<Token> {
237
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("base64,"))];
238
15
        tokens.append(&mut self.space0.tokenize());
239
15
        tokens.push(Token::String(self.encoded.to_string()));
240
15
        tokens.append(&mut self.space1.tokenize());
241
15
        tokens.push(Token::Keyword(String::from(";")));
242
15
        tokens
243
    }
244
}
245

            
246
impl Tokenizable for Hex {
247
9
    fn tokenize(&self) -> Vec<Token> {
248
9
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("hex,"))];
249
9
        tokens.append(&mut self.space0.tokenize());
250
9
        tokens.push(Token::String(self.encoded.to_string()));
251
9
        tokens.append(&mut self.space1.tokenize());
252
9
        tokens.push(Token::Keyword(String::from(";")));
253
9
        tokens
254
    }
255
}
256

            
257
impl Tokenizable for File {
258
15
    fn tokenize(&self) -> Vec<Token> {
259
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("file,"))];
260
15
        tokens.append(&mut self.space0.tokenize());
261
15
        tokens.append(&mut self.filename.tokenize());
262
15
        tokens.append(&mut self.space1.tokenize());
263
15
        tokens.push(Token::Keyword(String::from(";")));
264
15
        tokens
265
    }
266
}
267

            
268
impl Tokenizable for KeyValue {
269
120
    fn tokenize(&self) -> Vec<Token> {
270
120
        let mut tokens: Vec<Token> = vec![];
271
120
        tokens.append(
272
120
            &mut self
273
120
                .line_terminators
274
120
                .iter()
275
121
                .flat_map(|e| e.tokenize())
276
120
                .collect(),
277
120
        );
278
120
        tokens.append(&mut self.space0.tokenize());
279
120
        tokens.append(&mut self.key.tokenize());
280
120
        tokens.append(&mut self.space1.tokenize());
281
120
        tokens.push(Token::Colon(String::from(":")));
282
120
        tokens.append(&mut self.space2.tokenize());
283
120
        tokens.append(&mut self.value.tokenize());
284
120
        tokens.append(&mut self.line_terminator0.tokenize());
285
120
        tokens
286
    }
287
}
288

            
289
impl Tokenizable for MultipartParam {
290
9
    fn tokenize(&self) -> Vec<Token> {
291
9
        match self {
292
3
            MultipartParam::Param(key_value) => key_value.tokenize(),
293
6
            MultipartParam::FileParam(file_param) => file_param.tokenize(),
294
        }
295
    }
296
}
297

            
298
impl Tokenizable for FileParam {
299
6
    fn tokenize(&self) -> Vec<Token> {
300
6
        let mut tokens: Vec<Token> = vec![];
301
6
        tokens.append(&mut self.space0.tokenize());
302
6
        tokens.append(&mut self.key.tokenize());
303
6
        tokens.append(&mut self.space1.tokenize());
304
6
        tokens.push(Token::Colon(String::from(":")));
305
6
        tokens.append(&mut self.space2.tokenize());
306
6
        tokens.append(&mut self.value.tokenize());
307
6
        tokens.append(&mut self.line_terminator0.tokenize());
308
6
        tokens
309
    }
310
}
311

            
312
impl Tokenizable for FileValue {
313
6
    fn tokenize(&self) -> Vec<Token> {
314
6
        let mut tokens: Vec<Token> = vec![Token::Keyword("file,".to_string())];
315
6
        tokens.append(&mut self.space0.tokenize());
316
6
        tokens.append(&mut self.filename.tokenize());
317
6
        tokens.append(&mut self.space1.tokenize());
318
6
        tokens.push(Token::Keyword(";".to_string()));
319
6
        tokens.append(&mut self.space2.tokenize());
320
6
        if let Some(content_type) = self.content_type.clone() {
321
3
            tokens.push(Token::String(content_type));
322
        }
323
6
        tokens
324
    }
325
}
326

            
327
impl Tokenizable for Cookie {
328
9
    fn tokenize(&self) -> Vec<Token> {
329
9
        let mut tokens: Vec<Token> = vec![];
330
9
        tokens.append(
331
9
            &mut self
332
9
                .line_terminators
333
9
                .iter()
334
9
                .flat_map(|e| e.tokenize())
335
9
                .collect(),
336
9
        );
337
9
        tokens.append(&mut self.space0.tokenize());
338
9
        tokens.append(&mut self.name.tokenize());
339
9
        tokens.append(&mut self.space1.tokenize());
340
9
        tokens.push(Token::Colon(String::from(":")));
341
9
        tokens.append(&mut self.space2.tokenize());
342
9
        tokens.append(&mut self.value.tokenize());
343
9
        tokens.append(&mut self.line_terminator0.tokenize());
344
9
        tokens
345
    }
346
}
347

            
348
impl Tokenizable for Capture {
349
6
    fn tokenize(&self) -> Vec<Token> {
350
6
        let mut tokens: Vec<Token> = vec![];
351
6
        tokens.append(
352
6
            &mut self
353
6
                .line_terminators
354
6
                .iter()
355
6
                .flat_map(|e| e.tokenize())
356
6
                .collect(),
357
6
        );
358
6
        tokens.append(&mut self.space0.tokenize());
359
6
        tokens.append(&mut self.name.tokenize());
360
6
        tokens.append(&mut self.space1.tokenize());
361
6
        tokens.push(Token::Colon(String::from(":")));
362
6
        tokens.append(&mut self.space2.tokenize());
363
6
        tokens.append(&mut self.query.tokenize());
364
9
        for (space, filter) in &self.filters {
365
3
            tokens.append(&mut space.tokenize());
366
3
            tokens.append(&mut filter.tokenize());
367
        }
368
6
        tokens.append(&mut self.line_terminator0.tokenize());
369
6
        tokens
370
    }
371
}
372

            
373
impl Tokenizable for Assert {
374
312
    fn tokenize(&self) -> Vec<Token> {
375
312
        let mut tokens: Vec<Token> = vec![];
376
312
        tokens.append(
377
312
            &mut self
378
312
                .line_terminators
379
312
                .iter()
380
312
                .flat_map(|e| e.tokenize())
381
312
                .collect(),
382
312
        );
383
312
        tokens.append(&mut self.space0.tokenize());
384
312
        tokens.append(&mut self.query.tokenize());
385
399
        for (space, filter) in &self.filters {
386
87
            tokens.append(&mut space.tokenize());
387
87
            tokens.append(&mut filter.tokenize());
388
        }
389
312
        tokens.append(&mut self.space1.tokenize());
390
312
        // TODO reconvert back your first predicate for jsonpath
391
312
        // so that you can use your firstX predicate for other query
392
312
        tokens.append(&mut self.predicate.tokenize());
393
312
        tokens.append(&mut self.line_terminator0.tokenize());
394
312
        tokens
395
    }
396
}
397

            
398
impl Tokenizable for Query {
399
318
    fn tokenize(&self) -> Vec<Token> {
400
318
        self.value.tokenize()
401
    }
402
}
403

            
404
impl Tokenizable for QueryValue {
405
318
    fn tokenize(&self) -> Vec<Token> {
406
318
        let mut tokens: Vec<Token> = vec![];
407
318
        match self.clone() {
408
6
            QueryValue::Status => tokens.push(Token::QueryType(String::from("status"))),
409
3
            QueryValue::Url => tokens.push(Token::QueryType(String::from("url"))),
410
12
            QueryValue::Header { space0, name } => {
411
12
                tokens.push(Token::QueryType(String::from("header")));
412
12
                tokens.append(&mut space0.tokenize());
413
12
                tokens.append(&mut name.tokenize());
414
            }
415
6
            QueryValue::Cookie { space0, expr } => {
416
6
                tokens.push(Token::QueryType(String::from("cookie")));
417
6
                tokens.append(&mut space0.tokenize());
418
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
419
6
                tokens.append(&mut expr.tokenize());
420
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
421
            }
422
30
            QueryValue::Body => tokens.push(Token::QueryType(String::from("body"))),
423
3
            QueryValue::Xpath { space0, expr } => {
424
3
                tokens.push(Token::QueryType(String::from("xpath")));
425
3
                tokens.append(&mut space0.tokenize());
426
3
                tokens.append(&mut expr.tokenize());
427
            }
428
189
            QueryValue::Jsonpath { space0, expr } => {
429
189
                tokens.push(Token::QueryType(String::from("jsonpath")));
430
189
                tokens.append(&mut space0.tokenize());
431
189
                tokens.append(&mut expr.tokenize());
432
            }
433
3
            QueryValue::Regex { space0, value } => {
434
3
                tokens.push(Token::QueryType(String::from("regex")));
435
3
                tokens.append(&mut space0.tokenize());
436
3
                tokens.append(&mut value.tokenize());
437
            }
438
9
            QueryValue::Variable { space0, name } => {
439
9
                tokens.push(Token::QueryType(String::from("variable")));
440
9
                tokens.append(&mut space0.tokenize());
441
9
                tokens.append(&mut name.tokenize());
442
            }
443
3
            QueryValue::Duration => tokens.push(Token::QueryType(String::from("duration"))),
444
18
            QueryValue::Bytes => tokens.push(Token::QueryType(String::from("bytes"))),
445
3
            QueryValue::Sha256 => tokens.push(Token::QueryType(String::from("sha256"))),
446
3
            QueryValue::Md5 => tokens.push(Token::QueryType(String::from("md5"))),
447
            QueryValue::Certificate {
448
30
                space0,
449
30
                attribute_name: field,
450
30
            } => {
451
30
                tokens.push(Token::QueryType(String::from("certificate")));
452
30
                tokens.append(&mut space0.tokenize());
453
30
                tokens.append(&mut field.tokenize());
454
            }
455
        }
456
318
        tokens
457
    }
458
}
459

            
460
impl Tokenizable for RegexValue {
461
21
    fn tokenize(&self) -> Vec<Token> {
462
21
        match self {
463
18
            RegexValue::Template(template) => template.tokenize(),
464
3
            RegexValue::Regex(regex) => regex.tokenize(),
465
        }
466
    }
467
}
468

            
469
impl Tokenizable for CookiePath {
470
6
    fn tokenize(&self) -> Vec<Token> {
471
6
        let mut tokens: Vec<Token> = vec![];
472
6
        tokens.append(&mut self.name.tokenize());
473
6
        if let Some(attribute) = self.attribute.clone() {
474
3
            tokens.append(&mut attribute.tokenize());
475
        }
476
6
        tokens
477
    }
478
}
479

            
480
impl Tokenizable for CookieAttribute {
481
3
    fn tokenize(&self) -> Vec<Token> {
482
3
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter("[".to_string())];
483
3
        tokens.append(&mut self.space0.tokenize());
484
3
        tokens.push(Token::String(self.name.value()));
485
3
        tokens.append(&mut self.space1.tokenize());
486
3
        tokens.push(Token::CodeDelimiter("]".to_string()));
487
3
        tokens
488
    }
489
}
490

            
491
impl Tokenizable for CertificateAttributeName {
492
30
    fn tokenize(&self) -> Vec<Token> {
493
30
        let value = match self {
494
3
            CertificateAttributeName::Subject => "Subject",
495
3
            CertificateAttributeName::Issuer => "Issuer",
496
9
            CertificateAttributeName::StartDate => "Start-Date",
497
12
            CertificateAttributeName::ExpireDate => "Expire-Date",
498
3
            CertificateAttributeName::SerialNumber => "Serial-Number",
499
        };
500
30
        vec![
501
30
            Token::StringDelimiter("\"".to_string()),
502
30
            Token::String(value.to_string()),
503
30
            Token::StringDelimiter("\"".to_string()),
504
30
        ]
505
    }
506
}
507

            
508
impl Tokenizable for Predicate {
509
312
    fn tokenize(&self) -> Vec<Token> {
510
312
        let mut tokens: Vec<Token> = vec![];
511
312
        if self.not {
512
3
            tokens.push(Token::Not(String::from("not")));
513
3
            tokens.append(&mut self.space0.tokenize());
514
        }
515
312
        tokens.append(&mut self.predicate_func.tokenize());
516
312
        tokens
517
    }
518
}
519

            
520
impl Tokenizable for PredicateFunc {
521
312
    fn tokenize(&self) -> Vec<Token> {
522
312
        self.value.tokenize()
523
    }
524
}
525

            
526
impl Tokenizable for PredicateFuncValue {
527
312
    fn tokenize(&self) -> Vec<Token> {
528
312
        let mut tokens: Vec<Token> = vec![];
529
312
        match self {
530
201
            PredicateFuncValue::Equal { space0, value, .. } => {
531
201
                tokens.push(Token::PredicateType(self.name()));
532
201
                tokens.append(&mut space0.tokenize());
533
201
                tokens.append(&mut value.tokenize());
534
            }
535
9
            PredicateFuncValue::NotEqual { space0, value, .. } => {
536
9
                tokens.push(Token::PredicateType(self.name()));
537
9
                tokens.append(&mut space0.tokenize());
538
9
                tokens.append(&mut value.tokenize());
539
            }
540
9
            PredicateFuncValue::GreaterThan { space0, value, .. } => {
541
9
                tokens.push(Token::PredicateType(self.name()));
542
9
                tokens.append(&mut space0.tokenize());
543
9
                tokens.append(&mut value.tokenize());
544
            }
545
3
            PredicateFuncValue::GreaterThanOrEqual { space0, value, .. } => {
546
3
                tokens.push(Token::PredicateType(self.name()));
547
3
                tokens.append(&mut space0.tokenize());
548
3
                tokens.append(&mut value.tokenize());
549
            }
550
12
            PredicateFuncValue::LessThan { space0, value, .. } => {
551
12
                tokens.push(Token::PredicateType(self.name()));
552
12
                tokens.append(&mut space0.tokenize());
553
12
                tokens.append(&mut value.tokenize());
554
            }
555
3
            PredicateFuncValue::LessThanOrEqual { space0, value, .. } => {
556
3
                tokens.push(Token::PredicateType(self.name()));
557
3
                tokens.append(&mut space0.tokenize());
558
3
                tokens.append(&mut value.tokenize());
559
            }
560
9
            PredicateFuncValue::StartWith { space0, value } => {
561
9
                tokens.push(Token::PredicateType(self.name()));
562
9
                tokens.append(&mut space0.tokenize());
563
9
                tokens.append(&mut value.tokenize());
564
            }
565
6
            PredicateFuncValue::EndWith { space0, value } => {
566
6
                tokens.push(Token::PredicateType(self.name()));
567
6
                tokens.append(&mut space0.tokenize());
568
6
                tokens.append(&mut value.tokenize());
569
            }
570
6
            PredicateFuncValue::Contain { space0, value } => {
571
6
                tokens.push(Token::PredicateType(self.name()));
572
6
                tokens.append(&mut space0.tokenize());
573
6
                tokens.append(&mut value.tokenize());
574
            }
575
3
            PredicateFuncValue::Include { space0, value } => {
576
3
                tokens.push(Token::PredicateType(self.name()));
577
3
                tokens.append(&mut space0.tokenize());
578
3
                tokens.append(&mut value.tokenize());
579
            }
580
6
            PredicateFuncValue::Match { space0, value } => {
581
6
                tokens.push(Token::PredicateType(self.name()));
582
6
                tokens.append(&mut space0.tokenize());
583
6
                tokens.append(&mut value.tokenize());
584
            }
585

            
586
3
            PredicateFuncValue::IsInteger => {
587
3
                tokens.push(Token::PredicateType(self.name()));
588
            }
589
3
            PredicateFuncValue::IsFloat => {
590
3
                tokens.push(Token::PredicateType(self.name()));
591
            }
592
3
            PredicateFuncValue::IsBoolean => {
593
3
                tokens.push(Token::PredicateType(self.name()));
594
            }
595
3
            PredicateFuncValue::IsString => {
596
3
                tokens.push(Token::PredicateType(self.name()));
597
            }
598
3
            PredicateFuncValue::IsCollection => {
599
3
                tokens.push(Token::PredicateType(self.name()));
600
            }
601
9
            PredicateFuncValue::IsDate => {
602
9
                tokens.push(Token::PredicateType(self.name()));
603
            }
604
3
            PredicateFuncValue::IsIsoDate => {
605
3
                tokens.push(Token::PredicateType(self.name()));
606
            }
607
12
            PredicateFuncValue::Exist => {
608
12
                tokens.push(Token::PredicateType(self.name()));
609
            }
610
3
            PredicateFuncValue::IsEmpty => {
611
3
                tokens.push(Token::PredicateType(self.name()));
612
            }
613
3
            PredicateFuncValue::IsNumber => {
614
3
                tokens.push(Token::PredicateType(self.name()));
615
            }
616
        }
617
312
        tokens
618
    }
619
}
620

            
621
impl Tokenizable for PredicateValue {
622
267
    fn tokenize(&self) -> Vec<Token> {
623
267
        match self {
624
105
            PredicateValue::String(value) => value.tokenize(),
625
15
            PredicateValue::MultilineString(value) => value.tokenize(),
626
3
            PredicateValue::Bool(value) => vec![Token::Boolean(value.to_string())],
627
3
            PredicateValue::Null => vec![Token::Keyword("null".to_string())],
628
108
            PredicateValue::Number(value) => vec![Token::Number(value.to_string())],
629
3
            PredicateValue::File(value) => value.tokenize(),
630
21
            PredicateValue::Hex(value) => vec![Token::String(value.to_string())],
631
3
            PredicateValue::Base64(value) => value.tokenize(),
632
3
            PredicateValue::Placeholder(value) => value.tokenize(),
633
3
            PredicateValue::Regex(value) => value.tokenize(),
634
        }
635
    }
636
}
637

            
638
impl Tokenizable for MultilineString {
639
48
    fn tokenize(&self) -> Vec<Token> {
640
48
        let mut tokens: Vec<Token> = vec![Token::StringDelimiter("```".to_string())];
641
48
        tokens.push(Token::Lang(self.lang().to_string()));
642
48
        for (i, attribute) in self.attributes.iter().enumerate() {
643
3
            if i > 0 || !self.lang().is_empty() {
644
                tokens.push(Token::StringDelimiter(",".to_string()));
645
            }
646
3
            tokens.append(&mut attribute.tokenize());
647
        }
648
48
        match self {
649
            MultilineString {
650
21
                kind: MultilineStringKind::Text(text),
651
                ..
652
            }
653
            | MultilineString {
654
9
                kind: MultilineStringKind::Json(text),
655
                ..
656
            }
657
            | MultilineString {
658
9
                kind: MultilineStringKind::Xml(text),
659
                ..
660
39
            } => tokens.append(&mut text.tokenize()),
661
            MultilineString {
662
9
                kind: MultilineStringKind::GraphQl(graphql),
663
9
                ..
664
9
            } => tokens.append(&mut graphql.tokenize()),
665
        }
666
48
        tokens.push(Token::StringDelimiter("```".to_string()));
667
48
        tokens
668
    }
669
}
670

            
671
impl Tokenizable for MultilineStringAttribute {
672
3
    fn tokenize(&self) -> Vec<Token> {
673
3
        match self {
674
3
            MultilineStringAttribute::Escape => vec![Token::String("escape".to_string())],
675
            MultilineStringAttribute::NoVariable => vec![Token::String("novariable".to_string())],
676
        }
677
    }
678
}
679

            
680
impl Tokenizable for Text {
681
39
    fn tokenize(&self) -> Vec<Token> {
682
39
        let mut tokens: Vec<Token> = vec![];
683
39
        tokens.append(&mut self.space.tokenize());
684
39
        tokens.append(&mut self.newline.tokenize());
685
39
        tokens.append(&mut self.value.tokenize());
686
39
        tokens
687
    }
688
}
689

            
690
impl Tokenizable for GraphQl {
691
9
    fn tokenize(&self) -> Vec<Token> {
692
9
        let mut tokens: Vec<Token> = vec![];
693
9
        tokens.append(&mut self.space.tokenize());
694
9
        tokens.append(&mut self.newline.tokenize());
695
9
        tokens.append(&mut self.value.tokenize());
696
9
        if let Some(vars) = &self.variables {
697
            tokens.append(&mut vars.tokenize());
698
        }
699
9
        tokens
700
    }
701
}
702

            
703
impl Tokenizable for GraphQlVariables {
704
    fn tokenize(&self) -> Vec<Token> {
705
        let mut tokens: Vec<Token> = vec![];
706
        tokens.push(Token::String("variables".to_string()));
707
        tokens.append(&mut self.space.tokenize());
708
        tokens.append(&mut self.value.tokenize());
709
        tokens.append(&mut self.whitespace.tokenize());
710
        tokens
711
    }
712
}
713

            
714
impl Tokenizable for EncodedString {
715
    fn tokenize(&self) -> Vec<Token> {
716
        let mut tokens: Vec<Token> = vec![];
717
        if self.quotes {
718
            tokens.push(Token::StringDelimiter(
719
                if self.quotes { "\"" } else { "" }.to_string(),
720
            ));
721
        }
722
        tokens.push(Token::String(self.encoded.clone()));
723

            
724
        if self.quotes {
725
            tokens.push(Token::StringDelimiter(
726
                if self.quotes { "\"" } else { "" }.to_string(),
727
            ));
728
        }
729
        tokens
730
    }
731
}
732

            
733
impl Tokenizable for Template {
734
1074
    fn tokenize(&self) -> Vec<Token> {
735
1074
        let mut tokens: Vec<Token> = vec![];
736
1074
        if let Some(d) = self.delimiter {
737
411
            tokens.push(Token::StringDelimiter(d.to_string()));
738
        }
739
2151
        for element in &self.elements {
740
1077
            tokens.append(&mut element.tokenize());
741
        }
742
1074
        if let Some(d) = self.delimiter {
743
411
            tokens.push(Token::StringDelimiter(d.to_string()));
744
        }
745
1074
        tokens
746
    }
747
}
748

            
749
impl Tokenizable for TemplateElement {
750
1077
    fn tokenize(&self) -> Vec<Token> {
751
1077
        match self {
752
1014
            TemplateElement::String { encoded, .. } => {
753
1014
                vec![Token::String(encoded.to_string())]
754
            }
755
63
            TemplateElement::Placeholder(value) => {
756
63
                let mut tokens: Vec<Token> = vec![];
757
63
                tokens.append(&mut value.tokenize());
758
63
                tokens
759
            }
760
        }
761
    }
762
}
763

            
764
impl Tokenizable for Placeholder {
765
138
    fn tokenize(&self) -> Vec<Token> {
766
138
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter(String::from("{{"))];
767
138
        tokens.append(&mut self.space0.tokenize());
768
138
        tokens.append(&mut self.expr.tokenize());
769
138
        tokens.append(&mut self.space1.tokenize());
770
138
        tokens.push(Token::CodeDelimiter(String::from("}}")));
771
138
        tokens
772
    }
773
}
774

            
775
impl Tokenizable for Expr {
776
138
    fn tokenize(&self) -> Vec<Token> {
777
138
        self.kind.tokenize()
778
    }
779
}
780

            
781
impl Tokenizable for ExprKind {
782
138
    fn tokenize(&self) -> Vec<Token> {
783
138
        match self {
784
135
            ExprKind::Variable(variable) => variable.tokenize(),
785
3
            ExprKind::Function(function) => function.tokenize(),
786
        }
787
    }
788
}
789

            
790
impl Tokenizable for Variable {
791
135
    fn tokenize(&self) -> Vec<Token> {
792
135
        vec![Token::CodeVariable(self.name.clone())]
793
    }
794
}
795

            
796
impl Tokenizable for Function {
797
3
    fn tokenize(&self) -> Vec<Token> {
798
3
        match self {
799
3
            Function::NewUuid => vec![Token::CodeVariable("newUuid".to_string())],
800
        }
801
    }
802
}
803

            
804
impl Tokenizable for Regex {
805
6
    fn tokenize(&self) -> Vec<Token> {
806
6
        let s = str::replace(self.inner.as_str(), "/", "\\/");
807
6
        vec![Token::String(format!("/{s}/"))]
808
    }
809
}
810

            
811
impl Tokenizable for LineTerminator {
812
1497
    fn tokenize(&self) -> Vec<Token> {
813
1497
        let mut tokens: Vec<Token> = vec![];
814
1497
        tokens.append(&mut self.space0.tokenize());
815
1497
        if let Some(comment) = &self.comment {
816
231
            tokens.append(&mut comment.tokenize());
817
        }
818
1497
        tokens.append(&mut self.newline.tokenize());
819
1497
        tokens
820
    }
821
}
822

            
823
impl Tokenizable for Whitespace {
824
6924
    fn tokenize(&self) -> Vec<Token> {
825
6924
        let mut tokens: Vec<Token> = vec![];
826
6924
        if !self.value.is_empty() {
827
3456
            tokens.push(Token::Whitespace(self.value.clone()));
828
        }
829
6924
        tokens
830
    }
831
}
832

            
833
impl Tokenizable for Comment {
834
231
    fn tokenize(&self) -> Vec<Token> {
835
231
        vec![Token::Comment(format!("#{}", self.value.clone()))]
836
    }
837
}
838

            
839
impl Tokenizable for JsonValue {
840
102
    fn tokenize(&self) -> Vec<Token> {
841
102
        let mut tokens: Vec<Token> = vec![];
842
102
        match self {
843
18
            JsonValue::String(s) => {
844
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
845
18
                tokens.append(&mut s.tokenize());
846
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
847
            }
848
45
            JsonValue::Number(value) => {
849
45
                tokens.push(Token::Number(value.to_string()));
850
            }
851
3
            JsonValue::Boolean(value) => {
852
3
                tokens.push(Token::Boolean(value.to_string()));
853
            }
854
15
            JsonValue::List { space0, elements } => {
855
15
                tokens.push(Token::CodeDelimiter("[".to_string()));
856
15
                tokens.push(Token::Whitespace(space0.clone()));
857
51
                for (i, element) in elements.iter().enumerate() {
858
51
                    if i > 0 {
859
39
                        tokens.push(Token::CodeDelimiter(",".to_string()));
860
                    }
861
51
                    tokens.append(&mut element.tokenize());
862
                }
863
15
                tokens.push(Token::CodeDelimiter("]".to_string()));
864
            }
865
15
            JsonValue::Object { space0, elements } => {
866
15
                tokens.push(Token::CodeDelimiter("{".to_string()));
867
15
                tokens.push(Token::Whitespace(space0.clone()));
868
45
                for (i, element) in elements.iter().enumerate() {
869
45
                    if i > 0 {
870
33
                        tokens.push(Token::CodeDelimiter(",".to_string()));
871
                    }
872
45
                    tokens.append(&mut element.tokenize());
873
                }
874
15
                tokens.push(Token::CodeDelimiter("}".to_string()));
875
            }
876
3
            JsonValue::Null => {
877
3
                tokens.push(Token::Keyword("null".to_string()));
878
            }
879
3
            JsonValue::Placeholder(exp) => {
880
3
                tokens.append(&mut exp.tokenize());
881
            }
882
        }
883
102
        tokens
884
    }
885
}
886

            
887
impl Tokenizable for JsonListElement {
888
51
    fn tokenize(&self) -> Vec<Token> {
889
51
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
890
51
        tokens.append(&mut self.value.tokenize());
891
51
        tokens.push(Token::Whitespace(self.space1.clone()));
892
51
        tokens
893
    }
894
}
895

            
896
impl Tokenizable for JsonObjectElement {
897
45
    fn tokenize(&self) -> Vec<Token> {
898
45
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
899
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
900
45
        tokens.push(Token::String(self.name.to_string()));
901
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
902
45
        tokens.push(Token::Whitespace(self.space1.clone()));
903
45
        tokens.push(Token::CodeDelimiter(":".to_string()));
904
45
        tokens.push(Token::Whitespace(self.space2.clone()));
905
45
        tokens.append(&mut self.value.tokenize());
906
45
        tokens.push(Token::Whitespace(self.space3.clone()));
907
45
        tokens
908
    }
909
}
910

            
911
impl Tokenizable for EntryOption {
912
267
    fn tokenize(&self) -> Vec<Token> {
913
267
        let mut tokens: Vec<Token> = vec![];
914
267
        tokens.append(
915
267
            &mut self
916
267
                .line_terminators
917
267
                .iter()
918
268
                .flat_map(|e| e.tokenize())
919
267
                .collect(),
920
267
        );
921
267
        tokens.append(&mut self.space0.tokenize());
922
267
        tokens.push(Token::String(self.kind.name().to_string()));
923
267
        tokens.append(&mut self.space1.tokenize());
924
267
        tokens.push(Token::Colon(String::from(":")));
925
267
        tokens.append(&mut self.space2.tokenize());
926
267
        tokens.append(&mut self.kind.tokenize());
927
267
        tokens.append(&mut self.line_terminator0.tokenize());
928
267
        tokens
929
    }
930
}
931

            
932
/// Tokenizes the value part of an `[Options]` entry.
///
/// Every variant simply delegates to the tokenizer of its payload (boolean,
/// count, duration, filename or template option), so the resulting token
/// stream is just the rendered option value.
impl Tokenizable for OptionKind {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            OptionKind::AwsSigV4(value) => value.tokenize(),
            OptionKind::CaCertificate(filename) => filename.tokenize(),
            OptionKind::ClientCert(filename) => filename.tokenize(),
            OptionKind::ClientKey(filename) => filename.tokenize(),
            OptionKind::Compressed(value) => value.tokenize(),
            OptionKind::ConnectTo(value) => value.tokenize(),
            OptionKind::ConnectTimeout(value) => value.tokenize(),
            OptionKind::Delay(value) => value.tokenize(),
            OptionKind::FollowLocation(value) => value.tokenize(),
            OptionKind::FollowLocationTrusted(value) => value.tokenize(),
            OptionKind::Http10(value) => value.tokenize(),
            OptionKind::Http11(value) => value.tokenize(),
            OptionKind::Http2(value) => value.tokenize(),
            OptionKind::Http3(value) => value.tokenize(),
            OptionKind::Insecure(value) => value.tokenize(),
            OptionKind::IpV4(value) => value.tokenize(),
            OptionKind::IpV6(value) => value.tokenize(),
            OptionKind::LimitRate(value) => value.tokenize(),
            OptionKind::MaxRedirect(value) => value.tokenize(),
            OptionKind::NetRc(value) => value.tokenize(),
            OptionKind::NetRcFile(filename) => filename.tokenize(),
            OptionKind::NetRcOptional(value) => value.tokenize(),
            OptionKind::Output(filename) => filename.tokenize(),
            OptionKind::PathAsIs(value) => value.tokenize(),
            OptionKind::Proxy(value) => value.tokenize(),
            OptionKind::Repeat(value) => value.tokenize(),
            OptionKind::Resolve(value) => value.tokenize(),
            OptionKind::Retry(value) => value.tokenize(),
            OptionKind::RetryInterval(value) => value.tokenize(),
            OptionKind::Skip(value) => value.tokenize(),
            OptionKind::UnixSocket(value) => value.tokenize(),
            OptionKind::User(value) => value.tokenize(),
            OptionKind::Variable(value) => value.tokenize(),
            OptionKind::Verbose(value) => value.tokenize(),
            OptionKind::VeryVerbose(value) => value.tokenize(),
        }
    }
}
973

            
974
impl Tokenizable for BooleanOption {
975
105
    fn tokenize(&self) -> Vec<Token> {
976
105
        match self {
977
57
            BooleanOption::Literal(value) => vec![Token::Boolean(value.to_string())],
978
48
            BooleanOption::Placeholder(expr) => expr.tokenize(),
979
        }
980
    }
981
}
982

            
983
impl Tokenizable for NaturalOption {
984
6
    fn tokenize(&self) -> Vec<Token> {
985
6
        match self {
986
3
            NaturalOption::Literal(value) => vec![Token::Number(value.to_string())],
987
3
            NaturalOption::Placeholder(expr) => expr.tokenize(),
988
        }
989
    }
990
}
991

            
992
impl Tokenizable for CountOption {
993
30
    fn tokenize(&self) -> Vec<Token> {
994
30
        match self {
995
21
            CountOption::Literal(retry) => retry.tokenize(),
996
9
            CountOption::Placeholder(expr) => expr.tokenize(),
997
        }
998
    }
999
}
impl Tokenizable for Count {
21
    fn tokenize(&self) -> Vec<Token> {
21
        match self {
15
            Count::Finite(n) => vec![Token::Number(n.to_string())],
6
            Count::Infinite => vec![Token::Number("-1".to_string())],
        }
    }
}
impl Tokenizable for DurationOption {
30
    fn tokenize(&self) -> Vec<Token> {
30
        match self {
21
            DurationOption::Literal(value) => value.tokenize(),
9
            DurationOption::Placeholder(expr) => expr.tokenize(),
        }
    }
}
impl Tokenizable for Duration {
21
    fn tokenize(&self) -> Vec<Token> {
21
        let mut tokens = vec![Token::Number(self.value.to_string())];
21
        if let Some(unit) = self.unit {
21
            tokens.push(Token::Unit(unit.to_string()));
        }
21
        tokens
    }
}
impl Tokenizable for VariableDefinition {
27
    fn tokenize(&self) -> Vec<Token> {
27
        let mut tokens: Vec<Token> = vec![Token::String(self.name.clone())];
27
        tokens.append(&mut self.space0.tokenize());
27
        tokens.push(Token::Keyword("=".to_string()));
27
        tokens.append(&mut self.space1.tokenize());
27
        tokens.append(&mut self.value.tokenize());
27
        tokens
    }
}
impl Tokenizable for VariableValue {
27
    fn tokenize(&self) -> Vec<Token> {
27
        match self {
3
            VariableValue::Null => vec![Token::Keyword("null".to_string())],
3
            VariableValue::Bool(v) => vec![Token::Boolean(v.to_string())],
6
            VariableValue::Number(v) => vec![Token::Number(v.to_string())],
15
            VariableValue::String(v) => v.tokenize(),
        }
    }
}
impl Tokenizable for Filter {
90
    fn tokenize(&self) -> Vec<Token> {
90
        match self.value.clone() {
15
            FilterValue::Count => vec![Token::FilterType(String::from("count"))],
3
            FilterValue::DaysAfterNow => vec![Token::FilterType(String::from("daysAfterNow"))],
6
            FilterValue::DaysBeforeNow => vec![Token::FilterType(String::from("daysBeforeNow"))],
6
            FilterValue::Decode { space0, encoding } => {
6
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("decode"))];
6
                tokens.append(&mut space0.tokenize());
6
                tokens.append(&mut encoding.tokenize());
6
                tokens
            }
9
            FilterValue::Format { space0, fmt } => {
9
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("format"))];
9
                tokens.append(&mut space0.tokenize());
9
                tokens.append(&mut fmt.tokenize());
9
                tokens
            }
3
            FilterValue::HtmlEscape => vec![Token::FilterType(String::from("htmlEscape"))],
            FilterValue::HtmlUnescape => {
3
                vec![Token::FilterType(String::from("htmlUnescape"))]
            }
3
            FilterValue::JsonPath { space0, expr } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("jsonpath"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
3
                tokens
            }
3
            FilterValue::Nth { space0, n } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("nth"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.push(Token::Number(n.to_string()));
3
                tokens
            }
3
            FilterValue::Regex { space0, value } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("regex"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut value.tokenize());
3
                tokens
            }
            FilterValue::Replace {
15
                space0,
15
                old_value,
15
                space1,
15
                new_value,
15
            } => {
15
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("replace"))];
15
                tokens.append(&mut space0.tokenize());
15
                tokens.append(&mut old_value.tokenize());
15
                tokens.append(&mut space1.tokenize());
15
                tokens.append(&mut new_value.tokenize());
15
                tokens
            }
3
            FilterValue::UrlEncode => vec![Token::FilterType(String::from("urlEncode"))],
3
            FilterValue::UrlDecode => vec![Token::FilterType(String::from("urlDecode"))],
3
            FilterValue::Split { space0, sep } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("split"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut sep.tokenize());
3
                tokens
            }
3
            FilterValue::ToDate { space0, fmt } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("toDate"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut fmt.tokenize());
3
                tokens
            }
3
            FilterValue::ToFloat => vec![Token::FilterType(String::from("toFloat"))],
3
            FilterValue::ToInt => vec![Token::FilterType(String::from("toInt"))],
3
            FilterValue::XPath { space0, expr } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("xpath"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
3
                tokens
            }
        }
    }
}