1
/*
2
 * Hurl (https://hurl.dev)
3
 * Copyright (C) 2024 Orange
4
 *
5
 * Licensed under the Apache License, Version 2.0 (the "License");
6
 * you may not use this file except in compliance with the License.
7
 * You may obtain a copy of the License at
8
 *
9
 *          http://www.apache.org/licenses/LICENSE-2.0
10
 *
11
 * Unless required by applicable law or agreed to in writing, software
12
 * distributed under the License is distributed on an "AS IS" BASIS,
13
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
 * See the License for the specific language governing permissions and
15
 * limitations under the License.
16
 *
17
 */
18
use hurl_core::ast::{
19
    Assert, Base64, Body, BooleanOption, Bytes, Capture, CertificateAttributeName, Comment, Cookie,
20
    CookieAttribute, CookiePath, CountOption, DurationOption, EncodedString, Entry, EntryOption,
21
    Expr, ExprKind, File, FileParam, FileValue, Filter, FilterValue, Function, GraphQl,
22
    GraphQlVariables, Hex, HurlFile, JsonListElement, JsonObjectElement, JsonValue, KeyValue,
23
    LineTerminator, Method, MultilineString, MultilineStringAttribute, MultilineStringKind,
24
    MultipartParam, NaturalOption, OptionKind, Placeholder, Predicate, PredicateFunc,
25
    PredicateFuncValue, PredicateValue, Query, QueryValue, Regex, RegexValue, Request, Response,
26
    Section, SectionValue, Status, StatusValue, Template, TemplateElement, Text, Variable,
27
    VariableDefinition, VariableValue, Version, Whitespace, I64, U64,
28
};
29
use hurl_core::typing::{Count, Duration};
30

            
31
/// A lexical token emitted when flattening a Hurl AST node into a
/// displayable token stream (used for export/formatting).
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
    Method(String),
    Version(String),
    Status(String),
    SectionHeader(String),
    QueryType(String),
    PredicateType(String),
    FilterType(String),
    Not(String),
    Keyword(String),

    // Primitives
    Whitespace(String),
    Comment(String),
    Value(String),
    Colon(String),
    StringDelimiter(String),
    Boolean(String),
    Number(String),
    String(String),
    CodeDelimiter(String),
    CodeVariable(String),
    Lang(String),
    Unit(String),
}
57

            
58
/// Types that can be converted into a flat sequence of [`Token`]s.
pub trait Tokenizable {
    /// Returns the ordered list of tokens for this AST node.
    fn tokenize(&self) -> Vec<Token>;
}
61

            
62
impl Tokenizable for HurlFile {
63
72
    fn tokenize(&self) -> Vec<Token> {
64
72
        let mut tokens: Vec<Token> = vec![];
65
258
        tokens.append(&mut self.entries.iter().flat_map(|e| e.tokenize()).collect());
66
72
        tokens.append(
67
72
            &mut self
68
72
                .line_terminators
69
72
                .iter()
70
80
                .flat_map(|e| e.tokenize())
71
72
                .collect(),
72
72
        );
73
72
        tokens
74
    }
75
}
76

            
77
impl Tokenizable for Entry {
78
234
    fn tokenize(&self) -> Vec<Token> {
79
234
        let mut tokens: Vec<Token> = vec![];
80
234
        tokens.append(&mut self.request.tokenize());
81
234
        if let Some(response) = &self.response {
82
105
            tokens.append(&mut response.tokenize());
83
        }
84
234
        tokens
85
    }
86
}
87

            
88
impl Tokenizable for Request {
89
234
    fn tokenize(&self) -> Vec<Token> {
90
234
        let mut tokens: Vec<Token> = vec![];
91
234
        tokens.append(
92
234
            &mut self
93
234
                .line_terminators
94
234
                .iter()
95
284
                .flat_map(|e| e.tokenize())
96
234
                .collect(),
97
234
        );
98
234
        tokens.append(&mut self.space0.tokenize());
99
234
        tokens.append(&mut self.method.tokenize());
100
234
        tokens.append(&mut self.space1.tokenize());
101
234
        tokens.append(&mut self.url.tokenize());
102
234
        tokens.append(&mut self.line_terminator0.tokenize());
103
258
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
104
254
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
105
234
        if let Some(body) = &self.body {
106
45
            tokens.append(&mut body.tokenize());
107
        }
108
234
        tokens
109
    }
110
}
111

            
112
impl Tokenizable for Method {
113
234
    fn tokenize(&self) -> Vec<Token> {
114
234
        vec![Token::Method(self.to_string())]
115
    }
116
}
117

            
118
impl Tokenizable for Response {
119
105
    fn tokenize(&self) -> Vec<Token> {
120
105
        let mut tokens: Vec<Token> = vec![];
121
105
        tokens.append(
122
105
            &mut self
123
105
                .line_terminators
124
105
                .iter()
125
109
                .flat_map(|e| e.tokenize())
126
105
                .collect(),
127
105
        );
128
105
        tokens.append(&mut self.space0.tokenize());
129
105
        tokens.append(&mut self.version.tokenize());
130
105
        tokens.append(&mut self.space1.tokenize());
131
105
        tokens.append(&mut self.status.tokenize());
132
105
        tokens.append(&mut self.line_terminator0.tokenize());
133
110
        tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
134
122
        tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
135
105
        if let Some(body) = self.clone().body {
136
45
            tokens.append(&mut body.tokenize());
137
        }
138
105
        tokens
139
    }
140
}
141

            
142
impl Tokenizable for Status {
143
105
    fn tokenize(&self) -> Vec<Token> {
144
105
        let mut tokens: Vec<Token> = vec![];
145
105
        match self.value.clone() {
146
3
            StatusValue::Any => tokens.push(Token::Status("*".to_string())),
147
102
            StatusValue::Specific(v) => tokens.push(Token::Status(v.to_string())),
148
        }
149
105
        tokens
150
    }
151
}
152

            
153
impl Tokenizable for Version {
154
105
    fn tokenize(&self) -> Vec<Token> {
155
105
        vec![Token::Version(self.value.to_string())]
156
    }
157
}
158

            
159
impl Tokenizable for Body {
160
90
    fn tokenize(&self) -> Vec<Token> {
161
90
        let mut tokens: Vec<Token> = vec![];
162
90
        tokens.append(
163
90
            &mut self
164
90
                .line_terminators
165
90
                .iter()
166
95
                .flat_map(|e| e.tokenize())
167
90
                .collect(),
168
90
        );
169
90
        tokens.append(&mut self.space0.tokenize());
170
90
        tokens.append(&mut self.value.tokenize());
171
90
        tokens.append(&mut self.line_terminator0.tokenize());
172
90
        tokens
173
    }
174
}
175

            
176
impl Tokenizable for Bytes {
177
90
    fn tokenize(&self) -> Vec<Token> {
178
90
        let mut tokens: Vec<Token> = vec![];
179
90
        match self {
180
6
            Bytes::Json(value) => tokens.append(&mut value.tokenize()),
181
3
            Bytes::Xml(value) => tokens.push(Token::String(value.to_string())),
182
33
            Bytes::MultilineString(value) => tokens.append(&mut value.tokenize()),
183
15
            Bytes::OnelineString(value) => tokens.append(&mut value.tokenize()),
184
12
            Bytes::Base64(value) => tokens.append(&mut value.tokenize()),
185
9
            Bytes::Hex(value) => tokens.append(&mut value.tokenize()),
186
12
            Bytes::File(value) => tokens.append(&mut value.tokenize()),
187
        }
188
90
        tokens
189
    }
190
}
191

            
192
impl Tokenizable for Section {
193
111
    fn tokenize(&self) -> Vec<Token> {
194
111
        let mut tokens: Vec<Token> = vec![];
195
111
        tokens.append(
196
111
            &mut self
197
111
                .line_terminators
198
111
                .iter()
199
121
                .flat_map(|e| e.tokenize())
200
111
                .collect(),
201
111
        );
202
111
        tokens.append(&mut self.space0.tokenize());
203
111
        tokens.push(Token::SectionHeader(format!("[{}]", self.name())));
204
111
        tokens.append(&mut self.line_terminator0.tokenize());
205
111
        tokens.append(&mut self.value.tokenize());
206
111
        tokens
207
    }
208
}
209

            
210
impl Tokenizable for SectionValue {
211
111
    fn tokenize(&self) -> Vec<Token> {
212
111
        let mut tokens: Vec<Token> = vec![];
213
111
        match self {
214
42
            SectionValue::Asserts(items) => {
215
326
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
216
            }
217
12
            SectionValue::QueryParams(items, _) => {
218
22
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
219
            }
220
3
            SectionValue::BasicAuth(item) => {
221
3
                if let Some(kv) = item {
222
3
                    tokens.append(&mut kv.tokenize());
223
                }
224
            }
225
6
            SectionValue::FormParams(items, _) => {
226
14
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
227
            }
228
6
            SectionValue::MultipartFormData(items, _) => {
229
11
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
230
            }
231
9
            SectionValue::Cookies(items) => {
232
12
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
233
            }
234
9
            SectionValue::Captures(items) => {
235
11
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
236
            }
237
24
            SectionValue::Options(items) => {
238
275
                tokens.append(&mut items.iter().flat_map(|e| e.tokenize()).collect());
239
            }
240
        }
241
111
        tokens
242
    }
243
}
244

            
245
impl Tokenizable for Base64 {
246
15
    fn tokenize(&self) -> Vec<Token> {
247
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("base64,"))];
248
15
        tokens.append(&mut self.space0.tokenize());
249
15
        tokens.push(Token::String(self.encoded.to_string()));
250
15
        tokens.append(&mut self.space1.tokenize());
251
15
        tokens.push(Token::Keyword(String::from(";")));
252
15
        tokens
253
    }
254
}
255

            
256
impl Tokenizable for Hex {
257
9
    fn tokenize(&self) -> Vec<Token> {
258
9
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("hex,"))];
259
9
        tokens.append(&mut self.space0.tokenize());
260
9
        tokens.push(Token::String(self.encoded.to_string()));
261
9
        tokens.append(&mut self.space1.tokenize());
262
9
        tokens.push(Token::Keyword(String::from(";")));
263
9
        tokens
264
    }
265
}
266

            
267
impl Tokenizable for File {
268
15
    fn tokenize(&self) -> Vec<Token> {
269
15
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("file,"))];
270
15
        tokens.append(&mut self.space0.tokenize());
271
15
        tokens.append(&mut self.filename.tokenize());
272
15
        tokens.append(&mut self.space1.tokenize());
273
15
        tokens.push(Token::Keyword(String::from(";")));
274
15
        tokens
275
    }
276
}
277

            
278
impl Tokenizable for KeyValue {
279
123
    fn tokenize(&self) -> Vec<Token> {
280
123
        let mut tokens: Vec<Token> = vec![];
281
123
        tokens.append(
282
123
            &mut self
283
123
                .line_terminators
284
123
                .iter()
285
124
                .flat_map(|e| e.tokenize())
286
123
                .collect(),
287
123
        );
288
123
        tokens.append(&mut self.space0.tokenize());
289
123
        tokens.append(&mut self.key.tokenize());
290
123
        tokens.append(&mut self.space1.tokenize());
291
123
        tokens.push(Token::Colon(String::from(":")));
292
123
        tokens.append(&mut self.space2.tokenize());
293
123
        tokens.append(&mut self.value.tokenize());
294
123
        tokens.append(&mut self.line_terminator0.tokenize());
295
123
        tokens
296
    }
297
}
298

            
299
impl Tokenizable for MultipartParam {
300
9
    fn tokenize(&self) -> Vec<Token> {
301
9
        match self {
302
3
            MultipartParam::Param(key_value) => key_value.tokenize(),
303
6
            MultipartParam::FileParam(file_param) => file_param.tokenize(),
304
        }
305
    }
306
}
307

            
308
impl Tokenizable for FileParam {
309
6
    fn tokenize(&self) -> Vec<Token> {
310
6
        let mut tokens: Vec<Token> = vec![];
311
6
        tokens.append(&mut self.space0.tokenize());
312
6
        tokens.append(&mut self.key.tokenize());
313
6
        tokens.append(&mut self.space1.tokenize());
314
6
        tokens.push(Token::Colon(String::from(":")));
315
6
        tokens.append(&mut self.space2.tokenize());
316
6
        tokens.append(&mut self.value.tokenize());
317
6
        tokens.append(&mut self.line_terminator0.tokenize());
318
6
        tokens
319
    }
320
}
321

            
322
impl Tokenizable for FileValue {
323
6
    fn tokenize(&self) -> Vec<Token> {
324
6
        let mut tokens: Vec<Token> = vec![Token::Keyword("file,".to_string())];
325
6
        tokens.append(&mut self.space0.tokenize());
326
6
        tokens.append(&mut self.filename.tokenize());
327
6
        tokens.append(&mut self.space1.tokenize());
328
6
        tokens.push(Token::Keyword(";".to_string()));
329
6
        tokens.append(&mut self.space2.tokenize());
330
6
        if let Some(content_type) = self.content_type.clone() {
331
3
            tokens.push(Token::String(content_type));
332
        }
333
6
        tokens
334
    }
335
}
336

            
337
impl Tokenizable for Cookie {
338
9
    fn tokenize(&self) -> Vec<Token> {
339
9
        let mut tokens: Vec<Token> = vec![];
340
9
        tokens.append(
341
9
            &mut self
342
9
                .line_terminators
343
9
                .iter()
344
9
                .flat_map(|e| e.tokenize())
345
9
                .collect(),
346
9
        );
347
9
        tokens.append(&mut self.space0.tokenize());
348
9
        tokens.append(&mut self.name.tokenize());
349
9
        tokens.append(&mut self.space1.tokenize());
350
9
        tokens.push(Token::Colon(String::from(":")));
351
9
        tokens.append(&mut self.space2.tokenize());
352
9
        tokens.append(&mut self.value.tokenize());
353
9
        tokens.append(&mut self.line_terminator0.tokenize());
354
9
        tokens
355
    }
356
}
357

            
358
impl Tokenizable for Capture {
359
6
    fn tokenize(&self) -> Vec<Token> {
360
6
        let mut tokens: Vec<Token> = vec![];
361
6
        tokens.append(
362
6
            &mut self
363
6
                .line_terminators
364
6
                .iter()
365
6
                .flat_map(|e| e.tokenize())
366
6
                .collect(),
367
6
        );
368
6
        tokens.append(&mut self.space0.tokenize());
369
6
        tokens.append(&mut self.name.tokenize());
370
6
        tokens.append(&mut self.space1.tokenize());
371
6
        tokens.push(Token::Colon(String::from(":")));
372
6
        tokens.append(&mut self.space2.tokenize());
373
6
        tokens.append(&mut self.query.tokenize());
374
9
        for (space, filter) in &self.filters {
375
3
            tokens.append(&mut space.tokenize());
376
3
            tokens.append(&mut filter.tokenize());
377
        }
378
6
        tokens.append(&mut self.line_terminator0.tokenize());
379
6
        tokens
380
    }
381
}
382

            
383
impl Tokenizable for Assert {
384
312
    fn tokenize(&self) -> Vec<Token> {
385
312
        let mut tokens: Vec<Token> = vec![];
386
312
        tokens.append(
387
312
            &mut self
388
312
                .line_terminators
389
312
                .iter()
390
312
                .flat_map(|e| e.tokenize())
391
312
                .collect(),
392
312
        );
393
312
        tokens.append(&mut self.space0.tokenize());
394
312
        tokens.append(&mut self.query.tokenize());
395
399
        for (space, filter) in &self.filters {
396
87
            tokens.append(&mut space.tokenize());
397
87
            tokens.append(&mut filter.tokenize());
398
        }
399
312
        tokens.append(&mut self.space1.tokenize());
400
312
        // TODO reconvert back your first predicate for jsonpath
401
312
        // so that you can use your firstX predicate for other query
402
312
        tokens.append(&mut self.predicate.tokenize());
403
312
        tokens.append(&mut self.line_terminator0.tokenize());
404
312
        tokens
405
    }
406
}
407

            
408
impl Tokenizable for Query {
409
318
    fn tokenize(&self) -> Vec<Token> {
410
318
        self.value.tokenize()
411
    }
412
}
413

            
414
impl Tokenizable for QueryValue {
415
318
    fn tokenize(&self) -> Vec<Token> {
416
318
        let mut tokens: Vec<Token> = vec![];
417
318
        match self.clone() {
418
6
            QueryValue::Status => tokens.push(Token::QueryType(String::from("status"))),
419
3
            QueryValue::Url => tokens.push(Token::QueryType(String::from("url"))),
420
12
            QueryValue::Header { space0, name } => {
421
12
                tokens.push(Token::QueryType(String::from("header")));
422
12
                tokens.append(&mut space0.tokenize());
423
12
                tokens.append(&mut name.tokenize());
424
            }
425
6
            QueryValue::Cookie { space0, expr } => {
426
6
                tokens.push(Token::QueryType(String::from("cookie")));
427
6
                tokens.append(&mut space0.tokenize());
428
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
429
6
                tokens.append(&mut expr.tokenize());
430
6
                tokens.push(Token::CodeDelimiter("\"".to_string()));
431
            }
432
30
            QueryValue::Body => tokens.push(Token::QueryType(String::from("body"))),
433
3
            QueryValue::Xpath { space0, expr } => {
434
3
                tokens.push(Token::QueryType(String::from("xpath")));
435
3
                tokens.append(&mut space0.tokenize());
436
3
                tokens.append(&mut expr.tokenize());
437
            }
438
189
            QueryValue::Jsonpath { space0, expr } => {
439
189
                tokens.push(Token::QueryType(String::from("jsonpath")));
440
189
                tokens.append(&mut space0.tokenize());
441
189
                tokens.append(&mut expr.tokenize());
442
            }
443
3
            QueryValue::Regex { space0, value } => {
444
3
                tokens.push(Token::QueryType(String::from("regex")));
445
3
                tokens.append(&mut space0.tokenize());
446
3
                tokens.append(&mut value.tokenize());
447
            }
448
9
            QueryValue::Variable { space0, name } => {
449
9
                tokens.push(Token::QueryType(String::from("variable")));
450
9
                tokens.append(&mut space0.tokenize());
451
9
                tokens.append(&mut name.tokenize());
452
            }
453
3
            QueryValue::Duration => tokens.push(Token::QueryType(String::from("duration"))),
454
18
            QueryValue::Bytes => tokens.push(Token::QueryType(String::from("bytes"))),
455
3
            QueryValue::Sha256 => tokens.push(Token::QueryType(String::from("sha256"))),
456
3
            QueryValue::Md5 => tokens.push(Token::QueryType(String::from("md5"))),
457
            QueryValue::Certificate {
458
30
                space0,
459
30
                attribute_name: field,
460
30
            } => {
461
30
                tokens.push(Token::QueryType(String::from("certificate")));
462
30
                tokens.append(&mut space0.tokenize());
463
30
                tokens.append(&mut field.tokenize());
464
            }
465
        }
466
318
        tokens
467
    }
468
}
469

            
470
impl Tokenizable for RegexValue {
471
21
    fn tokenize(&self) -> Vec<Token> {
472
21
        match self {
473
18
            RegexValue::Template(template) => template.tokenize(),
474
3
            RegexValue::Regex(regex) => regex.tokenize(),
475
        }
476
    }
477
}
478

            
479
impl Tokenizable for CookiePath {
480
6
    fn tokenize(&self) -> Vec<Token> {
481
6
        let mut tokens: Vec<Token> = vec![];
482
6
        tokens.append(&mut self.name.tokenize());
483
6
        if let Some(attribute) = self.attribute.clone() {
484
3
            tokens.append(&mut attribute.tokenize());
485
        }
486
6
        tokens
487
    }
488
}
489

            
490
impl Tokenizable for CookieAttribute {
491
3
    fn tokenize(&self) -> Vec<Token> {
492
3
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter("[".to_string())];
493
3
        tokens.append(&mut self.space0.tokenize());
494
3
        tokens.push(Token::String(self.name.value()));
495
3
        tokens.append(&mut self.space1.tokenize());
496
3
        tokens.push(Token::CodeDelimiter("]".to_string()));
497
3
        tokens
498
    }
499
}
500

            
501
impl Tokenizable for CertificateAttributeName {
502
30
    fn tokenize(&self) -> Vec<Token> {
503
30
        let value = match self {
504
3
            CertificateAttributeName::Subject => "Subject",
505
3
            CertificateAttributeName::Issuer => "Issuer",
506
9
            CertificateAttributeName::StartDate => "Start-Date",
507
12
            CertificateAttributeName::ExpireDate => "Expire-Date",
508
3
            CertificateAttributeName::SerialNumber => "Serial-Number",
509
        };
510
30
        vec![
511
30
            Token::StringDelimiter("\"".to_string()),
512
30
            Token::String(value.to_string()),
513
30
            Token::StringDelimiter("\"".to_string()),
514
30
        ]
515
    }
516
}
517

            
518
impl Tokenizable for Predicate {
519
312
    fn tokenize(&self) -> Vec<Token> {
520
312
        let mut tokens: Vec<Token> = vec![];
521
312
        if self.not {
522
3
            tokens.push(Token::Not(String::from("not")));
523
3
            tokens.append(&mut self.space0.tokenize());
524
        }
525
312
        tokens.append(&mut self.predicate_func.tokenize());
526
312
        tokens
527
    }
528
}
529

            
530
impl Tokenizable for PredicateFunc {
531
312
    fn tokenize(&self) -> Vec<Token> {
532
312
        self.value.tokenize()
533
    }
534
}
535

            
536
impl Tokenizable for PredicateFuncValue {
537
312
    fn tokenize(&self) -> Vec<Token> {
538
312
        let mut tokens: Vec<Token> = vec![];
539
312
        let name = self.name().to_string();
540
312
        match self {
541
201
            PredicateFuncValue::Equal { space0, value, .. } => {
542
201
                tokens.push(Token::PredicateType(name));
543
201
                tokens.append(&mut space0.tokenize());
544
201
                tokens.append(&mut value.tokenize());
545
            }
546
9
            PredicateFuncValue::NotEqual { space0, value, .. } => {
547
9
                tokens.push(Token::PredicateType(name));
548
9
                tokens.append(&mut space0.tokenize());
549
9
                tokens.append(&mut value.tokenize());
550
            }
551
9
            PredicateFuncValue::GreaterThan { space0, value, .. } => {
552
9
                tokens.push(Token::PredicateType(name));
553
9
                tokens.append(&mut space0.tokenize());
554
9
                tokens.append(&mut value.tokenize());
555
            }
556
3
            PredicateFuncValue::GreaterThanOrEqual { space0, value, .. } => {
557
3
                tokens.push(Token::PredicateType(name));
558
3
                tokens.append(&mut space0.tokenize());
559
3
                tokens.append(&mut value.tokenize());
560
            }
561
12
            PredicateFuncValue::LessThan { space0, value, .. } => {
562
12
                tokens.push(Token::PredicateType(name));
563
12
                tokens.append(&mut space0.tokenize());
564
12
                tokens.append(&mut value.tokenize());
565
            }
566
3
            PredicateFuncValue::LessThanOrEqual { space0, value, .. } => {
567
3
                tokens.push(Token::PredicateType(name));
568
3
                tokens.append(&mut space0.tokenize());
569
3
                tokens.append(&mut value.tokenize());
570
            }
571
9
            PredicateFuncValue::StartWith { space0, value } => {
572
9
                tokens.push(Token::PredicateType(name));
573
9
                tokens.append(&mut space0.tokenize());
574
9
                tokens.append(&mut value.tokenize());
575
            }
576
6
            PredicateFuncValue::EndWith { space0, value } => {
577
6
                tokens.push(Token::PredicateType(name));
578
6
                tokens.append(&mut space0.tokenize());
579
6
                tokens.append(&mut value.tokenize());
580
            }
581
6
            PredicateFuncValue::Contain { space0, value } => {
582
6
                tokens.push(Token::PredicateType(name));
583
6
                tokens.append(&mut space0.tokenize());
584
6
                tokens.append(&mut value.tokenize());
585
            }
586
3
            PredicateFuncValue::Include { space0, value } => {
587
3
                tokens.push(Token::PredicateType(name));
588
3
                tokens.append(&mut space0.tokenize());
589
3
                tokens.append(&mut value.tokenize());
590
            }
591
6
            PredicateFuncValue::Match { space0, value } => {
592
6
                tokens.push(Token::PredicateType(name));
593
6
                tokens.append(&mut space0.tokenize());
594
6
                tokens.append(&mut value.tokenize());
595
            }
596
3
            PredicateFuncValue::IsInteger => {
597
3
                tokens.push(Token::PredicateType(name));
598
            }
599
3
            PredicateFuncValue::IsFloat => {
600
3
                tokens.push(Token::PredicateType(name));
601
            }
602
3
            PredicateFuncValue::IsBoolean => {
603
3
                tokens.push(Token::PredicateType(name));
604
            }
605
3
            PredicateFuncValue::IsString => {
606
3
                tokens.push(Token::PredicateType(name));
607
            }
608
3
            PredicateFuncValue::IsCollection => {
609
3
                tokens.push(Token::PredicateType(name));
610
            }
611
9
            PredicateFuncValue::IsDate => {
612
9
                tokens.push(Token::PredicateType(name));
613
            }
614
3
            PredicateFuncValue::IsIsoDate => {
615
3
                tokens.push(Token::PredicateType(name));
616
            }
617
12
            PredicateFuncValue::Exist => {
618
12
                tokens.push(Token::PredicateType(name));
619
            }
620
3
            PredicateFuncValue::IsEmpty => {
621
3
                tokens.push(Token::PredicateType(name));
622
            }
623
3
            PredicateFuncValue::IsNumber => {
624
3
                tokens.push(Token::PredicateType(name));
625
            }
626
        }
627
312
        tokens
628
    }
629
}
630

            
631
impl Tokenizable for PredicateValue {
632
267
    fn tokenize(&self) -> Vec<Token> {
633
267
        match self {
634
105
            PredicateValue::String(value) => value.tokenize(),
635
15
            PredicateValue::MultilineString(value) => value.tokenize(),
636
3
            PredicateValue::Bool(value) => vec![Token::Boolean(value.to_string())],
637
3
            PredicateValue::Null => vec![Token::Keyword("null".to_string())],
638
108
            PredicateValue::Number(value) => vec![Token::Number(value.to_string())],
639
3
            PredicateValue::File(value) => value.tokenize(),
640
21
            PredicateValue::Hex(value) => vec![Token::String(value.to_string())],
641
3
            PredicateValue::Base64(value) => value.tokenize(),
642
3
            PredicateValue::Placeholder(value) => value.tokenize(),
643
3
            PredicateValue::Regex(value) => value.tokenize(),
644
        }
645
    }
646
}
647

            
648
impl Tokenizable for MultilineString {
    /// Tokenizes a ```` ``` ````-fenced multiline string: opening fence,
    /// language tag, comma-separated attributes, the body, closing fence.
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::StringDelimiter("```".to_string())];
        tokens.push(Token::Lang(self.lang().to_string()));
        for (i, attribute) in self.attributes.iter().enumerate() {
            // A separating comma is needed before every attribute except the
            // first one when there is no language tag (e.g. ```escape vs
            // ```json,escape).
            if i > 0 || !self.lang().is_empty() {
                tokens.push(Token::StringDelimiter(",".to_string()));
            }
            tokens.append(&mut attribute.tokenize());
        }
        // Text, Json and Xml kinds all wrap a `Text` payload and tokenize
        // identically; GraphQL carries its own structure (query + variables).
        match self {
            MultilineString {
                kind: MultilineStringKind::Text(text),
                ..
            }
            | MultilineString {
                kind: MultilineStringKind::Json(text),
                ..
            }
            | MultilineString {
                kind: MultilineStringKind::Xml(text),
                ..
            } => tokens.append(&mut text.tokenize()),
            MultilineString {
                kind: MultilineStringKind::GraphQl(graphql),
                ..
            } => tokens.append(&mut graphql.tokenize()),
        }
        tokens.push(Token::StringDelimiter("```".to_string()));
        tokens
    }
}
680

            
681
impl Tokenizable for MultilineStringAttribute {
682
3
    fn tokenize(&self) -> Vec<Token> {
683
3
        match self {
684
3
            MultilineStringAttribute::Escape => vec![Token::String("escape".to_string())],
685
            MultilineStringAttribute::NoVariable => vec![Token::String("novariable".to_string())],
686
        }
687
    }
688
}
689

            
690
impl Tokenizable for Text {
691
39
    fn tokenize(&self) -> Vec<Token> {
692
39
        let mut tokens: Vec<Token> = vec![];
693
39
        tokens.append(&mut self.space.tokenize());
694
39
        tokens.append(&mut self.newline.tokenize());
695
39
        tokens.append(&mut self.value.tokenize());
696
39
        tokens
697
    }
698
}
699

            
700
impl Tokenizable for GraphQl {
701
9
    fn tokenize(&self) -> Vec<Token> {
702
9
        let mut tokens: Vec<Token> = vec![];
703
9
        tokens.append(&mut self.space.tokenize());
704
9
        tokens.append(&mut self.newline.tokenize());
705
9
        tokens.append(&mut self.value.tokenize());
706
9
        if let Some(vars) = &self.variables {
707
            tokens.append(&mut vars.tokenize());
708
        }
709
9
        tokens
710
    }
711
}
712

            
713
impl Tokenizable for GraphQlVariables {
714
    fn tokenize(&self) -> Vec<Token> {
715
        let mut tokens: Vec<Token> = vec![];
716
        tokens.push(Token::String("variables".to_string()));
717
        tokens.append(&mut self.space.tokenize());
718
        tokens.append(&mut self.value.tokenize());
719
        tokens.append(&mut self.whitespace.tokenize());
720
        tokens
721
    }
722
}
723

            
724
impl Tokenizable for EncodedString {
725
    fn tokenize(&self) -> Vec<Token> {
726
        let mut tokens: Vec<Token> = vec![];
727
        if self.quotes {
728
            tokens.push(Token::StringDelimiter(
729
                if self.quotes { "\"" } else { "" }.to_string(),
730
            ));
731
        }
732
        tokens.push(Token::String(self.encoded.clone()));
733

            
734
        if self.quotes {
735
            tokens.push(Token::StringDelimiter(
736
                if self.quotes { "\"" } else { "" }.to_string(),
737
            ));
738
        }
739
        tokens
740
    }
741
}
742

            
743
impl Tokenizable for Template {
744
1080
    fn tokenize(&self) -> Vec<Token> {
745
1080
        let mut tokens: Vec<Token> = vec![];
746
1080
        if let Some(d) = self.delimiter {
747
411
            tokens.push(Token::StringDelimiter(d.to_string()));
748
        }
749
2163
        for element in &self.elements {
750
1083
            tokens.append(&mut element.tokenize());
751
        }
752
1080
        if let Some(d) = self.delimiter {
753
411
            tokens.push(Token::StringDelimiter(d.to_string()));
754
        }
755
1080
        tokens
756
    }
757
}
758

            
759
impl Tokenizable for TemplateElement {
760
1083
    fn tokenize(&self) -> Vec<Token> {
761
1083
        match self {
762
1017
            TemplateElement::String { encoded, .. } => {
763
1017
                vec![Token::String(encoded.to_string())]
764
            }
765
66
            TemplateElement::Placeholder(value) => {
766
66
                let mut tokens: Vec<Token> = vec![];
767
66
                tokens.append(&mut value.tokenize());
768
66
                tokens
769
            }
770
        }
771
    }
772
}
773

            
774
impl Tokenizable for Placeholder {
775
141
    fn tokenize(&self) -> Vec<Token> {
776
141
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter(String::from("{{"))];
777
141
        tokens.append(&mut self.space0.tokenize());
778
141
        tokens.append(&mut self.expr.tokenize());
779
141
        tokens.append(&mut self.space1.tokenize());
780
141
        tokens.push(Token::CodeDelimiter(String::from("}}")));
781
141
        tokens
782
    }
783
}
784

            
785
impl Tokenizable for Expr {
    /// An expression tokenizes exactly as its kind (variable or function).
    fn tokenize(&self) -> Vec<Token> {
        self.kind.tokenize()
    }
}
790

            
791
impl Tokenizable for ExprKind {
792
141
    fn tokenize(&self) -> Vec<Token> {
793
141
        match self {
794
135
            ExprKind::Variable(variable) => variable.tokenize(),
795
6
            ExprKind::Function(function) => function.tokenize(),
796
        }
797
    }
798
}
799

            
800
impl Tokenizable for Variable {
    /// A variable reference becomes a single `CodeVariable` token.
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::CodeVariable(self.name.clone())]
    }
}
805

            
806
impl Tokenizable for Function {
807
6
    fn tokenize(&self) -> Vec<Token> {
808
6
        match self {
809
3
            Function::NewDate => vec![Token::CodeVariable("newDate".to_string())],
810
3
            Function::NewUuid => vec![Token::CodeVariable("newUuid".to_string())],
811
        }
812
    }
813
}
814

            
815
impl Tokenizable for Regex {
816
6
    fn tokenize(&self) -> Vec<Token> {
817
6
        let s = str::replace(self.inner.as_str(), "/", "\\/");
818
6
        vec![Token::String(format!("/{s}/"))]
819
    }
820
}
821

            
822
impl Tokenizable for LineTerminator {
823
1500
    fn tokenize(&self) -> Vec<Token> {
824
1500
        let mut tokens: Vec<Token> = vec![];
825
1500
        tokens.append(&mut self.space0.tokenize());
826
1500
        if let Some(comment) = &self.comment {
827
231
            tokens.append(&mut comment.tokenize());
828
        }
829
1500
        tokens.append(&mut self.newline.tokenize());
830
1500
        tokens
831
    }
832
}
833

            
834
impl Tokenizable for Whitespace {
835
6945
    fn tokenize(&self) -> Vec<Token> {
836
6945
        let mut tokens: Vec<Token> = vec![];
837
6945
        if !self.value.is_empty() {
838
3462
            tokens.push(Token::Whitespace(self.value.clone()));
839
        }
840
6945
        tokens
841
    }
842
}
843

            
844
impl Tokenizable for Comment {
845
231
    fn tokenize(&self) -> Vec<Token> {
846
231
        vec![Token::Comment(format!("#{}", self.value.clone()))]
847
    }
848
}
849

            
850
impl Tokenizable for JsonValue {
851
102
    fn tokenize(&self) -> Vec<Token> {
852
102
        let mut tokens: Vec<Token> = vec![];
853
102
        match self {
854
18
            JsonValue::String(s) => {
855
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
856
18
                tokens.append(&mut s.tokenize());
857
18
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
858
            }
859
45
            JsonValue::Number(value) => {
860
45
                tokens.push(Token::Number(value.to_string()));
861
            }
862
3
            JsonValue::Boolean(value) => {
863
3
                tokens.push(Token::Boolean(value.to_string()));
864
            }
865
15
            JsonValue::List { space0, elements } => {
866
15
                tokens.push(Token::CodeDelimiter("[".to_string()));
867
15
                tokens.push(Token::Whitespace(space0.clone()));
868
51
                for (i, element) in elements.iter().enumerate() {
869
51
                    if i > 0 {
870
39
                        tokens.push(Token::CodeDelimiter(",".to_string()));
871
                    }
872
51
                    tokens.append(&mut element.tokenize());
873
                }
874
15
                tokens.push(Token::CodeDelimiter("]".to_string()));
875
            }
876
15
            JsonValue::Object { space0, elements } => {
877
15
                tokens.push(Token::CodeDelimiter("{".to_string()));
878
15
                tokens.push(Token::Whitespace(space0.clone()));
879
45
                for (i, element) in elements.iter().enumerate() {
880
45
                    if i > 0 {
881
33
                        tokens.push(Token::CodeDelimiter(",".to_string()));
882
                    }
883
45
                    tokens.append(&mut element.tokenize());
884
                }
885
15
                tokens.push(Token::CodeDelimiter("}".to_string()));
886
            }
887
3
            JsonValue::Null => {
888
3
                tokens.push(Token::Keyword("null".to_string()));
889
            }
890
3
            JsonValue::Placeholder(exp) => {
891
3
                tokens.append(&mut exp.tokenize());
892
            }
893
        }
894
102
        tokens
895
    }
896
}
897

            
898
impl Tokenizable for JsonListElement {
899
51
    fn tokenize(&self) -> Vec<Token> {
900
51
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
901
51
        tokens.append(&mut self.value.tokenize());
902
51
        tokens.push(Token::Whitespace(self.space1.clone()));
903
51
        tokens
904
    }
905
}
906

            
907
impl Tokenizable for JsonObjectElement {
908
45
    fn tokenize(&self) -> Vec<Token> {
909
45
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
910
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
911
45
        tokens.push(Token::String(self.name.to_string()));
912
45
        tokens.push(Token::StringDelimiter("\"".to_string()));
913
45
        tokens.push(Token::Whitespace(self.space1.clone()));
914
45
        tokens.push(Token::CodeDelimiter(":".to_string()));
915
45
        tokens.push(Token::Whitespace(self.space2.clone()));
916
45
        tokens.append(&mut self.value.tokenize());
917
45
        tokens.push(Token::Whitespace(self.space3.clone()));
918
45
        tokens
919
    }
920
}
921

            
922
impl Tokenizable for EntryOption {
923
267
    fn tokenize(&self) -> Vec<Token> {
924
267
        let mut tokens: Vec<Token> = vec![];
925
267
        tokens.append(
926
267
            &mut self
927
267
                .line_terminators
928
267
                .iter()
929
268
                .flat_map(|e| e.tokenize())
930
267
                .collect(),
931
267
        );
932
267
        tokens.append(&mut self.space0.tokenize());
933
267
        tokens.push(Token::String(self.kind.name().to_string()));
934
267
        tokens.append(&mut self.space1.tokenize());
935
267
        tokens.push(Token::Colon(String::from(":")));
936
267
        tokens.append(&mut self.space2.tokenize());
937
267
        tokens.append(&mut self.kind.tokenize());
938
267
        tokens.append(&mut self.line_terminator0.tokenize());
939
267
        tokens
940
    }
941
}
942

            
943
impl Tokenizable for OptionKind {
    /// Tokenizes the value part of an `[Options]` entry by delegating to the
    /// payload of each kind. The match is deliberately exhaustive (no `_`):
    /// adding an option variant without a tokenizer is a compile error.
    fn tokenize(&self) -> Vec<Token> {
        match self {
            OptionKind::AwsSigV4(value) => value.tokenize(),
            OptionKind::CaCertificate(filename) => filename.tokenize(),
            OptionKind::ClientCert(filename) => filename.tokenize(),
            OptionKind::ClientKey(filename) => filename.tokenize(),
            OptionKind::Compressed(value) => value.tokenize(),
            OptionKind::ConnectTo(value) => value.tokenize(),
            OptionKind::ConnectTimeout(value) => value.tokenize(),
            OptionKind::Delay(value) => value.tokenize(),
            OptionKind::FollowLocation(value) => value.tokenize(),
            OptionKind::FollowLocationTrusted(value) => value.tokenize(),
            OptionKind::Http10(value) => value.tokenize(),
            OptionKind::Http11(value) => value.tokenize(),
            OptionKind::Http2(value) => value.tokenize(),
            OptionKind::Http3(value) => value.tokenize(),
            OptionKind::Insecure(value) => value.tokenize(),
            OptionKind::IpV4(value) => value.tokenize(),
            OptionKind::IpV6(value) => value.tokenize(),
            OptionKind::LimitRate(value) => value.tokenize(),
            OptionKind::MaxRedirect(value) => value.tokenize(),
            OptionKind::NetRc(value) => value.tokenize(),
            OptionKind::NetRcFile(filename) => filename.tokenize(),
            OptionKind::NetRcOptional(value) => value.tokenize(),
            OptionKind::Output(filename) => filename.tokenize(),
            OptionKind::PathAsIs(value) => value.tokenize(),
            OptionKind::Proxy(value) => value.tokenize(),
            OptionKind::Repeat(value) => value.tokenize(),
            OptionKind::Resolve(value) => value.tokenize(),
            OptionKind::Retry(value) => value.tokenize(),
            OptionKind::RetryInterval(value) => value.tokenize(),
            OptionKind::Skip(value) => value.tokenize(),
            OptionKind::UnixSocket(value) => value.tokenize(),
            OptionKind::User(value) => value.tokenize(),
            OptionKind::Variable(value) => value.tokenize(),
            OptionKind::Verbose(value) => value.tokenize(),
            OptionKind::VeryVerbose(value) => value.tokenize(),
        }
    }
}
984

            
985
impl Tokenizable for BooleanOption {
986
105
    fn tokenize(&self) -> Vec<Token> {
987
105
        match self {
988
57
            BooleanOption::Literal(value) => vec![Token::Boolean(value.to_string())],
989
48
            BooleanOption::Placeholder(expr) => expr.tokenize(),
990
        }
991
    }
992
}
993

            
994
impl Tokenizable for NaturalOption {
995
6
    fn tokenize(&self) -> Vec<Token> {
996
6
        match self {
997
3
            NaturalOption::Literal(value) => value.tokenize(),
998
3
            NaturalOption::Placeholder(expr) => expr.tokenize(),
999
        }
    }
}
impl Tokenizable for U64 {
    /// An unsigned integer literal tokenizes to a single number token.
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::Number(self.to_string())]
    }
}
impl Tokenizable for I64 {
    /// A signed integer literal tokenizes to a single number token.
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::Number(self.to_string())]
    }
}
impl Tokenizable for CountOption {
30
    fn tokenize(&self) -> Vec<Token> {
30
        match self {
21
            CountOption::Literal(retry) => retry.tokenize(),
9
            CountOption::Placeholder(expr) => expr.tokenize(),
        }
    }
}
impl Tokenizable for Count {
21
    fn tokenize(&self) -> Vec<Token> {
21
        match self {
15
            Count::Finite(n) => vec![Token::Number(n.to_string())],
6
            Count::Infinite => vec![Token::Number("-1".to_string())],
        }
    }
}
impl Tokenizable for DurationOption {
30
    fn tokenize(&self) -> Vec<Token> {
30
        match self {
21
            DurationOption::Literal(value) => value.tokenize(),
9
            DurationOption::Placeholder(expr) => expr.tokenize(),
        }
    }
}
impl Tokenizable for Duration {
21
    fn tokenize(&self) -> Vec<Token> {
21
        let mut tokens = vec![Token::Number(self.value.to_string())];
21
        if let Some(unit) = self.unit {
21
            tokens.push(Token::Unit(unit.to_string()));
        }
21
        tokens
    }
}
impl Tokenizable for VariableDefinition {
27
    fn tokenize(&self) -> Vec<Token> {
27
        let mut tokens: Vec<Token> = vec![Token::String(self.name.clone())];
27
        tokens.append(&mut self.space0.tokenize());
27
        tokens.push(Token::Keyword("=".to_string()));
27
        tokens.append(&mut self.space1.tokenize());
27
        tokens.append(&mut self.value.tokenize());
27
        tokens
    }
}
impl Tokenizable for VariableValue {
27
    fn tokenize(&self) -> Vec<Token> {
27
        match self {
3
            VariableValue::Null => vec![Token::Keyword("null".to_string())],
3
            VariableValue::Bool(v) => vec![Token::Boolean(v.to_string())],
6
            VariableValue::Number(v) => vec![Token::Number(v.to_string())],
15
            VariableValue::String(v) => v.tokenize(),
        }
    }
}
impl Tokenizable for Filter {
90
    fn tokenize(&self) -> Vec<Token> {
90
        match self.value.clone() {
15
            FilterValue::Count => vec![Token::FilterType(String::from("count"))],
3
            FilterValue::DaysAfterNow => vec![Token::FilterType(String::from("daysAfterNow"))],
6
            FilterValue::DaysBeforeNow => vec![Token::FilterType(String::from("daysBeforeNow"))],
6
            FilterValue::Decode { space0, encoding } => {
6
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("decode"))];
6
                tokens.append(&mut space0.tokenize());
6
                tokens.append(&mut encoding.tokenize());
6
                tokens
            }
9
            FilterValue::Format { space0, fmt } => {
9
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("format"))];
9
                tokens.append(&mut space0.tokenize());
9
                tokens.append(&mut fmt.tokenize());
9
                tokens
            }
3
            FilterValue::HtmlEscape => vec![Token::FilterType(String::from("htmlEscape"))],
            FilterValue::HtmlUnescape => {
3
                vec![Token::FilterType(String::from("htmlUnescape"))]
            }
3
            FilterValue::JsonPath { space0, expr } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("jsonpath"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
3
                tokens
            }
3
            FilterValue::Nth { space0, n } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("nth"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.push(Token::Number(n.to_string()));
3
                tokens
            }
3
            FilterValue::Regex { space0, value } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("regex"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut value.tokenize());
3
                tokens
            }
            FilterValue::Replace {
15
                space0,
15
                old_value,
15
                space1,
15
                new_value,
15
            } => {
15
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("replace"))];
15
                tokens.append(&mut space0.tokenize());
15
                tokens.append(&mut old_value.tokenize());
15
                tokens.append(&mut space1.tokenize());
15
                tokens.append(&mut new_value.tokenize());
15
                tokens
            }
3
            FilterValue::UrlEncode => vec![Token::FilterType(String::from("urlEncode"))],
3
            FilterValue::UrlDecode => vec![Token::FilterType(String::from("urlDecode"))],
3
            FilterValue::Split { space0, sep } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("split"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut sep.tokenize());
3
                tokens
            }
3
            FilterValue::ToDate { space0, fmt } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("toDate"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut fmt.tokenize());
3
                tokens
            }
3
            FilterValue::ToFloat => vec![Token::FilterType(String::from("toFloat"))],
3
            FilterValue::ToInt => vec![Token::FilterType(String::from("toInt"))],
3
            FilterValue::XPath { space0, expr } => {
3
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("xpath"))];
3
                tokens.append(&mut space0.tokenize());
3
                tokens.append(&mut expr.tokenize());
3
                tokens
            }
        }
    }
}