/*
* Hurl (https://hurl.dev)
* Copyright (C) 2024 Orange
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
 *
 *          http://www.apache.org/licenses/LICENSE-2.0
 *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use hurl_core::ast::*;
use hurl_core::typing::{Count, Duration};
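
/// A lexical token of a Hurl file.
///
/// Each variant keeps the exact source text it was produced from, so the
/// token stream can be rendered back (for instance with syntax highlighting)
/// without losing information.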
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
Method(String),
Version(String),
Status(String),
SectionHeader(String),
QueryType(String),
PredicateType(String),
FilterType(String),
Not(String),
Keyword(String),

    // Primitives
Whitespace(String),
Comment(String),
Value(String),
Colon(String),
StringDelimiter(String),
Boolean(String),
Number(String),
String(String),
CodeDelimiter(String),
CodeVariable(String),
Lang(String),
Unit(String),
}
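
/// Converts an AST node to a flat list of [`Token`]s.
///
/// A rough sketch of the expected output (the exact whitespace and string
/// tokens depend on how the file was parsed):
///
/// ```text
/// GET https://example.org
/// ```
///
/// tokenizes to something like `Method("GET")`, `Whitespace(" ")`,
/// `String("https://example.org")`, followed by the line terminator tokens.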
pub trait Tokenizable {
    fn tokenize(&self) -> Vec<Token>;
}
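
// The implementations below walk the AST in source order, emitting tokens
// for keywords and values as well as for whitespace and comments, so that
// the original file can be reconstructed from the token stream.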
impl Tokenizable for HurlFile {
fn tokenize(&self) -> Vec<Token> {
let mut tokens: Vec<Token> = vec![];
tokens.append(&mut self.entries.iter().flat_map(|e| e.tokenize()).collect());
tokens.append(
&mut self
.line_terminators
.iter()
.flat_map(|e| e.tokenize())
.collect(),
);
        tokens
    }
}

impl Tokenizable for Entry {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.request.tokenize());
        if let Some(response) = &self.response {
            tokens.append(&mut response.tokenize());
        }
        tokens
    }
}

impl Tokenizable for Request {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
tokens.append(&mut self.space0.tokenize());
tokens.append(&mut self.method.tokenize());
tokens.append(&mut self.space1.tokenize());
tokens.append(&mut self.url.tokenize());
tokens.append(&mut self.line_terminator0.tokenize());
tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
        if let Some(body) = &self.body {
            tokens.append(&mut body.tokenize());
        }
        tokens
    }
}

impl Tokenizable for Method {
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::Method(self.to_string())]
    }
}

impl Tokenizable for Response {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
tokens.append(&mut self.version.tokenize());
tokens.append(&mut self.status.tokenize());
        if let Some(body) = &self.body {
            tokens.append(&mut body.tokenize());
        }
        tokens
    }
}

impl Tokenizable for Status {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        match &self.value {
            StatusValue::Any => tokens.push(Token::Status("*".to_string())),
            StatusValue::Specific(v) => tokens.push(Token::Status(v.to_string())),
        }
        tokens
    }
}

impl Tokenizable for Version {
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::Version(self.value.to_string())]
    }
}

impl Tokenizable for Body {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.value.tokenize());
        tokens
    }
}

impl Tokenizable for Bytes {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        match self {
Bytes::Json(value) => tokens.append(&mut value.tokenize()),
Bytes::Xml(value) => tokens.push(Token::String(value.to_string())),
Bytes::MultilineString(value) => tokens.append(&mut value.tokenize()),
Bytes::OnelineString(value) => tokens.append(&mut value.tokenize()),
Bytes::Base64(value) => tokens.append(&mut value.tokenize()),
Bytes::Hex(value) => tokens.append(&mut value.tokenize()),
            Bytes::File(value) => tokens.append(&mut value.tokenize()),
        }
        tokens
    }
}

impl Tokenizable for Section {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.push(Token::SectionHeader(format!("[{}]", self.name())));
        tokens.append(&mut self.value.tokenize());
        tokens
    }
}

impl Tokenizable for SectionValue {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            SectionValue::Asserts(items) => items.iter().flat_map(|e| e.tokenize()).collect(),
            SectionValue::QueryParams(items, _) | SectionValue::FormParams(items, _) => {
                items.iter().flat_map(|e| e.tokenize()).collect()
            }
            SectionValue::BasicAuth(item) => match item {
                Some(kv) => kv.tokenize(),
                None => vec![],
            },
            SectionValue::MultipartFormData(items, _) => {
                items.iter().flat_map(|e| e.tokenize()).collect()
            }
            SectionValue::Cookies(items) => items.iter().flat_map(|e| e.tokenize()).collect(),
            SectionValue::Captures(items) => items.iter().flat_map(|e| e.tokenize()).collect(),
            SectionValue::Options(items) => items.iter().flat_map(|e| e.tokenize()).collect(),
        }
    }
}

impl Tokenizable for Base64 {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("base64,"))];
        tokens.push(Token::String(self.encoded.to_string()));
        tokens.push(Token::Keyword(String::from(";")));
        tokens
    }
}

impl Tokenizable for Hex {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("hex,"))];
        tokens.push(Token::String(self.encoded.to_string()));
        tokens.push(Token::Keyword(String::from(";")));
        tokens
    }
}

impl Tokenizable for File {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::Keyword(String::from("file,"))];
        tokens.append(&mut self.filename.tokenize());
        tokens.push(Token::Keyword(String::from(";")));
        tokens
    }
}

impl Tokenizable for KeyValue {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.key.tokenize());
        tokens.push(Token::Colon(String::from(":")));
        tokens.append(&mut self.space2.tokenize());
        tokens.append(&mut self.value.tokenize());
        tokens
    }
}

impl Tokenizable for MultipartParam {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            MultipartParam::Param(key_value) => key_value.tokenize(),
            MultipartParam::FileParam(file_param) => file_param.tokenize(),
        }
    }
}

impl Tokenizable for FileParam {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.key.tokenize());
        tokens.push(Token::Colon(String::from(":")));
        tokens.append(&mut self.space2.tokenize());
        tokens.append(&mut self.value.tokenize());
        tokens
    }
}

impl Tokenizable for FileValue {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::Keyword("file,".to_string())];
        tokens.append(&mut self.filename.tokenize());
        tokens.push(Token::Keyword(";".to_string()));
        if let Some(content_type) = self.content_type.clone() {
            tokens.push(Token::String(content_type));
        }
        tokens
    }
}

impl Tokenizable for Cookie {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.name.tokenize());
        tokens.push(Token::Colon(String::from(":")));
        tokens.append(&mut self.value.tokenize());
        tokens
    }
}

impl Tokenizable for Capture {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.name.tokenize());
        tokens.push(Token::Colon(String::from(":")));
        tokens.append(&mut self.query.tokenize());
        for (space, filter) in &self.filters {
            tokens.append(&mut space.tokenize());
            tokens.append(&mut filter.tokenize());
        }
        tokens
    }
}

impl Tokenizable for Assert {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.query.tokenize());
        for (space, filter) in &self.filters {
            tokens.append(&mut space.tokenize());
            tokens.append(&mut filter.tokenize());
        }
        // TODO: reconvert the first predicate back for jsonpath
        // so that the firstX predicate can be used for other queries.
        tokens.append(&mut self.predicate.tokenize());
        tokens
    }
}

impl Tokenizable for Query {
    fn tokenize(&self) -> Vec<Token> {
        self.value.tokenize()
    }
}

impl Tokenizable for QueryValue {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        match self {
            QueryValue::Status => tokens.push(Token::QueryType(String::from("status"))),
            QueryValue::Url => tokens.push(Token::QueryType(String::from("url"))),
            QueryValue::Header { space0, name } => {
                tokens.push(Token::QueryType(String::from("header")));
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut name.tokenize());
            }
            QueryValue::Cookie { space0, expr } => {
                tokens.push(Token::QueryType(String::from("cookie")));
                tokens.append(&mut space0.tokenize());
                tokens.push(Token::CodeDelimiter("\"".to_string()));
                tokens.append(&mut expr.tokenize());
                tokens.push(Token::CodeDelimiter("\"".to_string()));
            }
            QueryValue::Body => tokens.push(Token::QueryType(String::from("body"))),
            QueryValue::Xpath { space0, expr } => {
                tokens.push(Token::QueryType(String::from("xpath")));
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut expr.tokenize());
            }
            QueryValue::Jsonpath { space0, expr } => {
                tokens.push(Token::QueryType(String::from("jsonpath")));
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut expr.tokenize());
            }
            QueryValue::Regex { space0, value } => {
                tokens.push(Token::QueryType(String::from("regex")));
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut value.tokenize());
            }
            QueryValue::Variable { space0, name } => {
                tokens.push(Token::QueryType(String::from("variable")));
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut name.tokenize());
            }
            QueryValue::Duration => tokens.push(Token::QueryType(String::from("duration"))),
            QueryValue::Bytes => tokens.push(Token::QueryType(String::from("bytes"))),
            QueryValue::Sha256 => tokens.push(Token::QueryType(String::from("sha256"))),
            QueryValue::Md5 => tokens.push(Token::QueryType(String::from("md5"))),
            QueryValue::Certificate {
                space0,
                attribute_name: field,
            } => {
                tokens.push(Token::QueryType(String::from("certificate")));
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut field.tokenize());
            }
        }
        tokens
    }
}

impl Tokenizable for RegexValue {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            RegexValue::Template(template) => template.tokenize(),
            RegexValue::Regex(regex) => regex.tokenize(),
        }
    }
}

impl Tokenizable for CookiePath {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.name.tokenize());
        if let Some(attribute) = &self.attribute {
            tokens.append(&mut attribute.tokenize());
        }
        tokens
    }
}

impl Tokenizable for CookieAttribute {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter("[".to_string())];
        tokens.push(Token::String(self.name.value()));
        tokens.push(Token::CodeDelimiter("]".to_string()));
        tokens
    }
}

impl Tokenizable for CertificateAttributeName {
    fn tokenize(&self) -> Vec<Token> {
let value = match self {
CertificateAttributeName::Subject => "Subject",
CertificateAttributeName::Issuer => "Issuer",
CertificateAttributeName::StartDate => "Start-Date",
CertificateAttributeName::ExpireDate => "Expire-Date",
CertificateAttributeName::SerialNumber => "Serial-Number",
};
        vec![
            Token::StringDelimiter("\"".to_string()),
            Token::String(value.to_string()),
            Token::StringDelimiter("\"".to_string()),
        ]
    }
}
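
// Predicates: an optional leading `not`, then the predicate function and,
// for comparison predicates, the expected value.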
impl Tokenizable for Predicate {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        if self.not {
            tokens.push(Token::Not(String::from("not")));
            tokens.append(&mut self.space0.tokenize());
        }
        tokens.append(&mut self.predicate_func.tokenize());
        tokens
    }
}

impl Tokenizable for PredicateFunc {
    fn tokenize(&self) -> Vec<Token> {
        self.value.tokenize()
    }
}

impl Tokenizable for PredicateFuncValue {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        match self {
            PredicateFuncValue::Equal { space0, value, .. }
            | PredicateFuncValue::NotEqual { space0, value, .. }
            | PredicateFuncValue::GreaterThan { space0, value, .. }
            | PredicateFuncValue::GreaterThanOrEqual { space0, value, .. }
            | PredicateFuncValue::LessThan { space0, value, .. }
            | PredicateFuncValue::LessThanOrEqual { space0, value, .. }
            | PredicateFuncValue::StartWith { space0, value, .. }
            | PredicateFuncValue::EndWith { space0, value, .. }
            | PredicateFuncValue::Contain { space0, value, .. }
            | PredicateFuncValue::Include { space0, value, .. }
            | PredicateFuncValue::Match { space0, value, .. } => {
                tokens.push(Token::PredicateType(self.name()));
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut value.tokenize());
            }
            PredicateFuncValue::IsInteger
            | PredicateFuncValue::IsFloat
            | PredicateFuncValue::IsBoolean
            | PredicateFuncValue::IsString
            | PredicateFuncValue::IsCollection
            | PredicateFuncValue::IsDate
            | PredicateFuncValue::IsIsoDate
            | PredicateFuncValue::Exist
            | PredicateFuncValue::IsEmpty
            | PredicateFuncValue::IsNumber => {
                tokens.push(Token::PredicateType(self.name()));
            }
        }
        tokens
    }
}

impl Tokenizable for PredicateValue {
    fn tokenize(&self) -> Vec<Token> {
        match self {
PredicateValue::String(value) => value.tokenize(),
PredicateValue::MultilineString(value) => value.tokenize(),
PredicateValue::Bool(value) => vec![Token::Boolean(value.to_string())],
PredicateValue::Null => vec![Token::Keyword("null".to_string())],
PredicateValue::Number(value) => vec![Token::Number(value.to_string())],
PredicateValue::File(value) => value.tokenize(),
PredicateValue::Hex(value) => vec![Token::String(value.to_string())],
PredicateValue::Base64(value) => value.tokenize(),
PredicateValue::Placeholder(value) => value.tokenize(),
            PredicateValue::Regex(value) => value.tokenize(),
        }
    }
}
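
// Multiline strings are fenced with ``` and may carry a language hint
// (`json`, `xml`, `graphql`, ...) plus attributes such as `escape`.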
impl Tokenizable for MultilineString {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::StringDelimiter("```".to_string())];
        tokens.push(Token::Lang(self.lang().to_string()));
        for (i, attribute) in self.attributes.iter().enumerate() {
            if i > 0 || !self.lang().is_empty() {
                tokens.push(Token::StringDelimiter(",".to_string()));
            }
            tokens.append(&mut attribute.tokenize());
        }
        match self {
            MultilineString {
                kind: MultilineStringKind::Text(text),
                ..
            }
            | MultilineString {
                kind: MultilineStringKind::Json(text),
                ..
            }
            | MultilineString {
                kind: MultilineStringKind::Xml(text),
                ..
            } => tokens.append(&mut text.tokenize()),
            MultilineString {
                kind: MultilineStringKind::GraphQl(graphql),
                ..
            } => tokens.append(&mut graphql.tokenize()),
        }
        tokens.push(Token::StringDelimiter("```".to_string()));
        tokens
    }
}

impl Tokenizable for MultilineStringAttribute {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            MultilineStringAttribute::Escape => vec![Token::String("escape".to_string())],
            MultilineStringAttribute::NoVariable => vec![Token::String("novariable".to_string())],
        }
    }
}

impl Tokenizable for Text {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.space.tokenize());
        tokens.append(&mut self.newline.tokenize());
        tokens.append(&mut self.value.tokenize());
        tokens
    }
}

impl Tokenizable for GraphQl {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.value.tokenize());
        if let Some(vars) = &self.variables {
            tokens.append(&mut vars.tokenize());
        }
        tokens
    }
}

impl Tokenizable for GraphQlVariables {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.push(Token::String("variables".to_string()));
        tokens.append(&mut self.value.tokenize());
        tokens.append(&mut self.whitespace.tokenize());
        tokens
    }
}

impl Tokenizable for EncodedString {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        if self.quotes {
            tokens.push(Token::StringDelimiter("\"".to_string()));
        }
        tokens.push(Token::String(self.encoded.clone()));
        if self.quotes {
            tokens.push(Token::StringDelimiter("\"".to_string()));
        }
        tokens
    }
}
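
// Templates are sequences of literal chunks and `{{expr}}` placeholders;
// both are rendered back verbatim, including the optional quote delimiter.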
impl Tokenizable for Template {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        if let Some(d) = self.delimiter {
            tokens.push(Token::StringDelimiter(d.to_string()));
        }
        for element in &self.elements {
            tokens.append(&mut element.tokenize());
        }
        if let Some(d) = self.delimiter {
            tokens.push(Token::StringDelimiter(d.to_string()));
        }
        tokens
    }
}

impl Tokenizable for TemplateElement {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            TemplateElement::String { encoded, .. } => vec![Token::String(encoded.to_string())],
            TemplateElement::Placeholder(value) => value.tokenize(),
        }
    }
}

impl Tokenizable for Placeholder {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::CodeDelimiter(String::from("{{"))];
        tokens.append(&mut self.expr.tokenize());
        tokens.push(Token::CodeDelimiter(String::from("}}")));
        tokens
    }
}

impl Tokenizable for Expr {
    fn tokenize(&self) -> Vec<Token> {
        self.kind.tokenize()
    }
}

impl Tokenizable for ExprKind {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            ExprKind::Variable(variable) => variable.tokenize(),
            ExprKind::Function(function) => function.tokenize(),
        }
    }
}

impl Tokenizable for Variable {
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::CodeVariable(self.name.clone())]
    }
}

impl Tokenizable for Function {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            Function::NewUuid => vec![Token::CodeVariable("newUuid".to_string())],
        }
    }
}

impl Tokenizable for Regex {
    fn tokenize(&self) -> Vec<Token> {
        // Escape forward slashes so the regex can be rendered as /.../
        let s = str::replace(self.inner.as_str(), "/", "\\/");
        vec![Token::String(format!("/{s}/"))]
    }
}

impl Tokenizable for LineTerminator {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.append(&mut self.space0.tokenize());
        if let Some(comment) = &self.comment {
            tokens.append(&mut comment.tokenize());
        }
        tokens.append(&mut self.newline.tokenize());
        tokens
    }
}

impl Tokenizable for Whitespace {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        if !self.value.is_empty() {
            tokens.push(Token::Whitespace(self.value.clone()));
        }
        tokens
    }
}

impl Tokenizable for Comment {
    fn tokenize(&self) -> Vec<Token> {
        vec![Token::Comment(format!("#{}", self.value))]
    }
}
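
// JSON bodies are tokenized structurally: brackets, braces and commas become
// code delimiters while literals keep their own token kinds.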
impl Tokenizable for JsonValue {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        match self {
            JsonValue::String(s) => {
                //tokens.push(Token::CodeDelimiter("\"".to_string()));
                tokens.append(&mut s.tokenize());
            }
            JsonValue::Number(value) => tokens.push(Token::Number(value.to_string())),
            JsonValue::Boolean(value) => tokens.push(Token::Boolean(value.to_string())),
            JsonValue::List { space0, elements } => {
                tokens.push(Token::CodeDelimiter("[".to_string()));
                tokens.push(Token::Whitespace(space0.clone()));
                for (i, element) in elements.iter().enumerate() {
                    if i > 0 {
                        tokens.push(Token::CodeDelimiter(",".to_string()));
                    }
                    tokens.append(&mut element.tokenize());
                }
                tokens.push(Token::CodeDelimiter("]".to_string()));
            }
            JsonValue::Object { space0, elements } => {
                tokens.push(Token::CodeDelimiter("{".to_string()));
                tokens.push(Token::Whitespace(space0.clone()));
                for (i, element) in elements.iter().enumerate() {
                    if i > 0 {
                        tokens.push(Token::CodeDelimiter(",".to_string()));
                    }
                    tokens.append(&mut element.tokenize());
                }
                tokens.push(Token::CodeDelimiter("}".to_string()));
            }
            JsonValue::Null => tokens.push(Token::Keyword("null".to_string())),
            JsonValue::Placeholder(exp) => tokens.append(&mut exp.tokenize()),
        }
        tokens
    }
}

impl Tokenizable for JsonListElement {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
        tokens.append(&mut self.value.tokenize());
        tokens.push(Token::Whitespace(self.space1.clone()));
        tokens
    }
}

impl Tokenizable for JsonObjectElement {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::Whitespace(self.space0.clone())];
        tokens.push(Token::StringDelimiter("\"".to_string()));
        tokens.push(Token::String(self.name.to_string()));
        tokens.push(Token::StringDelimiter("\"".to_string()));
        tokens.push(Token::Whitespace(self.space1.clone()));
        tokens.push(Token::CodeDelimiter(":".to_string()));
        tokens.push(Token::Whitespace(self.space2.clone()));
        tokens.append(&mut self.value.tokenize());
        tokens.push(Token::Whitespace(self.space3.clone()));
        tokens
    }
}

impl Tokenizable for EntryOption {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        tokens.push(Token::String(self.kind.name().to_string()));
        tokens.push(Token::Colon(String::from(":")));
        tokens.append(&mut self.kind.tokenize());
        tokens
    }
}
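
// Each option in an [Options] section delegates to its typed value: booleans,
// counts, durations and filenames each know how to tokenize themselves.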
impl Tokenizable for OptionKind {
    fn tokenize(&self) -> Vec<Token> {
        match self {
OptionKind::AwsSigV4(value) => value.tokenize(),
OptionKind::CaCertificate(filename) => filename.tokenize(),
OptionKind::ClientCert(filename) => filename.tokenize(),
OptionKind::ClientKey(filename) => filename.tokenize(),
OptionKind::Compressed(value) => value.tokenize(),
OptionKind::ConnectTo(value) => value.tokenize(),
OptionKind::ConnectTimeout(value) => value.tokenize(),
OptionKind::Delay(value) => value.tokenize(),
OptionKind::FollowLocation(value) => value.tokenize(),
OptionKind::FollowLocationTrusted(value) => value.tokenize(),
OptionKind::Http10(value) => value.tokenize(),
OptionKind::Http11(value) => value.tokenize(),
OptionKind::Http2(value) => value.tokenize(),
OptionKind::Http3(value) => value.tokenize(),
OptionKind::Insecure(value) => value.tokenize(),
OptionKind::IpV4(value) => value.tokenize(),
OptionKind::IpV6(value) => value.tokenize(),
OptionKind::LimitRate(value) => value.tokenize(),
OptionKind::MaxRedirect(value) => value.tokenize(),
OptionKind::NetRc(value) => value.tokenize(),
OptionKind::NetRcFile(filename) => filename.tokenize(),
OptionKind::NetRcOptional(value) => value.tokenize(),
OptionKind::Output(filename) => filename.tokenize(),
OptionKind::PathAsIs(value) => value.tokenize(),
OptionKind::Proxy(value) => value.tokenize(),
OptionKind::Repeat(value) => value.tokenize(),
OptionKind::Resolve(value) => value.tokenize(),
OptionKind::Retry(value) => value.tokenize(),
OptionKind::RetryInterval(value) => value.tokenize(),
OptionKind::Skip(value) => value.tokenize(),
OptionKind::UnixSocket(value) => value.tokenize(),
OptionKind::User(value) => value.tokenize(),
OptionKind::Variable(value) => value.tokenize(),
OptionKind::Verbose(value) => value.tokenize(),
            OptionKind::VeryVerbose(value) => value.tokenize(),
        }
    }
}

impl Tokenizable for BooleanOption {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            BooleanOption::Literal(value) => vec![Token::Boolean(value.to_string())],
            BooleanOption::Placeholder(expr) => expr.tokenize(),
        }
    }
}

impl Tokenizable for NaturalOption {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            NaturalOption::Literal(value) => vec![Token::Number(value.to_string())],
            NaturalOption::Placeholder(expr) => expr.tokenize(),
        }
    }
}

impl Tokenizable for CountOption {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            CountOption::Literal(retry) => retry.tokenize(),
            CountOption::Placeholder(expr) => expr.tokenize(),
        }
    }
}

impl Tokenizable for Count {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            Count::Finite(n) => vec![Token::Number(n.to_string())],
            Count::Infinite => vec![Token::Number("-1".to_string())],
        }
    }
}

impl Tokenizable for DurationOption {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            DurationOption::Literal(value) => value.tokenize(),
            DurationOption::Placeholder(expr) => expr.tokenize(),
        }
    }
}

impl Tokenizable for Duration {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens = vec![Token::Number(self.value.to_string())];
        if let Some(unit) = self.unit {
            tokens.push(Token::Unit(unit.to_string()));
        }
        tokens
    }
}

impl Tokenizable for VariableDefinition {
    fn tokenize(&self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![Token::String(self.name.clone())];
        tokens.push(Token::Keyword("=".to_string()));
        tokens.append(&mut self.value.tokenize());
        tokens
    }
}

impl Tokenizable for VariableValue {
    fn tokenize(&self) -> Vec<Token> {
        match self {
            VariableValue::Null => vec![Token::Keyword("null".to_string())],
            VariableValue::Bool(v) => vec![Token::Boolean(v.to_string())],
            VariableValue::Number(v) => vec![Token::Number(v.to_string())],
            VariableValue::String(v) => v.tokenize(),
        }
    }
}
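
// Filters transform a queried value before captures and asserts are applied;
// each filter emits its name followed by any arguments.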
impl Tokenizable for Filter {
    fn tokenize(&self) -> Vec<Token> {
        match &self.value {
            FilterValue::Count => vec![Token::FilterType(String::from("count"))],
            FilterValue::DaysAfterNow => vec![Token::FilterType(String::from("daysAfterNow"))],
            FilterValue::DaysBeforeNow => vec![Token::FilterType(String::from("daysBeforeNow"))],
            FilterValue::Decode { space0, encoding } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("decode"))];
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut encoding.tokenize());
                tokens
            }
            FilterValue::Format { space0, fmt } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("format"))];
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut fmt.tokenize());
                tokens
            }
            FilterValue::HtmlEscape => vec![Token::FilterType(String::from("htmlEscape"))],
            FilterValue::HtmlUnescape => vec![Token::FilterType(String::from("htmlUnescape"))],
            FilterValue::JsonPath { space0, expr } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("jsonpath"))];
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut expr.tokenize());
                tokens
            }
            FilterValue::Nth { space0, n } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("nth"))];
                tokens.append(&mut space0.tokenize());
                tokens.push(Token::Number(n.to_string()));
                tokens
            }
            FilterValue::Regex { space0, value } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("regex"))];
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut value.tokenize());
                tokens
            }
            FilterValue::Replace {
                space0,
                old_value,
                space1,
                new_value,
            } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("replace"))];
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut old_value.tokenize());
                tokens.append(&mut space1.tokenize());
                tokens.append(&mut new_value.tokenize());
                tokens
            }
            FilterValue::UrlEncode => vec![Token::FilterType(String::from("urlEncode"))],
            FilterValue::UrlDecode => vec![Token::FilterType(String::from("urlDecode"))],
            FilterValue::Split { space0, sep } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("split"))];
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut sep.tokenize());
                tokens
            }
            FilterValue::ToDate { space0, fmt } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("toDate"))];
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut fmt.tokenize());
                tokens
            }
            FilterValue::ToFloat => vec![Token::FilterType(String::from("toFloat"))],
            FilterValue::ToInt => vec![Token::FilterType(String::from("toInt"))],
            FilterValue::XPath { space0, expr } => {
                let mut tokens: Vec<Token> = vec![Token::FilterType(String::from("xpath"))];
                tokens.append(&mut space0.tokenize());
                tokens.append(&mut expr.tokenize());
                tokens
            }
        }
    }
}