diff --git a/notes b/notes index 251656d..fdea5e5 100644 --- a/notes +++ b/notes @@ -1,7 +1,6 @@ todo ---- -- raw strings with '' - multi line strings (not in recipe interpolations) - indentation or slash for line continuation in plain recipes diff --git a/src/integration.rs b/src/integration.rs index 119e6da..a2a8558 100644 --- a/src/integration.rs +++ b/src/integration.rs @@ -533,3 +533,19 @@ recipe: "echo recipe A-IS-A\n", ); } + +#[test] +fn raw_string() { + integration_test( + &[], + r#" +export exported_variable = '\\\\\\"' + +recipe: + echo {{`echo recipe $exported_variable`}} +"#, + 0, + "recipe \\\"\n", + "echo recipe \\\\\\\"\n", + ); +} diff --git a/src/lib.rs b/src/lib.rs index 2ab087f..0fec722 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1103,6 +1103,7 @@ enum TokenKind { Name, Plus, StringToken, + RawString, Text, } @@ -1123,6 +1124,7 @@ impl Display for TokenKind { Name => "name", Plus => "\"+\"", StringToken => "string", + RawString => "raw string", Text => "command text", })); Ok(()) @@ -1152,6 +1154,7 @@ fn tokenize(text: &str) -> Result, Error> { static ref NAME: Regex = token(r"([a-zA-Z_-][a-zA-Z0-9_-]*)"); static ref PLUS: Regex = token(r"[+]" ); static ref STRING: Regex = token("\"" ); + static ref RAW_STRING: Regex = token(r#"'[^'\r\n]*'"# ); static ref INDENT: Regex = re(r"^([ \t]*)[^ \t\n\r]" ); static ref INTERPOLATION_START: Regex = re(r"^[{][{]" ); static ref LEADING_TEXT: Regex = re(r"^(?m)(.+?)[{][{]" ); @@ -1308,6 +1311,8 @@ fn tokenize(text: &str) -> Result, Error> { (captures.at(1).unwrap(), captures.at(2).unwrap(), Equals) } else if let Some(captures) = COMMENT.captures(rest) { (captures.at(1).unwrap(), captures.at(2).unwrap(), Comment) + } else if let Some(captures) = RAW_STRING.captures(rest) { + (captures.at(1).unwrap(), captures.at(2).unwrap(), RawString) } else if let Some(captures) = STRING.captures(rest) { let prefix = captures.at(1).unwrap(); let contents = &rest[prefix.len()+1..]; @@ -1561,11 +1566,15 @@ impl<'a> 
Parser<'a> { fn expression(&mut self, interpolation: bool) -> Result, Error<'a>> { let first = self.tokens.next().unwrap(); let lhs = match first.kind { - Name => Expression::Variable{name: first.lexeme, token: first}, - Backtick => Expression::Backtick{ + Name => Expression::Variable {name: first.lexeme, token: first}, + Backtick => Expression::Backtick { raw: &first.lexeme[1..first.lexeme.len()-1], token: first }, + RawString => { + let raw = &first.lexeme[1..first.lexeme.len() - 1]; + Expression::String{raw: raw, cooked: raw.to_string()} + } StringToken => { let raw = &first.lexeme[1..first.lexeme.len() - 1]; let mut cooked = String::new(); diff --git a/src/unit.rs b/src/unit.rs index 0412a9e..1d004c7 100644 --- a/src/unit.rs +++ b/src/unit.rs @@ -48,7 +48,8 @@ fn token_summary(tokens: &[Token]) -> String { super::TokenKind::Line{..} => "^", super::TokenKind::Name => "N", super::TokenKind::Plus => "+", - super::TokenKind::StringToken => "'", + super::TokenKind::StringToken => "\"", + super::TokenKind::RawString => "'", super::TokenKind::Text => "_", } }).collect::>().join("") @@ -85,6 +86,14 @@ fn parse_error(text: &str, expected: Error) { } } +#[test] +fn tokenize_strings() { + tokenize_success( + r#"a = "'a'" + '"b"' + "'c'" + '"d"'"#, + r#"N="+'+"+'."# + ); +} + #[test] fn tokenize_recipe_interpolation_eol() { let text = "foo: @@ -103,7 +112,7 @@ fn tokenize_recipe_interpolation_eof() { #[test] fn tokenize_recipe_complex_interpolation_expression() { let text = "foo:\n {{a + b + \"z\" + blarg}}"; - tokenize_success(text, "N:$>^{N+N+'+N}<."); + tokenize_success(text, "N:$>^{N+N+\"+N}<."); } #[test]