Added raw '' strings

Casey Rodarmor 2016-10-30 16:56:22 -07:00
parent 38a47801a2
commit 1290c5a8bd
4 changed files with 38 additions and 5 deletions

notes

@@ -1,7 +1,6 @@
 todo
 ----
-- raw strings with ''
 - multi line strings (not in recipe interpolations)
 - indentation or slash for line continuation in plain recipes


@@ -533,3 +533,19 @@ recipe:
     "echo recipe A-IS-A\n",
   );
 }
+
+#[test]
+fn raw_string() {
+  integration_test(
+    &[],
+    r#"
+export exported_variable = '\\\\\\"'
+
+recipe:
+  echo {{`echo recipe $exported_variable`}}
+"#,
+    0,
+    "recipe \\\"\n",
+    "echo recipe \\\\\\\"\n",
+  );
+}
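A note on the escape soup in this test: the justfile is embedded in a Rust r#"…"# literal, so the backslashes are already literal before just ever sees them, and just's new raw strings do no escape processing of their own. A standalone sketch (hypothetical code, not part of the commit) of what exported_variable actually contains:

    fn main() {
      // Inside the Rust raw literal r#"…"#, the just raw string '\\\\\\"'
      // holds six literal backslashes and one double quote; nothing is
      // interpreted inside '…' strings.
      let contents = r#"\\\\\\""#;
      assert_eq!(contents.chars().count(), 7);
      assert_eq!(contents, "\\\\\\\\\\\\\"");
      println!("exported_variable = {}", contents);
    }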


@@ -1103,6 +1103,7 @@ enum TokenKind {
   Name,
   Plus,
   StringToken,
+  RawString,
   Text,
 }
@@ -1123,6 +1124,7 @@ impl Display for TokenKind {
       Name => "name",
       Plus => "\"+\"",
       StringToken => "string",
+      RawString => "raw string",
       Text => "command text",
     }));
     Ok(())
@@ -1152,6 +1154,7 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
     static ref NAME: Regex = token(r"([a-zA-Z_-][a-zA-Z0-9_-]*)");
     static ref PLUS: Regex = token(r"[+]");
     static ref STRING: Regex = token("\"");
+    static ref RAW_STRING: Regex = token(r#"'[^'\r\n]*'"#);
     static ref INDENT: Regex = re(r"^([ \t]*)[^ \t\n\r]");
     static ref INTERPOLATION_START: Regex = re(r"^[{][{]");
     static ref LEADING_TEXT: Regex = re(r"^(?m)(.+?)[{][{]");
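The new RAW_STRING pattern is worth a second look: '[^'\r\n]*' matches a single-quoted span containing anything except another quote or a line break, so raw strings cannot span lines and carry no escape sequences. A quick sketch of the matching behavior, assuming the current regex crate API rather than the 0.1-era captures.at() calls seen in this diff (hypothetical test code, not part of the commit):

    use regex::Regex;

    fn main() {
      // Same pattern as RAW_STRING above, without the token() anchoring wrapper.
      let raw_string = Regex::new(r"'[^'\r\n]*'").unwrap();

      assert!(raw_string.is_match(r#"'\\\\\\"'"#)); // backslashes stay literal
      assert!(raw_string.is_match("''"));           // empty raw string is fine
      assert!(!raw_string.is_match("'a\nb'"));      // no spanning line breaks
    }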
@@ -1308,6 +1311,8 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
       (captures.at(1).unwrap(), captures.at(2).unwrap(), Equals)
     } else if let Some(captures) = COMMENT.captures(rest) {
       (captures.at(1).unwrap(), captures.at(2).unwrap(), Comment)
+    } else if let Some(captures) = RAW_STRING.captures(rest) {
+      (captures.at(1).unwrap(), captures.at(2).unwrap(), RawString)
     } else if let Some(captures) = STRING.captures(rest) {
       let prefix = captures.at(1).unwrap();
       let contents = &rest[prefix.len()+1..];
@@ -1561,11 +1566,15 @@ impl<'a> Parser<'a> {
   fn expression(&mut self, interpolation: bool) -> Result<Expression<'a>, Error<'a>> {
     let first = self.tokens.next().unwrap();
     let lhs = match first.kind {
-      Name => Expression::Variable{name: first.lexeme, token: first},
-      Backtick => Expression::Backtick{
+      Name => Expression::Variable {name: first.lexeme, token: first},
+      Backtick => Expression::Backtick {
        raw: &first.lexeme[1..first.lexeme.len()-1],
        token: first
      },
+      RawString => {
+        let raw = &first.lexeme[1..first.lexeme.len() - 1];
+        Expression::String{raw: raw, cooked: raw.to_string()}
+      }
       StringToken => {
         let raw = &first.lexeme[1..first.lexeme.len() - 1];
         let mut cooked = String::new();
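This parser arm is the heart of the change: a RawString token becomes an Expression::String whose cooked value is simply the lexeme with the surrounding quotes stripped, whereas the StringToken arm goes on to build cooked by interpreting escape sequences. A minimal standalone sketch of that distinction (hypothetical helper names, not just's API; escape set trimmed to \n, \" and \\ for brevity):

    // Illustrative only: raw strings skip escape processing entirely.
    fn unquote_raw(lexeme: &str) -> String {
      // Drop the quotes, keep every character in between verbatim.
      lexeme[1..lexeme.len() - 1].to_string()
    }

    fn unquote_cooked(lexeme: &str) -> Result<String, String> {
      // Drop the quotes, then interpret escape sequences.
      let raw = &lexeme[1..lexeme.len() - 1];
      let mut cooked = String::new();
      let mut chars = raw.chars();
      while let Some(c) = chars.next() {
        if c != '\\' {
          cooked.push(c);
          continue;
        }
        match chars.next() {
          Some('n') => cooked.push('\n'),
          Some('"') => cooked.push('"'),
          Some('\\') => cooked.push('\\'),
          other => return Err(format!("bad escape: {:?}", other)),
        }
      }
      Ok(cooked)
    }

    fn main() {
      assert_eq!(unquote_raw(r#"'a\nb'"#), r"a\nb");            // stays literal
      assert_eq!(unquote_cooked(r#""a\nb""#).unwrap(), "a\nb"); // real newline
    }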


@@ -48,7 +48,8 @@ fn token_summary(tokens: &[Token]) -> String {
       super::TokenKind::Line{..} => "^",
       super::TokenKind::Name => "N",
       super::TokenKind::Plus => "+",
-      super::TokenKind::StringToken => "'",
+      super::TokenKind::StringToken => "\"",
+      super::TokenKind::RawString => "'",
       super::TokenKind::Text => "_",
     }
   }).collect::<Vec<_>>().join("")
@@ -85,6 +86,14 @@ fn parse_error(text: &str, expected: Error) {
   }
 }
 
+#[test]
+fn tokanize_strings() {
+  tokenize_success(
+    r#"a = "'a'" + '"b"' + "'c'" + '"d"'"#,
+    r#"N="+'+"+'."#
+  );
+}
+
 #[test]
 fn tokenize_recipe_interpolation_eol() {
   let text = "foo:
@@ -103,7 +112,7 @@ fn tokenize_recipe_interpolation_eof() {
 #[test]
 fn tokenize_recipe_complex_interpolation_expression() {
   let text = "foo:\n {{a + b + \"z\" + blarg}}";
-  tokenize_success(text, "N:$>^{N+N+'+N}<.");
+  tokenize_success(text, "N:$>^{N+N+\"+N}<.");
 }
 
 #[test]