Added raw '' strings

This commit is contained in:
Casey Rodarmor 2016-10-30 16:56:22 -07:00
parent 38a47801a2
commit 1290c5a8bd
4 changed files with 38 additions and 5 deletions

1
notes
View File

@ -1,7 +1,6 @@
todo
----
- raw strings with ''
- multi line strings (not in recipe interpolations)
- indentation or slash for line continuation in plain recipes

View File

@ -533,3 +533,19 @@ recipe:
"echo recipe A-IS-A\n",
);
}
#[test]
// Integration test: a raw (single-quoted) string assigned to an exported
// variable must pass its backslashes through to the shell unescaped —
// unlike a cooked "…" string, no escape processing happens inside '…'.
// NOTE(review): relies on `integration_test` defined elsewhere in this file;
// argument order appears to be (args, justfile, status, stdout, stderr) — confirm.
fn raw_string() {
integration_test(
&[],
r#"
export exported_variable = '\\\\\\"'

recipe:
echo {{`echo recipe $exported_variable`}}
"#,
0, // expected exit status
"recipe \\\"\n", // expected stdout — presumably after one round of shell expansion; verify
"echo recipe \\\\\\\"\n", // expected stderr (the echoed command line) — TODO confirm
);
}

View File

@ -1103,6 +1103,7 @@ enum TokenKind {
Name,
Plus,
StringToken,
RawString,
Text,
}
@ -1123,6 +1124,7 @@ impl Display for TokenKind {
Name => "name",
Plus => "\"+\"",
StringToken => "string",
RawString => "raw string",
Text => "command text",
}));
Ok(())
@ -1152,6 +1154,7 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
static ref NAME: Regex = token(r"([a-zA-Z_-][a-zA-Z0-9_-]*)");
static ref PLUS: Regex = token(r"[+]" );
static ref STRING: Regex = token("\"" );
static ref RAW_STRING: Regex = token(r#"'[^'\r\n]*'"# );
static ref INDENT: Regex = re(r"^([ \t]*)[^ \t\n\r]" );
static ref INTERPOLATION_START: Regex = re(r"^[{][{]" );
static ref LEADING_TEXT: Regex = re(r"^(?m)(.+?)[{][{]" );
@ -1308,6 +1311,8 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
(captures.at(1).unwrap(), captures.at(2).unwrap(), Equals)
} else if let Some(captures) = COMMENT.captures(rest) {
(captures.at(1).unwrap(), captures.at(2).unwrap(), Comment)
} else if let Some(captures) = RAW_STRING.captures(rest) {
(captures.at(1).unwrap(), captures.at(2).unwrap(), RawString)
} else if let Some(captures) = STRING.captures(rest) {
let prefix = captures.at(1).unwrap();
let contents = &rest[prefix.len()+1..];
@ -1561,11 +1566,15 @@ impl<'a> Parser<'a> {
fn expression(&mut self, interpolation: bool) -> Result<Expression<'a>, Error<'a>> {
let first = self.tokens.next().unwrap();
let lhs = match first.kind {
Name => Expression::Variable{name: first.lexeme, token: first},
Backtick => Expression::Backtick{
Name => Expression::Variable {name: first.lexeme, token: first},
Backtick => Expression::Backtick {
raw: &first.lexeme[1..first.lexeme.len()-1],
token: first
},
RawString => {
let raw = &first.lexeme[1..first.lexeme.len() - 1];
Expression::String{raw: raw, cooked: raw.to_string()}
}
StringToken => {
let raw = &first.lexeme[1..first.lexeme.len() - 1];
let mut cooked = String::new();

View File

@ -48,7 +48,8 @@ fn token_summary(tokens: &[Token]) -> String {
super::TokenKind::Line{..} => "^",
super::TokenKind::Name => "N",
super::TokenKind::Plus => "+",
super::TokenKind::StringToken => "'",
super::TokenKind::StringToken => "\"",
super::TokenKind::RawString => "'",
super::TokenKind::Text => "_",
}
}).collect::<Vec<_>>().join("")
@ -85,6 +86,14 @@ fn parse_error(text: &str, expected: Error) {
}
}
#[test]
// Fixed typo in the test name: "tokanize" -> "tokenize". Test function
// names are harness-discovered, not called, so the rename is safe.
// Tokenizing a line mixing cooked ("…") and raw ('…') string literals
// should yield alternating `"` (StringToken) and `'` (RawString) marks
// in the summary produced by `token_summary`.
fn tokenize_strings() {
tokenize_success(
r#"a = "'a'" + '"b"' + "'c'" + '"d"'"#,
r#"N="+'+"+'."#
);
}
#[test]
fn tokenize_recipe_interpolation_eol() {
let text = "foo:
@ -103,7 +112,7 @@ fn tokenize_recipe_interpolation_eof() {
#[test]
fn tokenize_recipe_complex_interpolation_expression() {
let text = "foo:\n {{a + b + \"z\" + blarg}}";
tokenize_success(text, "N:$>^{N+N+'+N}<.");
tokenize_success(text, "N:$>^{N+N+\"+N}<.");
}
#[test]