diff --git a/schala-lang/language/src/tokenizing.rs b/schala-lang/language/src/tokenizing.rs
index cff8a32..80fa90c 100644
--- a/schala-lang/language/src/tokenizing.rs
+++ b/schala-lang/language/src/tokenizing.rs
@@ -217,7 +217,7 @@ fn handle_quote(input: &mut Peekable<...>, quote_prefix
         }
       },
       Some(c) => buf.push(c),
-      None => return TokenKind::Error(format!("Unclosed string")),
+      None => return TokenKind::Error("Unclosed string".to_string()),
     }
   }
   TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
@@ -329,6 +329,9 @@ mod schala_tokenizer_tests {
   fn comments() {
     let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.kind).collect();
     assert_eq!(token_kinds, vec![digit!("1"), op!("+"), digit!("2")]);
+
+    let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ 2").into_iter().map(move |t| t.kind).collect();
+    assert_eq!(token_kinds, vec![digit!("1"), op!("+"), Error("Unclosed comment".to_string())]);
   }
 
   #[test]
@@ -344,5 +347,8 @@ mod schala_tokenizer_tests {
 
     let token_kinds: Vec<TokenKind> = tokenize(r#"b"some bytestring""#).into_iter().map(move |t| t.kind).collect();
     assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some bytestring".to_string()), prefix: Some(Rc::new("b".to_string())) }]);
+
+    let token_kinds: Vec<TokenKind> = tokenize(r#""Do \n \" escapes work\t""#).into_iter().map(move |t| t.kind).collect();
+    assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("Do \n \" escapes work\t".to_string()), prefix: None }]);
   }
 }
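
The sketch below restates, in isolation, the behaviour the new string test pins down. It is not the Schala implementation: the `Tok` enum and `scan_string` function are hypothetical stand-ins for `TokenKind` and `handle_quote`, and the real tokenizer works over its own character-iterator type. It only shows how a quote handler can decode `\n`, `\t`, and `\"` escapes and return an error value, rather than a string literal, when the input ends before the closing quote.

use std::iter::Peekable;

// Hypothetical stand-in for Schala's TokenKind; only the variants needed here.
#[derive(Debug, PartialEq)]
enum Tok {
    StrLit(String),
    Error(String),
}

// Consume characters after an opening quote until the closing quote, decoding
// a few escape sequences. Returns Tok::Error on end-of-input, mirroring the
// "Unclosed string" case in the patch above.
fn scan_string(input: &mut Peekable<impl Iterator<Item = char>>) -> Tok {
    let mut buf = String::new();
    loop {
        match input.next() {
            Some('"') => return Tok::StrLit(buf),
            Some('\\') => match input.next() {
                Some('n') => buf.push('\n'),
                Some('t') => buf.push('\t'),
                Some('"') => buf.push('"'),
                Some(c) => buf.push(c), // unknown escape: keep the character
                None => return Tok::Error("Unclosed string".to_string()),
            },
            Some(c) => buf.push(c),
            None => return Tok::Error("Unclosed string".to_string()),
        }
    }
}

fn main() {
    // Escapes are decoded into the literal's value; the closing quote is consumed here.
    let mut ok = r#"Do \n \" escapes work\t""#.chars().peekable();
    assert_eq!(scan_string(&mut ok), Tok::StrLit("Do \n \" escapes work\t".to_string()));

    // A missing closing quote surfaces as an error token instead of a panic.
    let mut bad = "no closing quote".chars().peekable();
    assert_eq!(scan_string(&mut bad), Tok::Error("Unclosed string".to_string()));
}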