Compare commits

..

No commits in common. "63360e5617637e2983c303e91953311f6c178fec" and "0cb0145cc52e84f2e02cba1e5f9078f252c3f551" have entirely different histories.

2 changed files with 86 additions and 107 deletions

View File

@@ -1,113 +1,99 @@
use crate::parsing::ParseError;
use crate::schala::{SourceReference, Stage}; use crate::schala::{SourceReference, Stage};
use crate::source_map::Location; use crate::source_map::Location;
use crate::tokenizing::{Token, TokenKind}; use crate::tokenizing::{Token, TokenKind};
use crate::parsing::ParseError;
use crate::typechecking::TypeError; use crate::typechecking::TypeError;
pub struct SchalaError { pub struct SchalaError {
errors: Vec<Error>, errors: Vec<Error>,
//TODO unify these sometime //TODO unify these sometime
formatted_parse_error: Option<String>, formatted_parse_error: Option<String>,
} }
impl SchalaError { impl SchalaError {
pub(crate) fn display(&self) -> String {
if let Some(ref err) = self.formatted_parse_error {
err.clone()
} else {
self.errors[0].text.as_ref().cloned().unwrap_or_default()
}
}
pub(crate) fn from_type_error(err: TypeError) -> Self { pub(crate) fn display(&self) -> String {
Self { if let Some(ref err) = self.formatted_parse_error {
formatted_parse_error: None, err.clone()
errors: vec![Error { } else {
location: None, self.errors[0].text.as_ref().cloned().unwrap_or_default()
text: Some(err.msg),
stage: Stage::Typechecking,
}],
}
} }
}
pub(crate) fn from_string(text: String, stage: Stage) -> Self { pub(crate) fn from_type_error(err: TypeError) -> Self {
Self { Self {
formatted_parse_error: None, formatted_parse_error: None,
errors: vec![Error { errors: vec![
location: None, Error { location: None, text: Some(err.msg), stage: Stage::Typechecking }
text: Some(text), ]
stage,
}],
}
} }
}
pub(crate) fn from_parse_error( pub(crate) fn from_string(text: String, stage: Stage) -> Self {
parse_error: ParseError, Self {
source_reference: &SourceReference, formatted_parse_error: None,
) -> Self { errors: vec![
Self { Error { location: None, text: Some(text), stage }
formatted_parse_error: Some(format_parse_error(parse_error, source_reference)), ]
errors: vec![],
}
} }
}
pub(crate) fn from_tokens(tokens: &[Token]) -> Option<SchalaError> { pub(crate) fn from_parse_error(parse_error: ParseError, source_reference: &SourceReference) -> Self {
let token_errors: Vec<Error> = tokens Self {
.iter() formatted_parse_error: Some(format_parse_error(parse_error, source_reference)),
.filter_map(|tok| match tok.kind { errors: vec![],
TokenKind::Error(ref err) => Some(Error {
location: Some(tok.location),
text: Some(err.clone()),
stage: Stage::Tokenizing,
}),
_ => None,
})
.collect();
if token_errors.is_empty() {
None
} else {
Some(SchalaError {
errors: token_errors,
formatted_parse_error: None,
})
}
} }
}
pub(crate) fn from_tokens(tokens: &[Token]) -> Option<SchalaError> {
let token_errors: Vec<Error> = tokens.iter()
.filter_map(|tok| match tok.kind {
TokenKind::Error(ref err) => Some(Error {
location: Some(tok.location),
text: Some(err.clone()),
stage: Stage::Tokenizing,
}),
_ => None
}).collect();
if token_errors.is_empty() {
None
} else {
Some(SchalaError {
errors: token_errors,
formatted_parse_error: None,
})
}
}
} }
#[allow(dead_code)] #[allow(dead_code)]
struct Error { struct Error {
location: Option<Location>, location: Option<Location>,
text: Option<String>, text: Option<String>,
stage: Stage, stage: Stage,
} }
fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String { fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
let line_num = error.token.location.line_num; let line_num = error.token.location.line_num;
let ch = error.token.location.char_num; let ch = error.token.location.char_num;
let line_from_program = source_reference.get_line(line_num); let line_from_program = source_reference.get_line(line_num);
let location_pointer = format!("{}^", " ".repeat(ch)); let location_pointer = format!("{}^", " ".repeat(ch));
let line_num_digits = format!("{}", line_num).chars().count(); let line_num_digits = format!("{}", line_num).chars().count();
let space_padding = " ".repeat(line_num_digits); let space_padding = " ".repeat(line_num_digits);
let production = match error.production_name { let production = match error.production_name {
Some(n) => format!("\n(from production \"{}\")", n), Some(n) => format!("\n(from production \"{}\")", n),
None => "".to_string(), None => "".to_string()
}; };
format!( format!(r#"
r#"
{error_msg}{production} {error_msg}{production}
{space_padding} | {space_padding} |
{line_num} | {} {line_num} | {}
{space_padding} | {} {space_padding} | {}
"#, "#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num, production=production
line_from_program, )
location_pointer,
error_msg = error.msg,
space_padding = space_padding,
line_num = line_num,
production = production
)
} }

View File

@@ -302,54 +302,47 @@ mod schala_tokenizer_tests {
// Shorthand constructors for the token kinds used throughout these tests.
macro_rules! ident {
    ($ident:expr) => {
        Identifier(Rc::new($ident.to_string()))
    };
}
macro_rules! op {
    ($ident:expr) => {
        Operator(Rc::new($ident.to_string()))
    };
}
// Tokenize `input` and strip location data, keeping only the token kinds.
fn token_kinds(input: &str) -> Vec<TokenKind> {
    tokenize(input).into_iter().map(|tok| tok.kind).collect()
}
#[test]
fn tokens() {
    // A representative line exercising keywords, identifiers, brackets,
    // and a multi-character operator.
    let output = token_kinds("let a: A<B> = c ++ d");
    assert_eq!(
        output,
        vec![
            Keyword(Let),
            ident!("a"),
            Colon,
            ident!("A"),
            LAngleBracket,
            ident!("B"),
            RAngleBracket,
            Equals,
            ident!("c"),
            op!("++"),
            ident!("d")
        ]
    );
}
#[test]
fn underscores() {
    // Between digits an underscore is its own token...
    let output = token_kinds("4_8");
    assert_eq!(output, vec![digit!("4"), Underscore, digit!("8")]);

    // ...but inside an identifier it is part of the name.
    let output = token_kinds("aba_yo");
    assert_eq!(output, vec![ident!("aba_yo")]);
}
#[test]
fn comments() {
    // Nested block comments are skipped entirely.
    let output = token_kinds("1 + /* hella /* bro */ */ 2");
    assert_eq!(output, vec![digit!("1"), op!("+"), digit!("2")]);

    // An unterminated nested comment produces an error token.
    let output = token_kinds("1 + /* hella /* bro */ 2");
    assert_eq!(output, vec![digit!("1"), op!("+"), Error("Unclosed comment".to_string())]);

    //TODO not sure if I want this behavior
    let output = token_kinds("1 + /* hella */ bro */ 2");
    assert_eq!(
        output,
        vec![
            digit!("1"),
            op!("+"),
            Identifier(Rc::new("bro".to_string())),
            Operator(Rc::new("*".to_string())),
            Slash,
            DigitGroup(Rc::new("2".to_string()))
        ]
    );
}
#[test]
fn backtick_operators() {
    // A backtick-quoted identifier becomes an infix operator token.
    let output = token_kinds("1 `plus` 2");
    assert_eq!(output, vec![digit!("1"), op!("plus"), digit!("2")]);
}
#[test]
fn string_literals() {
    // Plain string literal: no prefix.
    let output = token_kinds(r#""some string""#);
    assert_eq!(output, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);

    // Prefixed (byte-string) literal records its prefix.
    let output = token_kinds(r#"b"some bytestring""#);
    assert_eq!(
        output,
        vec![StrLiteral {
            s: Rc::new("some bytestring".to_string()),
            prefix: Some(Rc::new("b".to_string()))
        }]
    );

    // Escape sequences are decoded inside the literal.
    let output = token_kinds(r#""Do \n \" escapes work\t""#);
    assert_eq!(
        output,
        vec![StrLiteral { s: Rc::new("Do \n \" escapes work\t".to_string()), prefix: None }]
    );
}
} }