Compare commits
2 Commits
0cb0145cc5
...
63360e5617
Author | SHA1 | Date | |
---|---|---|---|
|
63360e5617 | ||
|
90ede076cc |
@ -1,99 +1,113 @@
|
|||||||
|
use crate::parsing::ParseError;
|
||||||
use crate::schala::{SourceReference, Stage};
|
use crate::schala::{SourceReference, Stage};
|
||||||
use crate::source_map::Location;
|
use crate::source_map::Location;
|
||||||
use crate::tokenizing::{Token, TokenKind};
|
use crate::tokenizing::{Token, TokenKind};
|
||||||
use crate::parsing::ParseError;
|
|
||||||
use crate::typechecking::TypeError;
|
use crate::typechecking::TypeError;
|
||||||
|
|
||||||
pub struct SchalaError {
|
pub struct SchalaError {
|
||||||
errors: Vec<Error>,
|
errors: Vec<Error>,
|
||||||
//TODO unify these sometime
|
//TODO unify these sometime
|
||||||
formatted_parse_error: Option<String>,
|
formatted_parse_error: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SchalaError {
|
impl SchalaError {
|
||||||
|
pub(crate) fn display(&self) -> String {
|
||||||
pub(crate) fn display(&self) -> String {
|
if let Some(ref err) = self.formatted_parse_error {
|
||||||
if let Some(ref err) = self.formatted_parse_error {
|
err.clone()
|
||||||
err.clone()
|
} else {
|
||||||
} else {
|
self.errors[0].text.as_ref().cloned().unwrap_or_default()
|
||||||
self.errors[0].text.as_ref().cloned().unwrap_or_default()
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_type_error(err: TypeError) -> Self {
|
pub(crate) fn from_type_error(err: TypeError) -> Self {
|
||||||
Self {
|
Self {
|
||||||
formatted_parse_error: None,
|
formatted_parse_error: None,
|
||||||
errors: vec![
|
errors: vec![Error {
|
||||||
Error { location: None, text: Some(err.msg), stage: Stage::Typechecking }
|
location: None,
|
||||||
]
|
text: Some(err.msg),
|
||||||
|
stage: Stage::Typechecking,
|
||||||
|
}],
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_string(text: String, stage: Stage) -> Self {
|
pub(crate) fn from_string(text: String, stage: Stage) -> Self {
|
||||||
Self {
|
Self {
|
||||||
formatted_parse_error: None,
|
formatted_parse_error: None,
|
||||||
errors: vec![
|
errors: vec![Error {
|
||||||
Error { location: None, text: Some(text), stage }
|
location: None,
|
||||||
]
|
text: Some(text),
|
||||||
|
stage,
|
||||||
|
}],
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_parse_error(parse_error: ParseError, source_reference: &SourceReference) -> Self {
|
pub(crate) fn from_parse_error(
|
||||||
Self {
|
parse_error: ParseError,
|
||||||
formatted_parse_error: Some(format_parse_error(parse_error, source_reference)),
|
source_reference: &SourceReference,
|
||||||
errors: vec![],
|
) -> Self {
|
||||||
|
Self {
|
||||||
|
formatted_parse_error: Some(format_parse_error(parse_error, source_reference)),
|
||||||
|
errors: vec![],
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_tokens(tokens: &[Token]) -> Option<SchalaError> {
|
pub(crate) fn from_tokens(tokens: &[Token]) -> Option<SchalaError> {
|
||||||
let token_errors: Vec<Error> = tokens.iter()
|
let token_errors: Vec<Error> = tokens
|
||||||
.filter_map(|tok| match tok.kind {
|
.iter()
|
||||||
TokenKind::Error(ref err) => Some(Error {
|
.filter_map(|tok| match tok.kind {
|
||||||
location: Some(tok.location),
|
TokenKind::Error(ref err) => Some(Error {
|
||||||
text: Some(err.clone()),
|
location: Some(tok.location),
|
||||||
stage: Stage::Tokenizing,
|
text: Some(err.clone()),
|
||||||
}),
|
stage: Stage::Tokenizing,
|
||||||
_ => None
|
}),
|
||||||
}).collect();
|
_ => None,
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
if token_errors.is_empty() {
|
if token_errors.is_empty() {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
Some(SchalaError {
|
Some(SchalaError {
|
||||||
errors: token_errors,
|
errors: token_errors,
|
||||||
formatted_parse_error: None,
|
formatted_parse_error: None,
|
||||||
})
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
struct Error {
|
struct Error {
|
||||||
location: Option<Location>,
|
location: Option<Location>,
|
||||||
text: Option<String>,
|
text: Option<String>,
|
||||||
stage: Stage,
|
stage: Stage,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
|
fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
|
||||||
let line_num = error.token.location.line_num;
|
let line_num = error.token.location.line_num;
|
||||||
let ch = error.token.location.char_num;
|
let ch = error.token.location.char_num;
|
||||||
let line_from_program = source_reference.get_line(line_num);
|
let line_from_program = source_reference.get_line(line_num);
|
||||||
let location_pointer = format!("{}^", " ".repeat(ch));
|
let location_pointer = format!("{}^", " ".repeat(ch));
|
||||||
|
|
||||||
let line_num_digits = format!("{}", line_num).chars().count();
|
let line_num_digits = format!("{}", line_num).chars().count();
|
||||||
let space_padding = " ".repeat(line_num_digits);
|
let space_padding = " ".repeat(line_num_digits);
|
||||||
|
|
||||||
let production = match error.production_name {
|
let production = match error.production_name {
|
||||||
Some(n) => format!("\n(from production \"{}\")", n),
|
Some(n) => format!("\n(from production \"{}\")", n),
|
||||||
None => "".to_string()
|
None => "".to_string(),
|
||||||
};
|
};
|
||||||
|
|
||||||
format!(r#"
|
format!(
|
||||||
|
r#"
|
||||||
{error_msg}{production}
|
{error_msg}{production}
|
||||||
{space_padding} |
|
{space_padding} |
|
||||||
{line_num} | {}
|
{line_num} | {}
|
||||||
{space_padding} | {}
|
{space_padding} | {}
|
||||||
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num, production=production
|
"#,
|
||||||
)
|
line_from_program,
|
||||||
|
location_pointer,
|
||||||
|
error_msg = error.msg,
|
||||||
|
space_padding = space_padding,
|
||||||
|
line_num = line_num,
|
||||||
|
production = production
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
@ -302,47 +302,54 @@ mod schala_tokenizer_tests {
|
|||||||
// Shorthand for an `Identifier` token kind holding the stringified argument.
macro_rules! ident {
    ($ident:expr) => {
        Identifier(Rc::new($ident.to_string()))
    };
}

// Shorthand for an `Operator` token kind holding the stringified argument.
macro_rules! op {
    ($ident:expr) => {
        Operator(Rc::new($ident.to_string()))
    };
}
|
|
||||||
|
fn token_kinds(input: &str) -> Vec<TokenKind> {
|
||||||
|
tokenize(input).into_iter().map(move |tok| tok.kind).collect()
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
fn tokens() {
    // A declaration exercising keywords, identifiers, brackets, and a
    // custom operator in one pass.
    let kinds = token_kinds("let a: A<B> = c ++ d");
    assert_eq!(
        kinds,
        vec![
            Keyword(Let),
            ident!("a"),
            Colon,
            ident!("A"),
            LAngleBracket,
            ident!("B"),
            RAngleBracket,
            Equals,
            ident!("c"),
            op!("++"),
            ident!("d")
        ]
    );
}
|
||||||
|
|
||||||
#[test]
fn underscores() {
    // Between digits, an underscore is its own token.
    let between_digits = token_kinds("4_8");
    assert_eq!(between_digits, vec![digit!("4"), Underscore, digit!("8")]);

    // Inside an identifier, it is just part of the name.
    let inside_ident = token_kinds("aba_yo");
    assert_eq!(inside_ident, vec![ident!("aba_yo")]);
}
|
||||||
|
|
||||||
#[test]
fn comments() {
    // Block comments nest, so the doubled close is consumed entirely.
    let nested = token_kinds("1 + /* hella /* bro */ */ 2");
    assert_eq!(nested, vec![digit!("1"), op!("+"), digit!("2")]);

    // A nested comment missing its outer close is an error token.
    let unclosed = token_kinds("1 + /* hella /* bro */ 2");
    assert_eq!(unclosed, vec![digit!("1"), op!("+"), Error("Unclosed comment".to_string())]);

    //TODO not sure if I want this behavior
    let extra_close = token_kinds("1 + /* hella */ bro */ 2");
    assert_eq!(
        extra_close,
        vec![
            digit!("1"),
            op!("+"),
            Identifier(Rc::new("bro".to_string())),
            Operator(Rc::new("*".to_string())),
            Slash,
            DigitGroup(Rc::new("2".to_string()))
        ]
    );
}
|
||||||
|
|
||||||
#[test]
fn backtick_operators() {
    // A backticked identifier is lexed as an infix operator.
    let kinds = token_kinds("1 `plus` 2");
    assert_eq!(kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
}
|
||||||
|
|
||||||
#[test]
fn string_literals() {
    // Plain string literal: no prefix.
    let plain = token_kinds(r#""some string""#);
    assert_eq!(plain, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);

    // A leading identifier char becomes the literal's prefix.
    let prefixed = token_kinds(r#"b"some bytestring""#);
    assert_eq!(
        prefixed,
        vec![StrLiteral {
            s: Rc::new("some bytestring".to_string()),
            prefix: Some(Rc::new("b".to_string()))
        }]
    );

    // Escape sequences are resolved during tokenizing.
    let escaped = token_kinds(r#""Do \n \" escapes work\t""#);
    assert_eq!(
        escaped,
        vec![StrLiteral { s: Rc::new("Do \n \" escapes work\t".to_string()), prefix: None }]
    );
}
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user