Compare commits
8 Commits
codegen
...
autoparser
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fb26293157 | ||
|
|
c0289f238f | ||
|
|
7afb9d47fc | ||
|
|
2431a074b0 | ||
|
|
3bfd251a68 | ||
|
|
a033c82d13 | ||
|
|
c176c1c918 | ||
|
|
aa40b985f3 |
@@ -19,6 +19,7 @@ extern { }
|
||||
fn main() {
|
||||
let generators: Vec<PLIGenerator> = vec![
|
||||
Box::new(|| { Box::new(schala_lang::Schala::new())}),
|
||||
Box::new(|| { Box::new(schala_lang::autoparser::Schala::new())}),
|
||||
Box::new(|| { Box::new(maaru_lang::Maaru::new())}),
|
||||
Box::new(|| { Box::new(robo_lang::Robo::new())}),
|
||||
Box::new(|| { Box::new(rukka_lang::Rukka::new())}),
|
||||
|
||||
133
src/schala_lang/autoparser.rs
Normal file
133
src/schala_lang/autoparser.rs
Normal file
@@ -0,0 +1,133 @@
|
||||
use schala_lib::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, LanguageOutput};
|
||||
use itertools::Itertools;
|
||||
|
||||
use schala_lang::{tokenizing, parsing};
|
||||
use self::tokenizing::*;
|
||||
use self::parsing::*;
|
||||
|
||||
use schala_lang::tokenizing::TokenType::*;
|
||||
|
||||
/// Recursive-descent parser whose grammar rules are generated by the
/// `bnf_rule!` macro (see the BNF comment below for the grammar).
struct AutoParser {
  // Token stream held in reverse order so that `Vec::pop` cheaply
  // yields the next token to consume (see `AutoParser::new`).
  tokens: Vec<Token>,
}
|
||||
|
||||
/* BNF
|
||||
* all terminals in this BNF refer to TokenType values
|
||||
|
||||
literal := Kw::True | Kw::False | StrLiteral | number_literal
|
||||
number_literal := int_literal | float_literal
|
||||
float_literal := digits float_continued
|
||||
float_continued := ε | Period digits
|
||||
int_literal := HexLiteral | nonhex_int
|
||||
nonhex_int := BinNumberSigil+ digits
|
||||
digits := (DigitGroup Underscore)+
|
||||
*/
|
||||
|
||||
impl AutoParser {
|
||||
fn new(tokens: Vec<Token>) -> AutoParser {
|
||||
AutoParser { tokens: tokens.into_iter().rev().collect() }
|
||||
}
|
||||
fn peek(&mut self) -> TokenType {
|
||||
self.tokens.last().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
|
||||
}
|
||||
fn next(&mut self) -> TokenType {
|
||||
self.tokens.pop().map(|t| { t.token_type }).unwrap_or(TokenType::EOF)
|
||||
}
|
||||
fn parse(&mut self) -> (Result<AST, ParseError>, Vec<String>) {
|
||||
let ast = self.program();
|
||||
(ast, vec![])
|
||||
}
|
||||
fn program(&mut self) -> ParseResult<AST> {
|
||||
let etype = self.literal()?;
|
||||
Ok(AST(vec![Statement::ExpressionStatement(Expression(etype, None))]))
|
||||
}
|
||||
}
|
||||
|
||||
/// Extracts the match-arm *pattern* from a `(pattern => expr)` grammar
/// clause, for use on the left side of a generated `match` arm.
/// The `nonterm`-prefixed form is accepted too; both currently expand
/// to just the pattern.
macro_rules! expand_match_var {
  (($pat:pat => $e:expr)) => { $pat };
  (nonterm ($pat:pat => $e:expr)) => { $pat };
}
|
||||
|
||||
/// Extracts the match-arm *body* from a `(pattern => expr)` grammar
/// clause: consumes the matched token via `$self.next()`, then
/// evaluates the clause expression.
/// NOTE(review): the `nonterm` arm expands identically to the plain
/// arm — presumably it was meant to recurse into a nonterminal rule
/// method instead of consuming a token; confirm the intent.
macro_rules! expand_match_expr {
  ($self:ident, ($pat:pat => $e:expr)) => {
    { $self.next(); $e }
  };
  ($self:ident, nonterm ($pat:pat => $e:expr)) => {
    { $self.next(); $e }
  };
}
|
||||
|
||||
/// Generates a parser method from a BNF-style rule description:
///
///   bnf_rule!(self, ReturnType, rule_name := clause | clause | ...);
///
/// Each clause is `(TokenType pattern => result expression)`. The
/// generated method peeks at the next token; on a pattern match it
/// consumes the token and evaluates the clause body (via the two
/// `expand_match_*` helper macros), otherwise it returns a ParseError.
/// `$self` is passed in explicitly so the clause expressions remain
/// hygienic with respect to the method receiver.
macro_rules! bnf_rule {
  ($self:ident, $type:ty, $rule:ident := $( $rule_clauses:tt )|*) => {
    fn $rule(&mut $self) -> ParseResult<$type> {
      Ok(match $self.peek() {
        $(
          // clause pattern => consume the token, produce the clause's value
          expand_match_var!($rule_clauses) => expand_match_expr!($self, $rule_clauses),
        )*
        _ => return ParseError::new("Not found"),
      })
    }
  };
}
|
||||
|
||||
impl AutoParser {
  // literal := Keyword(True) | Keyword(False)
  // NOTE(review): the BNF comment above also lists string and number
  // literals, which this generated rule does not yet handle.
  bnf_rule!(self, ExpressionType, literal :=
    (Keyword(Kw::True) => ExpressionType::BoolLiteral(true)) |
    (Keyword(Kw::False) => ExpressionType::BoolLiteral(false))
  );
}
|
||||
|
||||
|
||||
/// Zero-sized front-end handle for the autoparser variant of the Schala
/// language; all parsing state lives in the per-call `AutoParser`.
pub struct Schala { }

impl Schala {
  /// Construct a new (stateless) instance.
  pub fn new() -> Schala {
    Schala { }
  }
}

// `new()` takes no arguments, so provide `Default` as well
// (clippy::new_without_default); callers using `Schala::new()` are
// unaffected.
impl Default for Schala {
  fn default() -> Schala {
    Schala::new()
  }
}
|
||||
|
||||
impl ProgrammingLanguageInterface for Schala {
|
||||
fn get_language_name(&self) -> String {
|
||||
"Schala-autoparser".to_string()
|
||||
}
|
||||
fn get_source_file_suffix(&self) -> String {
|
||||
format!("schala")
|
||||
}
|
||||
|
||||
fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> LanguageOutput {
|
||||
let mut output = LanguageOutput::default();
|
||||
|
||||
let tokens = tokenizing::tokenize(input);
|
||||
if options.debug_tokens {
|
||||
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
|
||||
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", token_string)));
|
||||
}
|
||||
{
|
||||
let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
|
||||
if token_errors.len() != 0 {
|
||||
output.add_output(format!("Tokenization error: {:?}\n", token_errors));
|
||||
return output;
|
||||
}
|
||||
}
|
||||
|
||||
let mut parser = AutoParser::new(tokens);
|
||||
|
||||
let ast = match parser.parse() {
|
||||
(Ok(ast), trace) => {
|
||||
if options.debug_parse {
|
||||
output.add_artifact(TraceArtifact::new_parse_trace(trace));
|
||||
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
|
||||
}
|
||||
ast
|
||||
},
|
||||
(Err(err), trace) => {
|
||||
output.add_artifact(TraceArtifact::new_parse_trace(trace));
|
||||
output.add_output(format!("Parse error: {:?}\n", err.msg));
|
||||
return output;
|
||||
}
|
||||
};
|
||||
|
||||
output.add_output(format!("{:?}", ast));
|
||||
output
|
||||
}
|
||||
}
|
||||
@@ -5,6 +5,8 @@ macro_rules! bx {
|
||||
($e:expr) => { Box::new($e) }
|
||||
}
|
||||
|
||||
pub mod autoparser;
|
||||
|
||||
mod builtin;
|
||||
|
||||
mod tokenizing;
|
||||
|
||||
@@ -101,7 +101,7 @@ pub struct ParseError {
|
||||
}
|
||||
|
||||
impl ParseError {
|
||||
fn new<T>(msg: &str) -> ParseResult<T> {
|
||||
pub fn new<T>(msg: &str) -> ParseResult<T> {
|
||||
Err(ParseError { msg: msg.to_string() })
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user