Compare commits

...

7 Commits

Author          SHA1        Message                           Date
Greg Shuflin    91a7abf4cd  Clippy lints for tokenizing.rs    2021-10-19 21:27:05 -07:00
Greg Shuflin    0c6c4ef47e  Symbol table clippy               2021-10-19 21:18:57 -07:00
Greg Shuflin    355ed3c749  Rename FQSN -> Fqsn               2021-10-19 21:14:15 -07:00
Greg Shuflin    c0a3a03045  Clippy on eval.rs                 2021-10-19 21:06:59 -07:00
Greg Shuflin    f8c2e57b37  Clippy on reduced_ast.rs          2021-10-19 20:56:52 -07:00
Greg Shuflin    49a50deb04  Run rustfmt on schala.rs          2021-10-19 20:50:43 -07:00
Greg Shuflin    052a2feb23  schala.rs - clippy lints          2021-10-19 20:45:59 -07:00
8 changed files with 282 additions and 264 deletions

View File

@@ -14,11 +14,12 @@ pub struct State<'a> {
  impl<'a> State<'a> {
  pub fn new() -> State<'a> {
- let values = ScopeStack::new(Some(format!("global")));
+ let values = ScopeStack::new(Some("global".to_string()));
  State { values }
  }
- fn new_frame(&'a self, items: &'a Vec<Node>, bound_vars: &BoundVars) -> State<'a> {
+ #[allow(clippy::ptr_arg)]
+ fn new_frame(&'a self, items: &'a [Node], bound_vars: &BoundVars) -> State<'a> {
  let mut inner_state = State {
  values: self.values.new_scope(None),
  };
@@ -47,7 +48,7 @@ enum Node {
  fn paren_wrapped_vec(terms: impl Iterator<Item=String>) -> String {
  let mut buf = String::new();
  write!(buf, "(").unwrap();
- for term in terms.map(|e| Some(e)).intersperse(None) {
+ for term in terms.map(Some).intersperse(None) {
  match term {
  Some(e) => write!(buf, "{}", e).unwrap(),
  None => write!(buf, ", ").unwrap(),
@@ -62,16 +63,13 @@ impl Node {
  fn to_repl(&self) -> String {
  match self {
  Node::Expr(e) => e.to_repl(),
- Node::PrimObject { name, items, .. } if items.len() == 0 => format!("{}", name),
+ Node::PrimObject { name, items, .. } if items.is_empty() => format!("{}", name),
  Node::PrimObject { name, items, .. } => format!("{}{}", name, paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
- Node::PrimTuple { items } => format!("{}", paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
+ Node::PrimTuple { items } => paren_wrapped_vec(items.iter().map(|x| x.to_repl())),
  }
  }
  fn is_true(&self) -> bool {
- match self {
- Node::Expr(Expr::Lit(crate::reduced_ast::Lit::Bool(true))) => true,
- _ => false,
- }
+ matches!(self, Node::Expr(Expr::Lit(crate::reduced_ast::Lit::Bool(true))))
  }
  }
@@ -86,6 +84,7 @@ enum ValueEntry {
  type EvalResult<T> = Result<T, String>;
  impl Expr {
+ #[allow(clippy::wrong_self_convention)]
  fn to_node(self) -> Node {
  Node::Expr(self)
  }
@@ -103,7 +102,7 @@ impl Expr {
  },
  Expr::Func(f) => match f {
  BuiltIn(builtin) => format!("<built-in function '{:?}'>", builtin),
- UserDefined { name: None, .. } => format!("<function>"),
+ UserDefined { name: None, .. } => "<function>".to_string(),
  UserDefined { name: Some(name), .. } => format!("<function '{}'>", name),
  },
  Expr::Constructor { type_name, arity, .. } => {
@@ -174,7 +173,7 @@ impl<'a> State<'a> {
  match stmt {
  Stmt::Binding { name, constant, expr } => {
  let val = self.expression(Node::Expr(expr))?;
- self.values.insert(name.clone(), ValueEntry::Binding { constant, val });
+ self.values.insert(name, ValueEntry::Binding { constant, val });
  Ok(None)
  },
  Stmt::Expr(expr) => Ok(Some(self.expression(expr.to_node())?)),
@@ -214,7 +213,7 @@ impl<'a> State<'a> {
  Unit => Ok(Node::Expr(Unit)),
  CaseMatch { box cond, alternatives } => self.case_match_expression(cond, alternatives),
  ConditionalTargetSigilValue => Ok(Node::Expr(ConditionalTargetSigilValue)),
- UnimplementedSigilValue => Err(format!("Sigil value eval not implemented")),
+ UnimplementedSigilValue => Err("Sigil value eval not implemented".to_string()),
  ReductionError(err) => Err(format!("Reduction error: {}", err)),
  }
  }
@@ -237,7 +236,7 @@ impl<'a> State<'a> {
  let evaled_args = args.into_iter().map(|expr| self.expression(Node::Expr(expr))).collect::<Result<Vec<Node>,_>>()?;
  //let evaled_args = vec![];
  Ok(Node::PrimObject {
- name: name.clone(),
+ name,
  items: evaled_args,
  tag
  })
@@ -286,7 +285,7 @@ impl<'a> State<'a> {
  (Multiply, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l * r)),
  (Divide, Lit(Nat(l)), Lit(Nat(r))) => Lit(Float((*l as f64)/ (*r as f64))),
  (Quotient, Lit(Nat(l)), Lit(Nat(r))) => if *r == 0 {
- return Err(format!("divide by zero"));
+ return Err("Divide-by-zero error".to_string());
  } else {
  Lit(Nat(l / r))
  },
@@ -322,8 +321,8 @@ impl<'a> State<'a> {
  (prefix, &[Node::Expr(ref arg)]) => match (prefix, arg) {
  (BooleanNot, Lit(Bool(true))) => Lit(Bool(false)),
  (BooleanNot, Lit(Bool(false))) => Lit(Bool(true)),
- (Negate, Lit(Nat(n))) => Lit(Int(-1*(*n as i64))),
- (Negate, Lit(Int(n))) => Lit(Int(-1*(*n as i64))),
+ (Negate, Lit(Nat(n))) => Lit(Int(-(*n as i64))),
+ (Negate, Lit(Int(n))) => Lit(Int(-(*n as i64))),
  (Increment, Lit(Int(n))) => Lit(Int(*n)),
  (Increment, Lit(Nat(n))) => Lit(Nat(*n)),
  _ => return Err("No valid prefix op".to_string())
@@ -352,25 +351,25 @@ impl<'a> State<'a> {
  Ok(match cond {
  Node::Expr(Expr::Lit(Lit::Bool(true))) => self.block(then_clause)?,
  Node::Expr(Expr::Lit(Lit::Bool(false))) => self.block(else_clause)?,
- _ => return Err(format!("Conditional with non-boolean condition"))
+ _ => return Err("Conditional with non-boolean condition".to_string())
  })
  }
  fn assign_expression(&mut self, val: Expr, expr: Expr) -> EvalResult<Node> {
  let name = match val {
  Expr::Sym(name) => name,
- _ => return Err(format!("Trying to assign to a non-value")),
+ _ => return Err("Trying to assign to a non-value".to_string()),
  };
  let constant = match self.values.lookup(&name) {
  None => return Err(format!("Constant {} is undefined", name)),
- Some(ValueEntry::Binding { constant, .. }) => constant.clone(),
+ Some(ValueEntry::Binding { constant, .. }) => *constant,
  };
  if constant {
  return Err(format!("trying to update {}, a non-mutable binding", name));
  }
  let val = self.expression(Node::Expr(expr))?;
- self.values.insert(name.clone(), ValueEntry::Binding { constant: false, val });
+ self.values.insert(name, ValueEntry::Binding { constant: false, val });
  Ok(Node::Expr(Expr::Unit))
  }
@@ -390,8 +389,7 @@ impl<'a> State<'a> {
  //TODO need to handle recursive subpatterns
  let all_subpatterns_pass = |state: &mut State, subpatterns: &Vec<Option<Subpattern>>, items: &Vec<Node>| -> EvalResult<bool> {
- if subpatterns.len() == 0 {
+ if subpatterns.is_empty() {
  return Ok(true)
  }
@@ -401,7 +399,7 @@ impl<'a> State<'a> {
  for (maybe_subp, cond) in subpatterns.iter().zip(items.iter()) {
  if let Some(subp) = maybe_subp {
- if !state.guard_passes(&subp.guard, &cond)? {
+ if !state.guard_passes(&subp.guard, cond)? {
  return Ok(false)
  }
  }
@@ -436,7 +434,7 @@ impl<'a> State<'a> {
  }
  },
  Node::Expr(ref _e) => {
- if let None = alt.matchable.tag {
+ if alt.matchable.tag.is_none() {
  return self.block(alt.item)
  }
  }
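
Two clippy lints recur in the eval.rs changes above: len() == 0 becomes is_empty(), and a two-arm boolean match collapses into the matches! macro. A minimal, self-contained sketch of both patterns (illustrative only, not code from this repository):

enum Token {
    Word(String),
    Comma,
}

fn is_comma(t: &Token) -> bool {
    // Replaces the longer form: match t { Token::Comma => true, _ => false }
    matches!(t, Token::Comma)
}

fn describe(items: &[Token]) -> &'static str {
    // Replaces the longer form: if items.len() == 0 { ... }
    if items.is_empty() { "empty" } else { "non-empty" }
}

fn main() {
    let tokens = vec![Token::Word("hi".to_string()), Token::Comma];
    println!("{} {}", is_comma(&tokens[1]), describe(&tokens));
}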

View File

@@ -12,6 +12,9 @@
  //! then ReducedAST shouldn't be duplicating information that can be queried at runtime from the
  //! symbol table. But I think the former might make sense since ultimately the bytecode will be
  //! built from the ReducedAST.
+ #![allow(clippy::enum_variant_names)]
  use std::rc::Rc;
  use std::str::FromStr;
  use std::convert::TryFrom;
@@ -141,6 +144,7 @@ impl<'a> Reducer<'a> {
  }
  }
+ #[allow(clippy::ptr_arg)]
  fn block(&mut self, block: &Block) -> Vec<Stmt> {
  block.iter().map(|stmt| self.statement(stmt)).collect()
  }
@@ -189,11 +193,11 @@ impl<'a> Reducer<'a> {
  };
  match spec {
- SymbolSpec::RecordConstructor { .. } => Expr::ReductionError(format!("AST reducer doesn't expect a RecordConstructor here")),
+ SymbolSpec::RecordConstructor { .. } => Expr::ReductionError("AST reducer doesn't expect a RecordConstructor here".to_string()),
  SymbolSpec::DataConstructor { index, arity, type_name } => Expr::Constructor {
  type_name: type_name.clone(),
  name: local_name.clone(),
- tag: index.clone(),
+ tag: *index,
  arity: *arity,
  },
  SymbolSpec::Func(_) => Expr::Sym(local_name.clone()),
@@ -201,7 +205,8 @@ impl<'a> Reducer<'a> {
  }
  }
- fn reduce_lambda(&mut self, params: &Vec<FormalParam>, body: &Block) -> Expr {
+ #[allow(clippy::ptr_arg)]
+ fn reduce_lambda(&mut self, params: &[FormalParam], body: &Block) -> Expr {
  Expr::Func(Func::UserDefined {
  name: None,
  params: params.iter().map(|param| param.name.clone()).collect(),
@@ -209,7 +214,7 @@ impl<'a> Reducer<'a> {
  })
  }
- fn reduce_named_struct(&mut self, name: &QualifiedName, fields: &Vec<(Rc<String>, Expression)>) -> Expr {
+ fn reduce_named_struct(&mut self, name: &QualifiedName, fields: &[(Rc<String>, Expression)]) -> Expr {
  let symbol = match self.symbol_table.lookup_symbol(&name.id) {
  Some(fqsn) => fqsn,
  None => return Expr::ReductionError(format!("FQSN lookup for name {:?} failed", name)),
@@ -236,7 +241,7 @@ impl<'a> Reducer<'a> {
  Expr::Call { f, args }
  }
- fn reduce_call_expression(&mut self, func: &Expression, arguments: &Vec<InvocationArgument>) -> Expr {
+ fn reduce_call_expression(&mut self, func: &Expression, arguments: &[ InvocationArgument ]) -> Expr {
  Expr::Call {
  f: Box::new(self.expression(func)),
  args: arguments.iter().map(|arg| self.invocation_argument(arg)).collect(),
@@ -246,23 +251,23 @@ impl<'a> Reducer<'a> {
  fn reduce_if_expression(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> Expr {
  let cond = Box::new(match discriminator {
  Some(expr) => self.expression(expr),
- None => return Expr::ReductionError(format!("blank cond if-expr not supported")),
+ None => return Expr::ReductionError("blank cond if-expr not supported".to_string()),
  });
  match body {
  IfExpressionBody::SimpleConditional { then_case, else_case } => {
- let then_clause = self.block(&then_case);
+ let then_clause = self.block(then_case);
  let else_clause = match else_case.as_ref() {
  None => vec![],
- Some(stmts) => self.block(&stmts),
+ Some(stmts) => self.block(stmts),
  };
  Expr::Conditional { cond, then_clause, else_clause }
  },
  IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case } => {
- let then_clause = self.block(&then_case);
+ let then_clause = self.block(then_case);
  let else_clause = match else_case.as_ref() {
  None => vec![],
- Some(stmts) => self.block(&stmts),
+ Some(stmts) => self.block(stmts),
  };
  let alternatives = vec![
@@ -368,7 +373,7 @@ impl<'a> Reducer<'a> {
  * x is SomeBigOldEnum(_, x, Some(t))
  */
- fn handle_symbol(symbol: Option<&Symbol>, inner_patterns: &Vec<Pattern>, symbol_table: &SymbolTable) -> Subpattern {
+ fn handle_symbol(symbol: Option<&Symbol>, inner_patterns: &[Pattern], symbol_table: &SymbolTable) -> Subpattern {
  use self::Pattern::*;
  let tag = symbol.map(|symbol| match symbol.spec {
  SymbolSpec::DataConstructor { index, .. } => index,
@@ -451,7 +456,7 @@ impl Pattern {
  // if symbol is Some, treat this as a symbol pattern. If it's None, treat it
  // as a variable.
  match symbol_table.lookup_symbol(id) {
- Some(symbol) => handle_symbol(Some(symbol), &vec![], symbol_table),
+ Some(symbol) => handle_symbol(Some(symbol), &[], symbol_table),
  None => {
  println!("Components: {:?}", components);
  let name = if components.len() == 1 {
@@ -480,8 +485,8 @@ impl PatternLiteral {
  let comparison = Expr::Lit(match (neg, num) {
  (false, ExpressionKind::NatLiteral(n)) => Lit::Nat(*n),
  (false, ExpressionKind::FloatLiteral(f)) => Lit::Float(*f),
- (true, ExpressionKind::NatLiteral(n)) => Lit::Int(-1*(*n as i64)),
- (true, ExpressionKind::FloatLiteral(f)) => Lit::Float(-1.0*f),
+ (true, ExpressionKind::NatLiteral(n)) => Lit::Int(-(*n as i64)),
+ (true, ExpressionKind::FloatLiteral(f)) => Lit::Float(-f),
  _ => panic!("This should never happen")
  });
  let guard = Some(Expr::Call {
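
The #[allow(clippy::ptr_arg)] attributes and the &Vec<T> -> &[T] signature changes in this file address clippy's ptr_arg lint: a function that only reads its argument should take a slice, which callers can still supply from a Vec. A stand-alone sketch of the pattern (names are illustrative, not from the crate):

// Taking &[String] instead of &Vec<String> accepts a Vec, an array, or any other slice.
fn sum_lengths(words: &[String]) -> usize {
    words.iter().map(|w| w.len()).sum()
}

fn main() {
    let owned: Vec<String> = vec!["foo".to_string(), "bar".to_string()];
    // A &Vec<String> coerces to &[String], so existing call sites keep working.
    println!("{}", sum_lengths(&owned));
}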

View File

@@ -1,25 +1,20 @@
  use stopwatch::Stopwatch;
- use std::cell::RefCell;
- use std::rc::Rc;
- use schala_repl::{ProgrammingLanguageInterface,
- ComputationRequest, ComputationResponse,
- LangMetaRequest, LangMetaResponse, GlobalOutputStats};
- use crate::{reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table};
  use crate::error::SchalaError;
- pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
+ use crate::{eval, parsing, reduced_ast, symbol_table, tokenizing, typechecking};
+ use schala_repl::{
+ ComputationRequest, ComputationResponse, GlobalOutputStats, LangMetaRequest, LangMetaResponse,
+ ProgrammingLanguageInterface,
+ };
  /// All the state necessary to parse and execute a Schala program are stored in this struct.
+ #[allow(dead_code)]
  pub struct Schala {
  /// Holds a reference to the original source code, parsed into line and character
  source_reference: SourceReference,
  /// Execution state for AST-walking interpreter
  state: eval::State<'static>,
  /// Keeps track of symbols and scopes
- symbol_table: SymbolTableHandle,
+ symbol_table: symbol_table::SymbolTable,
  /// Contains information for type-checking
  type_context: typechecking::TypeContext<'static>,
  /// Schala Parser
@@ -40,18 +35,18 @@ impl Schala {
  impl Schala {
  /// Creates a new Schala environment *without* any prelude.
  fn new_blank_env() -> Schala {
- let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
  Schala {
  source_reference: SourceReference::new(),
- symbol_table: symbols.clone(),
+ symbol_table: symbol_table::SymbolTable::new(),
  state: eval::State::new(),
  type_context: typechecking::TypeContext::new(),
- active_parser: parsing::Parser::new()
+ active_parser: parsing::Parser::new(),
  }
  }
  /// Creates a new Schala environment with the standard prelude, which is defined as ordinary
  /// Schala code in the file `prelude.schala`
+ #[allow(clippy::new_without_default)]
  pub fn new() -> Schala {
  let prelude = include_str!("../source-files/prelude.schala");
  let mut env = Schala::new_blank_env();
@@ -71,47 +66,48 @@ impl Schala {
  // TODO tokenize should return its own error type
  let tokens = tokenizing::tokenize(source);
  if let Some(err) = SchalaError::from_tokens(&tokens) {
- return Err(err)
+ return Err(err);
  }
  //2nd stage - parsing
  self.active_parser.add_new_tokens(tokens);
- let ast = self.active_parser.parse()
+ let ast = self
+ .active_parser
+ .parse()
  .map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;
  //Perform all symbol table work
- self.symbol_table.borrow_mut().process_ast(&ast)
- .map_err(|err| SchalaError::from_symbol_table(err))?;
+ self.symbol_table
+ .process_ast(&ast)
+ .map_err(SchalaError::from_symbol_table)?;
  // Typechecking
  // TODO typechecking not working
- let _overall_type = self.type_context.typecheck(&ast)
+ let _overall_type = self
+ .type_context
+ .typecheck(&ast)
  .map_err(SchalaError::from_type_error);
  // Reduce AST - TODO this doesn't produce an error yet, but probably should
- let symbol_table = self.symbol_table.borrow();
- let reduced_ast = reduced_ast::reduce(&ast, &symbol_table);
+ let reduced_ast = reduced_ast::reduce(&ast, &self.symbol_table);
  // Tree-walking evaluator. TODO fix this
  let evaluation_outputs = self.state.evaluate(reduced_ast, true);
- let text_output: Result<Vec<String>, String> = evaluation_outputs
- .into_iter()
- .collect();
+ let text_output: Result<Vec<String>, String> = evaluation_outputs.into_iter().collect();
- let text_output: Result<Vec<String>, SchalaError> = text_output
- .map_err(|err| SchalaError::from_string(err, Stage::Evaluation));
+ let text_output: Result<Vec<String>, SchalaError> =
+ text_output.map_err(|err| SchalaError::from_string(err, Stage::Evaluation));
- let eval_output: String = text_output
- .map(|v| { Iterator::intersperse(v.into_iter(), "\n".to_owned()).collect() })?;
+ let eval_output: String =
+ text_output.map(|v| Iterator::intersperse(v.into_iter(), "\n".to_owned()).collect())?;
  Ok(eval_output)
  }
  }
  /// Represents lines of source code
  pub(crate) struct SourceReference {
- lines: Option<Vec<String>>
+ lines: Option<Vec<String>>,
  }
  impl SourceReference {
@@ -121,10 +117,14 @@ impl SourceReference {
  fn load_new_source(&mut self, source: &str) {
  //TODO this is a lot of heap allocations - maybe there's a way to make it more efficient?
- self.lines = Some(source.lines().map(|s| s.to_string()).collect()); }
+ self.lines = Some(source.lines().map(|s| s.to_string()).collect());
+ }
  pub fn get_line(&self, line: usize) -> String {
- self.lines.as_ref().and_then(|x| x.get(line).map(|s| s.to_string())).unwrap_or(format!("NO LINE FOUND"))
+ self.lines
+ .as_ref()
+ .and_then(|x| x.get(line).map(|s| s.to_string()))
+ .unwrap_or_else(|| "NO LINE FOUND".to_string())
  }
  }
@@ -145,14 +145,12 @@ fn stage_names() -> Vec<&'static str> {
  "tokenizing",
  "parsing",
  "symbol-table",
- "scope-resolution",
  "typechecking",
  "ast-reduction",
- "ast-walking-evaluation"
+ "ast-walking-evaluation",
  ]
  }
  impl ProgrammingLanguageInterface for Schala {
  type Config = ();
  fn language_name() -> String {
@@ -163,30 +161,43 @@ impl ProgrammingLanguageInterface for Schala {
  "schala".to_owned()
  }
- fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse {
- let ComputationRequest { source, debug_requests: _, config: _ } = request;
+ fn run_computation(
+ &mut self,
+ request: ComputationRequest<Self::Config>,
+ ) -> ComputationResponse {
+ let ComputationRequest {
+ source,
+ debug_requests: _,
+ config: _,
+ } = request;
  self.source_reference.load_new_source(source);
  let sw = Stopwatch::start_new();
- let main_output = self.run_pipeline(source)
+ let main_output = self
+ .run_pipeline(source)
  .map_err(|schala_err| schala_err.display());
  let global_output_stats = GlobalOutputStats {
  total_duration: sw.elapsed(),
- stage_durations: vec![]
+ stage_durations: vec![],
  };
  ComputationResponse {
  main_output,
  global_output_stats,
- debug_responses: vec![]
+ debug_responses: vec![],
  }
  }
  fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
  match request {
- LangMetaRequest::StageNames => LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()),
- _ => LangMetaResponse::Custom { kind: format!("not-implemented"), value: format!("") }
+ LangMetaRequest::StageNames => {
+ LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect())
+ }
+ _ => LangMetaResponse::Custom {
+ kind: "not-implemented".to_string(),
+ value: "".to_string(),
+ },
  }
  }
  }
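
Schala::new() takes no arguments, which trips clippy's new_without_default lint; the commit opts out with #[allow(clippy::new_without_default)]. The alternative the lint suggests is a Default impl that delegates to new(), roughly like this (illustrative sketch, not code from the repository):

struct Interpreter {
    stack: Vec<i64>,
}

impl Interpreter {
    fn new() -> Self {
        Interpreter { stack: Vec::new() }
    }
}

// What clippy::new_without_default asks for: a Default impl delegating to new().
impl Default for Interpreter {
    fn default() -> Self {
        Self::new()
    }
}

fn main() {
    let interp = Interpreter::default();
    println!("{}", interp.stack.len());
}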

View File

@@ -15,28 +15,28 @@ mod test;
  /// Fully-qualified symbol name
  #[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
- pub struct FQSN {
- //TODO FQSN's need to be cheaply cloneable
+ pub struct Fqsn {
+ //TODO Fqsn's need to be cheaply cloneable
  scopes: Vec<Scope>, //TODO rename to ScopeSegment
  }
- impl FQSN {
+ impl Fqsn {
  fn from_scope_stack(scopes: &[Scope], new_name: Rc<String>) -> Self {
  let mut v = Vec::new();
  for s in scopes {
  v.push(s.clone());
  }
  v.push(Scope::Name(new_name));
- FQSN { scopes: v }
+ Fqsn { scopes: v }
  }
  #[cfg(test)]
- fn from_strs(strs: &[&str]) -> FQSN {
+ fn from_strs(strs: &[&str]) -> Fqsn {
  let mut scopes = vec![];
  for s in strs {
  scopes.push(Scope::Name(Rc::new(s.to_string())));
  }
- FQSN {
+ Fqsn {
  scopes
  }
  }
@@ -55,11 +55,11 @@ enum Scope {
  #[derive(Debug, Clone)]
  pub enum SymbolError {
  DuplicateName {
- prev_name: FQSN,
+ prev_name: Fqsn,
  location: Location
  },
  DuplicateRecord {
- type_name: FQSN,
+ type_name: Fqsn,
  location: Location,
  member: String,
  }
@@ -84,7 +84,7 @@ struct TypeKind;
  /// Keeps track of what names were used in a given namespace.
  struct NameTable<K> {
- table: HashMap<FQSN, NameSpec<K>>
+ table: HashMap<Fqsn, NameSpec<K>>
  }
  impl<K> NameTable<K> {
@@ -92,7 +92,7 @@ impl<K> NameTable<K> {
  Self { table: HashMap::new() }
  }
- fn register(&mut self, name: FQSN, spec: NameSpec<K>) -> Result<(), SymbolError> {
+ fn register(&mut self, name: Fqsn, spec: NameSpec<K>) -> Result<(), SymbolError> {
  match self.table.entry(name.clone()) {
  Entry::Occupied(o) => {
  Err(SymbolError::DuplicateName { prev_name: name, location: o.get().location })
@@ -114,14 +114,14 @@ pub struct SymbolTable {
  fq_names: NameTable<NameKind>, //Note that presence of two tables implies that a type and other binding with the same name can co-exist
  types: NameTable<TypeKind>,
- /// A map of the `ItemId`s of instances of use of names to their fully-canonicalized FQSN form.
+ /// A map of the `ItemId`s of instances of use of names to their fully-canonicalized Fqsn form.
  /// Updated by the item id resolver.
- id_to_fqsn: HashMap<ItemId, FQSN>,
+ id_to_fqsn: HashMap<ItemId, Fqsn>,
- /// A map of the FQSN of an AST definition to a Symbol data structure, which contains
+ /// A map of the Fqsn of an AST definition to a Symbol data structure, which contains
  /// some basic information about what that symbol is and (ideally) references to other tables
  /// (e.g. typechecking tables) with more information about that symbol.
- fqsn_to_symbol: HashMap<FQSN, Symbol>,
+ fqsn_to_symbol: HashMap<Fqsn, Symbol>,
  }
  impl SymbolTable {
@@ -203,7 +203,7 @@ impl SymbolTable {
  /// Register a new mapping of a fully-qualified symbol name (e.g. `Option::Some`)
  /// to a Symbol, a descriptor of what that name refers to.
- fn add_symbol(&mut self, fqsn: FQSN, symbol: Symbol) {
+ fn add_symbol(&mut self, fqsn: Fqsn, symbol: Symbol) {
  self.symbol_trie.insert(&fqsn);
  self.fqsn_to_symbol.insert(fqsn, symbol);
  }
@@ -230,7 +230,7 @@ impl SymbolTable {
  for statement in statements {
  let Statement { id: _, kind, location } = statement; //TODO I'm not sure if I need to do anything with this ID
  let location = *location;
- if let Err(err) = self.add_single_statement(kind, location, &scope_stack) {
+ if let Err(err) = self.add_single_statement(kind, location, scope_stack) {
  errors.push(err);
  } else { // If there's an error with a name, don't recurse into subscopes of that name
  let recursive_errs = match kind {
@@ -260,10 +260,10 @@ impl SymbolTable {
  errors
  }
- fn add_single_statement(&mut self, kind: &StatementKind, location: Location, scope_stack: &Vec<Scope>) -> Result<(), SymbolError> {
+ fn add_single_statement(&mut self, kind: &StatementKind, location: Location, scope_stack: &[Scope]) -> Result<(), SymbolError> {
  match kind {
  StatementKind::Declaration(Declaration::FuncSig(signature)) => {
- let fq_function = FQSN::from_scope_stack(scope_stack.as_ref(), signature.name.clone());
+ let fq_function = Fqsn::from_scope_stack(scope_stack, signature.name.clone());
  self.fq_names.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
  self.types.register(fq_function.clone(), NameSpec { location, kind: TypeKind } )?;
@@ -274,7 +274,7 @@ impl SymbolTable {
  }
  StatementKind::Declaration(Declaration::FuncDecl(signature, ..)) => {
  let fn_name = &signature.name;
- let fq_function = FQSN::from_scope_stack(scope_stack.as_ref(), fn_name.clone());
+ let fq_function = Fqsn::from_scope_stack(scope_stack, fn_name.clone());
  self.fq_names.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
  self.types.register(fq_function.clone(), NameSpec { location, kind: TypeKind } )?;
@@ -284,11 +284,11 @@ impl SymbolTable {
  });
  },
  StatementKind::Declaration(Declaration::TypeDecl { name, .. }) => {
- let fq_type = FQSN::from_scope_stack(scope_stack.as_ref(), name.name.clone());
+ let fq_type = Fqsn::from_scope_stack(scope_stack, name.name.clone());
  self.types.register(fq_type, NameSpec { location, kind: TypeKind } )?;
  },
  StatementKind::Declaration(Declaration::Binding { name, .. }) => {
- let fq_binding = FQSN::from_scope_stack(scope_stack.as_ref(), name.clone());
+ let fq_binding = Fqsn::from_scope_stack(scope_stack, name.clone());
  self.fq_names.register(fq_binding.clone(), NameSpec { location, kind: NameKind::Binding })?;
  self.add_symbol(fq_binding, Symbol {
  local_name: name.clone(),
@@ -296,7 +296,7 @@ impl SymbolTable {
  });
  }
  StatementKind::Module(ModuleSpecifier { name, .. }) => {
- let fq_module = FQSN::from_scope_stack(scope_stack.as_ref(), name.clone());
+ let fq_module = Fqsn::from_scope_stack(scope_stack, name.clone());
  self.fq_names.register(fq_module, NameSpec { location, kind: NameKind::Module })?;
  },
  _ => (),
@@ -308,7 +308,7 @@ impl SymbolTable {
  let mut member_errors = vec![];
  let mut errors = vec![];
- let mut register = |fqsn: FQSN, spec: SymbolSpec| {
+ let mut register = |fqsn: Fqsn, spec: SymbolSpec| {
  let name_spec = NameSpec { location, kind: TypeKind };
  if let Err(err) = self.types.register(fqsn.clone(), name_spec) {
  errors.push(err);
@@ -329,7 +329,7 @@ impl SymbolTable {
  for (index, variant) in variants.iter().enumerate() {
  match variant {
  Variant::UnitStruct(name) => {
- let fq_name = FQSN::from_scope_stack(scope_stack.as_ref(), name.clone());
+ let fq_name = Fqsn::from_scope_stack(scope_stack.as_ref(), name.clone());
  let spec = SymbolSpec::DataConstructor {
  index,
  arity: 0,
@@ -338,7 +338,7 @@ impl SymbolTable {
  register(fq_name, spec);
  },
  Variant::TupleStruct(name, items) => {
- let fq_name = FQSN::from_scope_stack(scope_stack.as_ref(), name.clone());
+ let fq_name = Fqsn::from_scope_stack(scope_stack.as_ref(), name.clone());
  let spec = SymbolSpec::DataConstructor {
  index,
  arity: items.len(),
@@ -347,7 +347,7 @@ impl SymbolTable {
  register(fq_name, spec);
  },
  Variant::Record { name, members } => {
- let fq_name = FQSN::from_scope_stack(scope_stack.as_ref(), name.clone());
+ let fq_name = Fqsn::from_scope_stack(scope_stack.as_ref(), name.clone());
  let mut seen_members = HashMap::new();
  for (member_name, _) in members.iter() {
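
The FQSN -> Fqsn rename running through this file and the two below follows clippy's upper_case_acronyms lint, which wants acronyms in type names camel-cased. A tiny illustration of the two options, renaming or opting out per item (hypothetical types, not from the crate):

// Keeping the all-caps name requires opting out of the lint:
#[allow(clippy::upper_case_acronyms)]
struct FQSN {
    scopes: Vec<String>,
}

// The spelling clippy prefers:
struct Fqsn {
    scopes: Vec<String>,
}

fn main() {
    let old = FQSN { scopes: vec!["a".to_string()] };
    let new = Fqsn { scopes: vec!["a".to_string()] };
    println!("{} {}", old.scopes.len(), new.scopes.len());
}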

View File

@@ -1,31 +1,31 @@
  use std::rc::Rc;
- use crate::symbol_table::{SymbolTable, FQSN, Scope};
+ use crate::symbol_table::{SymbolTable, Fqsn, Scope};
  use crate::ast::*;
  use crate::util::ScopeStack;
- type FQSNPrefix = Vec<Scope>;
+ type FqsnPrefix = Vec<Scope>;
  pub struct Resolver<'a> {
  symbol_table: &'a mut super::SymbolTable,
- name_scope_stack: ScopeStack<'a, Rc<String>, FQSNPrefix>,
+ name_scope_stack: ScopeStack<'a, Rc<String>, FqsnPrefix>,
  }
  impl<'a> Resolver<'a> {
  pub fn new(symbol_table: &'a mut SymbolTable) -> Self {
- let name_scope_stack: ScopeStack<'a, Rc<String>, FQSNPrefix> = ScopeStack::new(None);
+ let name_scope_stack: ScopeStack<'a, Rc<String>, FqsnPrefix> = ScopeStack::new(None);
  Self { symbol_table, name_scope_stack }
  }
  pub fn resolve(&mut self, ast: &AST) {
  walk_ast(self, ast);
  }
- fn lookup_name_in_scope(&self, sym_name: &QualifiedName) -> FQSN {
+ fn lookup_name_in_scope(&self, sym_name: &QualifiedName) -> Fqsn {
  let QualifiedName { components, .. } = sym_name;
  let first_component = &components[0];
  match self.name_scope_stack.lookup(first_component) {
  None => {
- FQSN {
+ Fqsn {
  scopes: components.iter()
  .map(|name| Scope::Name(name.clone()))
  .collect()
@@ -33,10 +33,10 @@ impl<'a> Resolver<'a> {
  },
  Some(fqsn_prefix) => {
  let mut full_name = fqsn_prefix.clone();
- let rest_of_name: FQSNPrefix = components[1..].iter().map(|name| Scope::Name(name.clone())).collect();
+ let rest_of_name: FqsnPrefix = components[1..].iter().map(|name| Scope::Name(name.clone())).collect();
  full_name.extend_from_slice(&rest_of_name);
- FQSN {
+ Fqsn {
  scopes: full_name
  }
  }
@@ -59,7 +59,7 @@ impl<'a> ASTVisitor for Resolver<'a> {
  let ImportSpecifier { ref path_components, ref imported_names, .. } = &import_spec;
  match imported_names {
  ImportedNames::All => {
- let prefix = FQSN {
+ let prefix = Fqsn {
  scopes: path_components.iter().map(|c| Scope::Name(c.clone())).collect()
  };
  let members = self.symbol_table.symbol_trie.get_children(&prefix);
@@ -77,7 +77,7 @@ impl<'a> ASTVisitor for Resolver<'a> {
  self.name_scope_stack.insert(name.clone(), fqsn_prefix);
  }
  ImportedNames::List(ref names) => {
- let fqsn_prefix: FQSNPrefix = path_components.iter()
+ let fqsn_prefix: FqsnPrefix = path_components.iter()
  .map(|c| Scope::Name(c.clone()))
  .collect();
  for name in names.iter() {
@@ -88,12 +88,12 @@ impl<'a> ASTVisitor for Resolver<'a> {
  }
  fn qualified_name(&mut self, qualified_name: &QualifiedName) {
- let fqsn = self.lookup_name_in_scope(&qualified_name);
+ let fqsn = self.lookup_name_in_scope(qualified_name);
  self.symbol_table.id_to_fqsn.insert(qualified_name.id.clone(), fqsn);
  }
  fn named_struct(&mut self, qualified_name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {
- let fqsn = self.lookup_name_in_scope(&qualified_name);
+ let fqsn = self.lookup_name_in_scope(qualified_name);
  self.symbol_table.id_to_fqsn.insert(qualified_name.id.clone(), fqsn);
  }

View File

@@ -1,12 +1,12 @@
  use radix_trie::{Trie, TrieCommon, TrieKey};
- use super::{Scope, FQSN};
+ use super::{Scope, Fqsn};
  use std::hash::{Hasher, Hash};
  use std::collections::hash_map::DefaultHasher;
  #[derive(Debug)]
- pub struct SymbolTrie(Trie<FQSN, ()>);
+ pub struct SymbolTrie(Trie<Fqsn, ()>);
- impl TrieKey for FQSN {
+ impl TrieKey for Fqsn {
  fn encode_bytes(&self) -> Vec<u8> {
  let mut hasher = DefaultHasher::new();
  let mut output = vec![];
@@ -24,16 +24,16 @@ impl SymbolTrie {
  SymbolTrie(Trie::new())
  }
- pub fn insert(&mut self, fqsn: &FQSN) {
+ pub fn insert(&mut self, fqsn: &Fqsn) {
  self.0.insert(fqsn.clone(), ());
  }
- pub fn get_children(&self, fqsn: &FQSN) -> Vec<FQSN> {
+ pub fn get_children(&self, fqsn: &Fqsn) -> Vec<Fqsn> {
  let subtrie = match self.0.subtrie(fqsn) {
  Some(s) => s,
  None => return vec![]
  };
- let output: Vec<FQSN> = subtrie.keys().filter(|cur_key| **cur_key != *fqsn).map(|fqsn| fqsn.clone()).collect();
+ let output: Vec<Fqsn> = subtrie.keys().filter(|cur_key| **cur_key != *fqsn).cloned().collect();
  output
  }
  }
@@ -41,10 +41,10 @@ impl SymbolTrie {
  #[cfg(test)]
  mod test {
  use super::*;
- use crate::symbol_table::FQSN;
+ use crate::symbol_table::Fqsn;
- fn make_fqsn(strs: &[&str]) -> FQSN {
+ fn make_fqsn(strs: &[&str]) -> Fqsn {
- FQSN::from_strs(strs)
+ Fqsn::from_strs(strs)
  }
  #[test]

View File

@@ -10,8 +10,8 @@ fn add_symbols(src: &str) -> (SymbolTable, Result<(), Vec<SymbolError>>) {
  (symbol_table, result)
  }
- fn make_fqsn(strs: &[&str]) -> FQSN {
+ fn make_fqsn(strs: &[&str]) -> Fqsn {
- FQSN::from_strs(strs)
+ Fqsn::from_strs(strs)
  }
@@ -20,8 +20,8 @@ fn basic_symbol_table() {
  let src = "let a = 10; fn b() { 20 }";
  let (symbols, _) = add_symbols(src);
- fn make_fqsn(strs: &[&str]) -> FQSN {
+ fn make_fqsn(strs: &[&str]) -> Fqsn {
- FQSN::from_strs(strs)
+ Fqsn::from_strs(strs)
  }
  symbols.fq_names.table.get(&make_fqsn(&["b"])).unwrap();
@@ -45,7 +45,7 @@ fn no_function_definition_duplicates() {
  let errs = output.unwrap_err();
  assert_matches!(&errs[..], [
  SymbolError::DuplicateName { prev_name, ..}
- ] if prev_name == &FQSN::from_strs(&["a"])
+ ] if prev_name == &Fqsn::from_strs(&["a"])
  );
  }
@@ -64,7 +64,7 @@ fn no_variable_definition_duplicates() {
  assert_matches!(&errs[..], [
  SymbolError::DuplicateName { prev_name: pn1, ..},
  SymbolError::DuplicateName { prev_name: pn2, ..}
- ] if pn1 == &FQSN::from_strs(&["a"]) && pn2 == &FQSN::from_strs(&["x"])
+ ] if pn1 == &Fqsn::from_strs(&["a"]) && pn2 == &Fqsn::from_strs(&["x"])
  );
  }
@@ -87,7 +87,7 @@ fn no_variable_definition_duplicates_in_function() {
  let errs = output.unwrap_err();
  assert_matches!(&errs[..], [
  SymbolError::DuplicateName { prev_name: pn1, ..},
- ] if pn1 == &FQSN::from_strs(&["q", "x"])
+ ] if pn1 == &Fqsn::from_strs(&["q", "x"])
  );
  }
@@ -202,7 +202,7 @@ fn duplicate_modules() {
  assert_matches!(&errs[..], [
  SymbolError::DuplicateName { prev_name: pn1, ..},
- ] if pn1 == &FQSN::from_strs(&["a"])
+ ] if pn1 == &Fqsn::from_strs(&["a"])
  );
  }
@@ -227,7 +227,7 @@ fn duplicate_struct_members() {
  assert_matches!(&errs[..], [
  SymbolError::DuplicateRecord {
  type_name, member, ..},
- ] if type_name == &FQSN::from_strs(&["Tarak", "Tarak"]) && member == "mets"
+ ] if type_name == &Fqsn::from_strs(&["Tarak", "Tarak"]) && member == "mets"
  );
  }

View File

@@ -135,7 +135,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
  let mut tokens: Vec<Token> = Vec::new();
  let mut input = Iterator::intersperse(input.lines().enumerate(), (0, "\n"))
- .flat_map(|(line_idx, ref line)| {
+ .flat_map(|(line_idx, line)| {
  line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
  })
  .peekable();
@@ -144,7 +144,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
  let cur_tok_kind = match c {
  '/' => match input.peek().map(|t| t.2) {
  Some('/') => {
- while let Some((_, _, c)) = input.next() {
+ for (_, _, c) in input.by_ref() {
  if c == '\n' {
  break;
  }
@@ -194,11 +194,13 @@ pub fn tokenize(input: &str) -> Vec<Token> {
  }
  fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
- if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
+ let next_ch = input.peek().map(|&(_, _, c)| c);
+ if c == '0' && next_ch == Some('x') {
  input.next();
  let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
  HexLiteral(Rc::new(rest))
- } else if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'b' }) {
+ } else if c == '0' && next_ch == Some('b') {
  input.next();
  BinNumberSigil
  } else {
@@ -236,7 +238,8 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>, quote_prefix
  fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
  let mut buf = String::new();
  buf.push(c);
- if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
+ let next_is_alphabetic = input.peek().map(|&(_, _, c)| !c.is_alphabetic()).unwrap_or(true);
+ if c == '_' && next_is_alphabetic {
  return TokenKind::Underscore
  }
@@ -263,8 +266,9 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>
  fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
  match c {
  '<' | '>' | '|' | '.' | '=' => {
- let ref next = input.peek().map(|&(_, _, c)| { c });
- if !next.map(|n| { is_operator(&n) }).unwrap_or(false) {
+ let next = &input.peek().map(|&(_, _, c)| { c });
+ let next_is_op = next.map(|n| { is_operator(&n) }).unwrap_or(false);
+ if !next_is_op {
  return match c {
  '<' => LAngleBracket,
  '>' => RAngleBracket,
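
The change from while let Some((_, _, c)) = input.next() to for (_, _, c) in input.by_ref() addresses clippy's while_let_on_iterator lint; by_ref() lets the for loop borrow the iterator so it can keep being used after the loop, just as the tokenizer does. A self-contained sketch of that behavior (not the tokenizer itself):

fn main() {
    let mut it = "abc\ndef".chars();
    // Consume characters up to and including the first newline.
    for c in it.by_ref() {
        if c == '\n' {
            break;
        }
    }
    // The same iterator resumes where the loop stopped.
    let rest: String = it.collect();
    println!("{}", rest); // prints "def"
}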