Compare commits

7 Commits

Author  SHA1        Message                                        Date
greg    d7baf065fb  Changing what method to call to start parsing  2018-10-20 15:41:09 -07:00
greg    6b42f8b8de  Change how parsing works                       2018-10-20 14:27:00 -07:00
greg    d9e67a6341  Delete this old grammar file                   2018-10-20 11:43:12 -07:00
greg    7de536ade0  Install failure crate                          2018-10-20 11:17:18 -07:00
greg    f62b4c6906  Change format of error msg                     2018-10-20 11:14:40 -07:00
greg    4679a9fc7f  Remove compiler warnings                       2018-10-20 00:55:37 -07:00
greg    c25354b2c7  Get rid of typechecking code (for now)         2018-10-20 00:41:56 -07:00
                    (I'm tired of seeing the errors. See branch last_commit_with_typechecking)
10 changed files with 71 additions and 576 deletions

Grammar    31 deletions    View File

@@ -1,31 +0,0 @@
<program> := <statements> EOF
<statements> := <statement>
| <statement> SEP <statements>
<statement> := let <id> = <expr>
| <expr>
| <fn_block>
<fn_block> := fn <id> ( <arg_list> ) <statements> end
<arg_list> := e
| <id>
| <id> , <arg_list>
<expr> := if <expr> then <statements> end
| if <expr> then <statements> else <statements> end
| while <expr> SEP <statements> end
| ( <expr> )
| <binop>
<binop> := <simple_expr>
| <simple_expr> <id> <binop>
<simple_expr> := <id>
| <number>
| <string>
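
This Grammar file is the stale BNF sketch being removed (per the "Delete this old grammar file" commit), but its productions still show the shape a recursive-descent parser mirrors: each nonterminal becomes one parse method. Below is a minimal, self-contained Rust sketch for the `<arg_list>` production; the token and parser types are purely illustrative and are not the project's actual `Token`/`Parser`.

```rust
// Illustrative-only token and parser types (not from this repo).
#[derive(Clone, Debug)]
enum Tok { Id(String), Comma, RParen }

struct MiniParser { toks: Vec<Tok>, pos: usize }

impl MiniParser {
    fn peek(&self) -> Option<&Tok> { self.toks.get(self.pos) }

    // <arg_list> := e | <id> | <id> , <arg_list>
    fn arg_list(&mut self) -> Vec<String> {
        let mut args = Vec::new();
        while let Some(Tok::Id(name)) = self.peek().cloned() {
            args.push(name);
            self.pos += 1;
            if let Some(Tok::Comma) = self.peek().cloned() {
                self.pos += 1; // consume ',' and look for another <id>
            } else {
                break;         // empty production / end of the list
            }
        }
        args
    }
}

fn main() {
    let mut p = MiniParser {
        toks: vec![Tok::Id("a".into()), Tok::Comma, Tok::Id("b".into()), Tok::RParen],
        pos: 0,
    };
    assert_eq!(p.arg_list(), vec!["a".to_string(), "b".to_string()]);
}
```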

View File

@@ -45,7 +45,7 @@ impl Fold for RecursiveDescentFn {
}
#[proc_macro_attribute]
-pub fn recursive_descent_method(attr: TokenStream, item: TokenStream) -> TokenStream {
+pub fn recursive_descent_method(_attr: TokenStream, item: TokenStream) -> TokenStream {
let input: syn::ItemFn = parse_macro_input!(item as syn::ItemFn);
let mut folder = RecursiveDescentFn {};
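
The same pattern repeats throughout this compare (here and in the eval.rs and reduced_ast.rs hunks): bindings that are no longer used get a leading underscore, or are absorbed by a `..` rest pattern, so rustc stops emitting unused-variable warnings, matching the "Remove compiler warnings" commit. A minimal sketch of the idiom, using a hypothetical function not taken from this repo:

```rust
// Prefixing a parameter with `_` keeps the signature (and every call site)
// unchanged while telling rustc the value is intentionally unused.
fn describe_constructor(_type_name: &str, name: &str, arity: usize) -> String {
    // _type_name is accepted for compatibility but not consulted here.
    format!("{}/{}", name, arity)
}

fn main() {
    assert_eq!(describe_constructor("List", "Cons", 2), "Cons/2");
}
```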

View File

@@ -8,6 +8,8 @@ itertools = "0.5.8"
take_mut = "0.1.3"
maplit = "*"
lazy_static = "0.2.8"
+failure = "0.1.2"
schala-lang-codegen = { path = "../codegen" }
schala-repl = { path = "../../schala-repl" }
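
The "Install failure crate" commit pulls in `failure` 0.1.2. As a hedged sketch of how that crate is typically used (an ad-hoc `failure::Error` built with `bail!`/`format_err!`); the function below is hypothetical and not taken from this repo, and the diff doesn't show how the project itself wires the crate in:

```rust
#[macro_use] extern crate failure; // provides bail! and format_err!

use failure::Error;

// failure::Error is the crate's catch-all error type; std errors convert into it.
fn parse_number(s: &str) -> Result<u64, Error> {
    if s.is_empty() {
        bail!("empty input"); // early return with an ad-hoc error
    }
    s.parse::<u64>()
        .map_err(|e| format_err!("`{}` is not a number: {}", s, e))
}

fn main() {
    assert!(parse_number("42").is_ok());
    assert!(parse_number("").is_err());
}
```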

View File

@@ -106,7 +106,7 @@ impl Expr {
UserDefined { name: Some(name), .. } => format!("<function '{}'>", name),
},
Expr::Constructor {
-type_name: _, name, tag, arity,
+type_name: _, name, arity, ..
} => if *arity == 0 {
format!("{}", name)
} else {
@@ -208,7 +208,7 @@ impl<'a> State<'a> {
}
}
-fn apply_data_constructor(&mut self, type_name: Rc<String>, name: Rc<String>, tag: usize, arity: usize, args: Vec<Expr>) -> EvalResult<Node> {
+fn apply_data_constructor(&mut self, _type_name: Rc<String>, name: Rc<String>, tag: usize, arity: usize, args: Vec<Expr>) -> EvalResult<Node> {
if arity != args.len() {
return Err(format!("Data constructor {} requires {} args", name, arity));
}
@@ -353,7 +353,7 @@ impl<'a> State<'a> {
fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>) -> EvalResult<Node> {
match self.expression(Node::Expr(cond))? {
-Node::PrimObject { name, tag, items } => {
+Node::PrimObject { tag, items, .. } => {
for alt in alternatives {
if alt.tag.map(|t| t == tag).unwrap_or(true) {
let mut inner_state = State {
@@ -378,7 +378,7 @@ fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>)
return Err(format!("PrimObject failed pattern match"));
},
Node::PrimTuple { .. } => Err(format!("Tuples not implemented")), //TODO make a distinction between not yet implemented and an actual runtime error
-Node::Expr(e) => {
+Node::Expr(_e) => {
for alt in alternatives {
match (alt.guard, alt.tag) {
(Some(ref guard_expr), None) => {
@@ -436,17 +436,24 @@ fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>)
mod eval_tests {
use std::cell::RefCell;
use std::rc::Rc;
+use tokenizing::{Token, tokenize};
+use ::parsing::ParseResult;
+use ::ast::AST;
use symbol_table::SymbolTable;
-use tokenizing::tokenize;
-use parsing::parse;
use eval::State;
+fn parse(tokens: Vec<Token>) -> ParseResult<AST> {
+let mut parser = ::parsing::Parser::new(tokens);
+parser.parse()
+}
macro_rules! all_output {
($string:expr) => {
{
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
let mut state = State::new(symbol_table);
-let ast = parse(tokenize($string)).0.unwrap();
+let ast = parse(tokenize($string)).unwrap();
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
let reduced = ast.reduce(&state.symbol_table_handle.borrow());
let all_output = state.evaluate(reduced, true);

View File

@@ -44,7 +44,7 @@ mod eval;
pub struct Schala {
state: eval::State<'static>,
symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
-type_context: typechecking::TypeContext<'static>,
+active_parser: Option<parsing::Parser>,
}
impl Schala {
@@ -53,7 +53,7 @@ impl Schala {
}
fn handle_custom_interpreter_directives(&mut self, commands: &Vec<&str>) -> Option<String> {
Some(format!("You typed a command but I can't handle it"))
Some(format!("Schala-lang command: {:?} not supported", commands.get(0)))
}
}
@@ -62,8 +62,8 @@ impl Schala {
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
Schala {
symbol_table: symbols.clone(),
-type_context: typechecking::TypeContext::new(symbols.clone()),
state: eval::State::new(symbols),
+active_parser: None,
}
}
@@ -94,9 +94,17 @@ fn tokenizing(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedCom
}
}
-fn parsing(_handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
+fn parsing(handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
+use parsing::Parser;
+let mut parser = match handle.active_parser.take() {
+None => Parser::new(input),
+Some(parser) => parser
+};
+let ast = parser.parse();
+let trace = parser.format_parse_trace();
-let (ast, trace) = parsing::parse(input);
comp.map(|comp| {
//TODO need to control which of these debug stages get added
let opt = comp.cur_debug_options.get(0).map(|s| s.clone());
@@ -123,23 +131,8 @@ fn symbol_table(handle: &mut Schala, input: ast::AST, comp: Option<&mut Unfinish
}
}
-fn typechecking(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
-match handle.type_context.type_check_ast(&input) {
-Ok(ty) => {
-comp.map(|c| {
-c.add_artifact(TraceArtifact::new("type_table", format!("{}", handle.type_context.debug_types())));
-c.add_artifact(TraceArtifact::new("type_check", format!("{:?}", ty)));
-});
-Ok(input)
-},
-Err(msg) => {
-comp.map(|comp| {
-comp.add_artifact(TraceArtifact::new("type_table", format!("{}", handle.type_context.debug_types())));
-comp.add_artifact(TraceArtifact::new("type_check", format!("Type error: {:?}", msg)));
-});
-Ok(input)
-}
-}
+fn typechecking(_handle: &mut Schala, input: ast::AST, _comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
+Ok(input)
}
fn ast_reducing(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<reduced_ast::ReducedAST, String> {

View File

@@ -34,7 +34,7 @@ pub struct ParseRecord {
level: u32,
}
-struct Parser {
+pub struct Parser {
tokens: Peekable<IntoIter<Token>>,
parse_record: Vec<ParseRecord>,
parse_level: u32,
@@ -46,9 +46,9 @@ struct ParserRestrictions {
}
impl Parser {
-fn new(input: Vec<Token>) -> Parser {
+pub fn new(initial_input: Vec<Token>) -> Parser {
Parser {
-tokens: input.into_iter().peekable(),
+tokens: initial_input.into_iter().peekable(),
parse_record: vec![],
parse_level: 0,
restrictions: ParserRestrictions { no_struct_literal: false }
@@ -64,6 +64,26 @@ impl Parser {
fn next(&mut self) -> TokenType {
self.tokens.next().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
}
+pub fn parse(&mut self) -> ParseResult<AST> {
+self.program()
+}
+/*
+pub fn parse_with_new_tokens(&mut self, new_tokens: Vec<Token>) -> ParseResult<AST> {
+}
+*/
+pub fn format_parse_trace(self) -> Vec<String> {
+self.parse_record.into_iter().map(|r| {
+let mut indent = String::new();
+for _ in 0..r.level {
+indent.push(' ');
+}
+format!("{}Production `{}`, token: {}", indent, r.production_name, r.next_token)
+}).collect()
+}
}
macro_rules! print_token_pattern {
@@ -240,6 +260,7 @@ enumerator := identifier '<-' expression | identifier '=' expression //TODO add
*/
impl Parser {
+//TODO make this a proper public interface
#[recursive_descent_method]
fn program(&mut self) -> ParseResult<AST> {
let mut statements = Vec::new();
@@ -748,7 +769,7 @@ impl Parser {
let Expression(expr, _) = self.precedence_expr(precedence)?;
Guard::HalfExpr(HalfExpr { op: Some(op), expr })
},
-x => {
+_ => {
//TODO - I think there's a better way to do this involving the precedence of ->
let Expression(expr, _) = self.prefix_expr()?;
Guard::HalfExpr(HalfExpr { op: None, expr })
@@ -1034,24 +1055,11 @@ fn parse_hex(digits: String) -> ParseResult<u64> {
Ok(result)
}
-pub fn parse(input: Vec<Token>) -> (Result<AST, ParseError>, Vec<String>) {
-let mut parser = Parser::new(input);
-let ast = parser.program();
-let trace = parser.parse_record.into_iter().map(|r| {
-let mut indent = String::new();
-for _ in 0..r.level {
-indent.push(' ');
-}
-format!("{}Production `{}`, token: {}", indent, r.production_name, r.next_token)
-}).collect();
-(ast, trace)
-}
#[cfg(test)]
mod parse_tests {
use ::std::rc::Rc;
-use super::{parse, tokenize};
+use super::tokenize;
use super::ParseResult;
use builtin::{PrefixOp, BinOp};
use ast::{AST, Expression, Statement, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody};
use super::Statement::*;
@@ -1063,14 +1071,19 @@ mod parse_tests {
use super::Variant::*;
use super::ForBody::*;
+fn parse(tokens: Vec<::tokenizing::Token>) -> ParseResult<AST> {
+let mut parser = super::Parser::new(tokens);
+parser.parse()
+}
macro_rules! rc {
($string:tt) => { Rc::new(stringify!($string).to_string()) }
}
macro_rules! parse_test {
-($string:expr, $correct:expr) => { assert_eq!(parse(tokenize($string)).0.unwrap(), $correct) }
+($string:expr, $correct:expr) => { assert_eq!(parse(tokenize($string)).unwrap(), $correct) }
}
macro_rules! parse_error {
-($string:expr) => { assert!(parse(tokenize($string)).0.is_err()) }
+($string:expr) => { assert!(parse(tokenize($string)).is_err()) }
}
macro_rules! val {
($var:expr) => { Value(Rc::new($var.to_string())) }
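
Net effect of these parsing.rs hunks (together with the schala.rs pipeline change above): the old free `parse(input) -> (Result<AST, ParseError>, Vec<String>)` is removed, and callers now hold a `Parser`, which the REPL can keep alive between lines via `active_parser`. A hedged sketch of the new calling convention follows; the wrapper function is hypothetical, while `Token`, `Parser`, `ParseResult`, and `AST` are the crate's own types and are assumed to be in scope:

```rust
// Hypothetical helper mirroring what the schala.rs parsing pass now does:
// build (or reuse) a Parser, drive it with parse(), then consume it with
// format_parse_trace() to recover the indented production trace.
fn parse_with_trace(tokens: Vec<Token>) -> (ParseResult<AST>, Vec<String>) {
    let mut parser = Parser::new(tokens);     // `new` is now pub
    let ast = parser.parse();                 // wraps the program() entry point
    let trace = parser.format_parse_trace();  // consumes the parser
    (ast, trace)
}
```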

View File

@@ -143,7 +143,7 @@ impl Expression {
fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody, symbol_table: &SymbolTable) -> Expr {
let cond = Box::new(match *discriminator {
Discriminator::Simple(ref expr) => expr.reduce(symbol_table),
-Discriminator::BinOp(ref expr, ref binop) => panic!("Can't yet handle binop discriminators")
+Discriminator::BinOp(ref _expr, ref _binop) => panic!("Can't yet handle binop discriminators")
});
match *body {
IfExpressionBody::SimpleConditional(ref then_clause, ref else_clause) => {
@@ -234,7 +234,7 @@ impl Pattern {
let symbol = symbol_table.lookup_by_name(name).expect(&format!("Symbol {} not found", name));
handle_symbol(symbol, subpatterns, item)
},
-TuplePattern(items) => {
+TuplePattern(_items) => {
unimplemented!()
},
Record(_name, _pairs) => {

View File

@@ -1,493 +1,3 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use std::fmt::Write;
/*
use std::collections::hash_set::Union;
use std::iter::Iterator;
use itertools::Itertools;
*/
use ast;
use util::ScopeStack;
use symbol_table::{SymbolSpec, SymbolTable};
pub type TypeName = Rc<String>;
type TypeResult<T> = Result<T, String>;
#[derive(Debug, PartialEq, Clone)]
enum Type {
Const(TConst),
Var(TypeName),
Func(Vec<Type>),
}
#[derive(Debug, PartialEq, Clone)]
enum TConst {
Unit,
Nat,
StringT,
//Custom(String)
}
#[derive(Debug, PartialEq, Clone)]
struct Scheme {
names: Vec<TypeName>,
ty: Type,
}
impl fmt::Display for Scheme {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "∀{:?} . {:?}", self.names, self.ty)
}
}
#[derive(Debug, PartialEq, Clone)]
struct Substitution(HashMap<TypeName, Type>);
impl Substitution {
fn empty() -> Substitution {
Substitution(HashMap::new())
}
}
#[derive(Debug, PartialEq, Clone)]
struct TypeEnv(HashMap<TypeName, Scheme>);
impl TypeEnv {
fn default() -> TypeEnv {
TypeEnv(HashMap::new())
}
fn populate_from_symbols(&mut self, symbol_table: &SymbolTable) {
for (name, symbol) in symbol_table.values.iter() {
if let SymbolSpec::Func(ref type_names) = symbol.spec {
let mut ch: char = 'a';
let mut names = vec![];
for _ in type_names.iter() {
names.push(Rc::new(format!("{}", ch)));
ch = ((ch as u8) + 1) as char;
}
let sigma = Scheme {
names: names.clone(),
ty: Type::Func(names.into_iter().map(|n| Type::Var(n)).collect())
};
self.0.insert(name.clone(), sigma);
}
}
}
}
pub struct TypeContext<'a> {
values: ScopeStack<'a, TypeName, Type>,
symbol_table_handle: Rc<RefCell<SymbolTable>>,
global_env: TypeEnv
}
impl<'a> TypeContext<'a> {
pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> TypeContext<'static> {
TypeContext { values: ScopeStack::new(None), global_env: TypeEnv::default(), symbol_table_handle }
}
pub fn debug_types(&self) -> String {
let mut output = format!("Type environment\n");
for (name, scheme) in &self.global_env.0 {
write!(output, "{} -> {}\n", name, scheme).unwrap();
}
output
}
pub fn type_check_ast(&mut self, input: &ast::AST) -> Result<String, String> {
let ref symbol_table = self.symbol_table_handle.borrow();
self.global_env.populate_from_symbols(symbol_table);
let output = self.global_env.infer_block(&input.0)?;
Ok(format!("{:?}", output))
}
}
impl TypeEnv {
fn instantiate(&mut self, sigma: Scheme) -> Type {
match sigma {
Scheme { ty, .. } => ty,
}
}
fn generate(&mut self, ty: Type) -> Scheme {
Scheme {
names: vec![], //TODO incomplete
ty
}
}
fn infer_block(&mut self, block: &Vec<ast::Statement>) -> TypeResult<Type> {
let mut output = Type::Const(TConst::Unit);
for statement in block {
output = self.infer_statement(statement)?;
}
Ok(output)
}
fn infer_statement(&mut self, statement: &ast::Statement) -> TypeResult<Type> {
match statement {
ast::Statement::ExpressionStatement(expr) => self.infer_expr(expr),
ast::Statement::Declaration(decl) => self.infer_decl(decl)
}
}
fn infer_decl(&mut self, decl: &ast::Declaration) -> TypeResult<Type> {
use ast::Declaration::*;
match decl {
Binding { name, expr, .. } => {
let ty = self.infer_expr(expr)?;
let sigma = self.generate(ty);
self.0.insert(name.clone(), sigma);
},
_ => (),
}
Ok(Type::Const(TConst::Unit))
}
fn infer_expr(&mut self, expr: &ast::Expression) -> TypeResult<Type> {
match expr {
ast::Expression(expr, Some(anno)) => {
self.infer_exprtype(expr)
},
ast::Expression(expr, None) => {
self.infer_exprtype(expr)
}
}
}
fn infer_exprtype(&mut self, expr: &ast::ExpressionType) -> TypeResult<Type> {
use self::TConst::*;
use ast::ExpressionType::*;
Ok(match expr {
NatLiteral(_) => Type::Const(Nat),
StringLiteral(_) => Type::Const(StringT),
BinExp(op, lhs, rhs) => {
return Err(format!("NOTDONE"))
},
Call { f, arguments } => {
return Err(format!("NOTDONE"))
},
Value(name) => {
let s = match self.0.get(name) {
Some(sigma) => sigma.clone(),
None => return Err(format!("Unknown variable: {}", name))
};
self.instantiate(s)
},
_ => Type::Const(Unit)
})
}
}
/* GIANT TODO - use the rust im crate, unless I make this code way less haskell-ish after it's done
*/
/*
pub type TypeResult<T> = Result<T, String>;
*/
/* TODO this should just check the name against a map, and that map should be pre-populated with
* types */
/*
impl parsing::TypeName {
fn to_type(&self) -> TypeResult<Type> {
use self::parsing::TypeSingletonName;
use self::parsing::TypeName::*;
use self::Type::*; use self::TConstOld::*;
Ok(match self {
Tuple(_) => return Err(format!("Tuples not yet implemented")),
Singleton(name) => match name {
TypeSingletonName { name, .. } => match &name[..] {
/*
"Nat" => Const(Nat),
"Int" => Const(Int),
"Float" => Const(Float),
"Bool" => Const(Bool),
"String" => Const(StringT),
*/
n => Const(Custom(n.to_string()))
}
}
})
}
}
*/
/*
impl TypeContext {
pub fn type_check_ast(&mut self, ast: &parsing::AST) -> TypeResult<String> {
let ref block = ast.0;
let mut infer = Infer::default();
let env = TypeEnvironment::default();
let output = infer.infer_block(block, &env);
match output {
Ok(s) => Ok(format!("{:?}", s)),
Err(s) => Err(format!("Error: {:?}", s))
}
}
}
// this is the equivalent of the Haskell Infer monad
#[derive(Debug, Default)]
struct Infer {
_idents: u32,
}
#[derive(Debug)]
enum InferError {
CannotUnify(MonoType, MonoType),
OccursCheckFailed(Rc<String>, MonoType),
UnknownIdentifier(Rc<String>),
Custom(String),
}
type InferResult<T> = Result<T, InferError>;
impl Infer {
fn fresh(&mut self) -> MonoType {
let i = self._idents;
self._idents += 1;
let name = Rc::new(format!("{}", ('a' as u8 + 1) as char));
MonoType::Var(name)
}
fn unify(&mut self, a: MonoType, b: MonoType) -> InferResult<Substitution> {
use self::InferError::*; use self::MonoType::*;
Ok(match (a, b) {
(Const(ref a), Const(ref b)) if a == b => Substitution::new(),
(Var(ref name), ref var) => Substitution::bind_variable(name, var),
(ref var, Var(ref name)) => Substitution::bind_variable(name, var),
(Function(box a1, box b1), Function(box a2, box b2)) => {
let s1 = self.unify(a1, a2)?;
let s2 = self.unify(b1.apply_substitution(&s1), b2.apply_substitution(&s1))?;
s1.merge(s2)
},
(a, b) => return Err(CannotUnify(a, b))
})
}
fn infer_block(&mut self, block: &Vec<parsing::Statement>, env: &TypeEnvironment) -> InferResult<MonoType> {
use self::parsing::Statement;
let mut ret = MonoType::Const(TypeConst::Unit);
for statement in block.iter() {
ret = match statement {
Statement::ExpressionStatement(expr) => {
let (sub, ty) = self.infer_expr(expr, env)?;
//TODO handle substitution monadically
ty
}
Statement::Declaration(decl) => MonoType::Const(TypeConst::Unit),
}
}
Ok(ret)
}
fn infer_expr(&mut self, expr: &parsing::Expression, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
use self::parsing::Expression;
match expr {
Expression(e, Some(anno)) => self.infer_annotated_expr(e, anno, env),
/*
let anno_ty = anno.to_type()?;
let ty = self.infer_exprtype(&e)?;
self.unify(ty, anno_ty)
},
*/
Expression(e, None) => self.infer_exprtype(e, env)
}
}
fn infer_annotated_expr(&mut self, expr: &parsing::ExpressionType, anno: &parsing::TypeName, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
Err(InferError::Custom(format!("exprtype not done: {:?}", expr)))
}
fn infer_exprtype(&mut self, expr: &parsing::ExpressionType, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
use self::parsing::ExpressionType::*;
use self::TypeConst::*;
Ok(match expr {
NatLiteral(_) => (Substitution::new(), MonoType::Const(Nat)),
FloatLiteral(_) => (Substitution::new(), MonoType::Const(Float)),
StringLiteral(_) => (Substitution::new(), MonoType::Const(StringT)),
BoolLiteral(_) => (Substitution::new(), MonoType::Const(Bool)),
Value(name) => match env.lookup(name) {
Some(sigma) => {
let tau = self.instantiate(&sigma);
(Substitution::new(), tau)
},
None => return Err(InferError::UnknownIdentifier(name.clone())),
},
e => return Err(InferError::Custom(format!("Type inference for {:?} not done", e)))
})
}
fn instantiate(&mut self, sigma: &PolyType) -> MonoType {
let ref ty: MonoType = sigma.1;
let mut subst = Substitution::new();
for name in sigma.0.iter() {
let fresh_mvar = self.fresh();
let new = Substitution::bind_variable(name, &fresh_mvar);
subst = subst.merge(new);
}
ty.apply_substitution(&subst)
}
}
*/
/* OLD STUFF DOWN HERE */
/*
impl TypeContext {
fn infer_block(&mut self, statements: &Vec<parsing::Statement>) -> TypeResult<Type> {
let mut ret_type = Type::Const(TConst::Unit);
for statement in statements {
ret_type = self.infer_statement(statement)?;
}
Ok(ret_type)
}
fn infer_statement(&mut self, statement: &parsing::Statement) -> TypeResult<Type> {
use self::parsing::Statement::*;
match statement {
ExpressionStatement(expr) => self.infer(expr),
Declaration(decl) => self.add_declaration(decl),
}
}
fn add_declaration(&mut self, decl: &parsing::Declaration) -> TypeResult<Type> {
use self::parsing::Declaration::*;
use self::Type::*;
match decl {
Binding { name, expr, .. } => {
let ty = self.infer(expr)?;
self.bindings.insert(name.clone(), ty);
},
_ => return Err(format!("other formats not done"))
}
Ok(Void)
}
fn infer(&mut self, expr: &parsing::Expression) -> TypeResult<Type> {
use self::parsing::Expression;
match expr {
Expression(e, Some(anno)) => {
let anno_ty = anno.to_type()?;
let ty = self.infer_exprtype(&e)?;
self.unify(ty, anno_ty)
},
Expression(e, None) => self.infer_exprtype(e)
}
}
fn infer_exprtype(&mut self, expr: &parsing::ExpressionType) -> TypeResult<Type> {
use self::parsing::ExpressionType::*;
use self::Type::*; use self::TConst::*;
match expr {
NatLiteral(_) => Ok(Const(Nat)),
FloatLiteral(_) => Ok(Const(Float)),
StringLiteral(_) => Ok(Const(StringT)),
BoolLiteral(_) => Ok(Const(Bool)),
BinExp(op, lhs, rhs) => { /* remember there are both the haskell convention talk and the write you a haskell ways to do this! */
match op.get_type()? {
Func(box t1, box Func(box t2, box t3)) => {
let lhs_ty = self.infer(lhs)?;
let rhs_ty = self.infer(rhs)?;
self.unify(t1, lhs_ty)?;
self.unify(t2, rhs_ty)?;
Ok(t3)
},
other => Err(format!("{:?} is not a binary function type", other))
}
},
PrefixExp(op, expr) => match op.get_type()? {
Func(box t1, box t2) => {
let expr_ty = self.infer(expr)?;
self.unify(t1, expr_ty)?;
Ok(t2)
},
other => Err(format!("{:?} is not a prefix op function type", other))
},
Value(name) => {
match self.bindings.get(name) {
Some(ty) => Ok(ty.clone()),
None => Err(format!("No binding found for variable: {}", name)),
}
},
Call { f, arguments } => {
let mut tf = self.infer(f)?;
for arg in arguments.iter() {
match tf {
Func(box t, box rest) => {
let t_arg = self.infer(arg)?;
self.unify(t, t_arg)?;
tf = rest;
},
other => return Err(format!("Function call failed to unify; last type: {:?}", other)),
}
}
Ok(tf)
},
TupleLiteral(expressions) => {
let mut types = vec![];
for expr in expressions {
types.push(self.infer(expr)?);
}
Ok(Sum(types))
},
_ => Err(format!("Type not yet implemented"))
}
}
fn unify(&mut self, t1: Type, t2: Type) -> TypeResult<Type> {
use self::Type::*;// use self::TConst::*;
match (t1, t2) {
(Const(ref a), Const(ref b)) if a == b => Ok(Const(a.clone())),
(a, b) => Err(format!("Types {:?} and {:?} don't unify", a, b))
}
}
}
*/
#[cfg(test)]
mod tests {
/*
use super::{Type, TConst, TypeContext};
use super::Type::*;
use super::TConst::*;
use std::rc::Rc;
use std::cell::RefCell;
macro_rules! type_test {
($input:expr, $correct:expr) => {
{
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
let mut tc = TypeContext::new(symbol_table);
let ast = ::ast::parse(::tokenizing::tokenize($input)).0.unwrap() ;
//tc.add_symbols(&ast);
assert_eq!($correct, tc.infer_block(&ast.0).unwrap())
}
}
}
#[test]
fn basic_inference() {
type_test!("30", Const(Nat));
//type_test!("fn x(a: Int): Bool {}; x(1)", TConst(Boolean));
}
*/
}

View File

@@ -35,6 +35,7 @@ impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {
scope_name: name,
}
}
+#[allow(dead_code)]
pub fn get_name(&self) -> Option<&String> {
self.scope_name.as_ref()
}

View File

@@ -217,8 +217,8 @@ macro_rules! pass_chain_helper {
}
match output {
Ok(result) => pass_chain_helper! { ($state, $comp, $eval_options); result $(, $rest)* },
-Err(err) => {
-$comp.output(Err(format!("Pass {} failed with {:?}", pass_name, err)))
+Err(err) => { //TODO this error type needs to be guaranteed to provide a useable string
+$comp.output(Err(format!("Pass {} failed:\n{}", pass_name, err)))
}
}
}