Compare commits: failure_st...working_on
76 commits. Commit SHA1s:

98de15c07d, 0246e510ca, 71dacc94d6, 4ded241c82, 1a934d7804, 627a740b0d, 401d5aabd6, f79125e9df,
4ad5739615, 654e326c40, e00948cad9, 0af6fed505, 1f527f7949, 8680c4faf6, b198984fc5, 58779f8470,
a0fa50392c, d357876b16, e42f0c644c, 2ec7bf3b9a, 5147e1a3eb, 955c073174, 7c46a29141, 0adc761e72,
b2039a7b67, b4c4531e4d, 2d36ad44d6, 21132a369c, ff0294c56e, bc80c8f9ad, e39356c0e5, d44bb02d61,
9056e9b0e1, e9b90412ce, 65c47c20fc, fab3fb8ec2, 0d5ccd21fe, 69b7b9f528, 9a09f40222, 020819550b,
15f9dbe7a6, 836bed1207, cee5b085d5, 837a55c718, f4f89b39b6, c6b4ed7ee4, be425860af, 17e88b33f2,
47f7eb1ef6, 72d0cfe466, cea2f63b44, eec315dd58, 1e9aa91c5d, 9813609ad7, 5953d9d815, a74e09c761,
ad53d4394b, 151246e1c5, 77d2826918, 1bd48ed5db, c394b81746, ec29077247, 62043ac2d1, bada386979,
e71d404071, cab4702bd6, ec5a9d457e, bfbc1580aa, 2d6c9010b9, f4ff92302f, e88ed97b06, b8df09e956,
d7f0147a4f, f883512882, 37070a6b3e, ffe7deb00a
README.md (31 changes)

@@ -1,21 +1,21 @@
 # Schala - a programming language meta-interpreter
 
-Schala is a Rust framework written to make it easy to
-create and experiment with toy programming languages. It provides
-a common REPL, and a trait `ProgrammingLanguage` with provisions
-for tokenizing text, parsing tokens, evaluating an abstract syntax tree,
-and other tasks that are common to all programming languages.
+Schala is a Rust framework written to make it easy to create and experiment
+with toy programming languages. It provides a cross-language REPL and
+provisions for tokenizing text, parsing tokens, evaluating an abstract syntax
+tree, and other tasks that are common to all programming languages.
 
-Schala is implemented as a Rust library `schala_lib`, which provides a
-`schala_main` function. This function serves as the main loop of the REPL, if run
-interactively, or otherwise reads and interprets programming language source
-files. It expects as input a vector of `PLIGenerator`, which is a type representing
-a closure that returns a boxed trait object that implements the `ProgrammingLanguage` trait,
-and stores any persistent state relevant to that programming language. The ability
-to share state between different programming languages is in the works.
+Schala is implemented as a Rust library `schala-repl`, which provides a
+function `repl_main` meant to be used as the equivalent of main() for library
+users. This function parses command-line arguments and either runs an interactive
+REPL or interprets a program non-interactively.
 
-## About
+Individual programming language implementations are Rust types that implement
+the `ProgrammingLanguageInterface` trait and store whatever persistent state is
+relevant to that language. The ability to share state between different
+programming languages is in the works.
+
+## History
 
 Schala started out life as an experiment in writing a Javascript-like
 programming language that would never encounter any kind of runtime value
@@ -60,6 +60,7 @@ of learning how to write a programming language.
 https://skillsmatter.com/skillscasts/10868-inside-the-rust-compiler
 https://www.youtube.com/watch?v=il3gD7XMdmA
 http://dev.stephendiehl.com/fun/006_hindley_milner.html
+https://rust-lang-nursery.github.io/rustc-guide/type-inference.html
 
 ### Evaluation
 *Understanding Computation*, Tom Stuart, O'Reilly 2013
@@ -69,6 +70,7 @@ http://dev.stephendiehl.com/fun/006_hindley_milner.html
 ### Parsing
 http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
 https://soc.github.io/languages/unified-condition-syntax
+http://www.lihaoyi.com/post/ZeroOverheadTreeProcessingwiththeVisitorPattern.html?a=1
 
 [Crafting Interpreters](http://www.craftinginterpreters.com/)
 
@@ -77,4 +79,5 @@ http://blog.ulysse.io/2016/07/03/llvm-getting-started.html
 
 ###Rust resources
 https://thefullsnack.com/en/rust-for-the-web.html
+
 https://rocket.rs/guide/getting-started/
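The revised README above describes a split between the `schala-repl` library, whose `repl_main` acts as the effective main() for library users, and individual languages that are Rust types implementing the `ProgrammingLanguageInterface` trait. As a rough illustration of that shape, here is a minimal, self-contained sketch. The trait definition and the entry-point signature below are simplified assumptions made up for this example; only the names `ProgrammingLanguageInterface` and `repl_main` come from the README text, and the real `schala-repl` API may differ substantially.

// Illustrative sketch only: a simplified stand-in for the kind of trait the README describes.
trait ProgrammingLanguageInterface {
    /// Human-readable name shown at the REPL prompt.
    fn language_name(&self) -> String;
    /// Run one unit of source text, returning printable output or an error message.
    fn execute(&mut self, source: &str) -> Result<String, String>;
}

/// A trivial toy "language" whose only persistent state is a line counter.
struct EchoLang {
    lines_seen: usize,
}

impl ProgrammingLanguageInterface for EchoLang {
    fn language_name(&self) -> String {
        "echo".to_string()
    }
    fn execute(&mut self, source: &str) -> Result<String, String> {
        self.lines_seen += 1;
        Ok(format!("[{}] {}", self.lines_seen, source))
    }
}

// Stand-in for a repl_main-style entry point: read lines, feed them to the
// registered language implementation, print the result.
fn repl_main(lang: &mut dyn ProgrammingLanguageInterface) {
    use std::io::{self, BufRead, Write};
    let stdin = io::stdin();
    print!("{}> ", lang.language_name());
    io::stdout().flush().unwrap();
    for line in stdin.lock().lines() {
        let line = line.unwrap();
        match lang.execute(&line) {
            Ok(out) => println!("{}", out),
            Err(e) => eprintln!("error: {}", e),
        }
        print!("{}> ", lang.language_name());
        io::stdout().flush().unwrap();
    }
}

fn main() {
    let mut lang = EchoLang { lines_seen: 0 };
    repl_main(&mut lang);
}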
TODO.md (16 changes)

@@ -1,6 +1,19 @@
 # TODO Items
 
+-Plan of attack:
+-write a visitor pattern for AST
+-convert AST type to including SourceMap'd wrappers (w/ .into())
+-at the same time, amke sure the visitor pattern "skips over" the SourceMap'd stuff
+ so it can just care about AST structure
+
+- AST : maybe replace the Expression type with "Ascription(TypeName, Box<Expression>) nodes??
+- parser: add a "debug" field to the Parser struct for all debug-related things
+
+-scala-style html"dfasfsadf${}" string interpolations!
+
+-fuzz test schala
+
 
 *A neat idea for pattern matching optimization would be if you could match on one of several things in a list
 ex:
@@ -101,10 +114,7 @@ type enum {
 
 
 
-- AST : maybe replace the Expression type with "Ascription(TypeName, Box<Expression>) nodes??
-- parser: add a "debug" field to the Parser struct for all debug-related things
 
--scala-style html"dfasfsadf${}" string interpolations!
 
 *Compiler passes architecture
 
schala-lang/language/src/ast.rs

@@ -1,5 +1,7 @@
 use std::rc::Rc;
+use std::convert::From;
 
+use source_map::{SourceMap};
 use builtin::{BinOp, PrefixOp};
 
 #[derive(Debug, PartialEq)]
@@ -63,6 +65,12 @@ pub enum Variant {
 #[derive(Debug, PartialEq, Clone)]
 pub struct Expression(pub ExpressionType, pub Option<TypeIdentifier>);
 
+impl From<Expression> for SourceMap<Expression> {
+  fn from(node: Expression) -> Self {
+    SourceMap { node, data: None }
+  }
+}
+
 #[derive(Debug, PartialEq, Clone)]
 pub enum TypeIdentifier {
   Tuple(Vec<TypeIdentifier>),
@@ -111,10 +119,18 @@ pub enum ExpressionType {
   },
   Lambda {
     params: Vec<FormalParam>,
+    type_anno: Option<TypeIdentifier>,
    body: Block,
   },
   ListLiteral(Vec<Expression>),
 }
 
+impl From<ExpressionType> for SourceMap<ExpressionType> {
+  fn from(node: ExpressionType) -> Self {
+    SourceMap { node, data: None }
+  }
+}
+
 #[derive(Debug, PartialEq, Clone)]
 pub enum Discriminator {
   Simple(Expression),
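These hunks add `From` impls so that a bare AST node can be converted into its `SourceMap`-wrapped form with `.into()`, which is the mechanism the new TODO item ("convert AST type to including SourceMap'd wrappers (w/ .into())") refers to. The sketch below models that pattern in isolation: the wrapper fields mirror what the hunks show (`node` plus `data: Option<SourceData>`), but everything else is redefined here only so the example stands alone, and the real `source_map` module may differ.

// Self-contained sketch of the SourceMap wrapping pattern enabled by the ast.rs hunks above.
#[derive(Debug, PartialEq)]
struct SourceData {
    line_number: usize,
    char_idx: usize,
}

#[derive(Debug, PartialEq)]
struct SourceMap<T> {
    node: T,
    data: Option<SourceData>,
}

#[derive(Debug, PartialEq)]
enum Expression {
    NatLiteral(u64),
}

// Same shape as the `impl From<Expression> for SourceMap<Expression>` in the diff:
// wrap a bare AST node, with source data initially absent.
impl From<Expression> for SourceMap<Expression> {
    fn from(node: Expression) -> Self {
        SourceMap { node, data: None }
    }
}

fn main() {
    // `.into()` is what the TODO item refers to: convert plain AST nodes into their
    // SourceMap'd wrappers without spelling out the wrapper type at every call site.
    let wrapped: SourceMap<Expression> = Expression::NatLiteral(42).into();
    assert_eq!(wrapped.data, None);
    println!("{:?}", wrapped);
}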
schala-lang/language/src/ast_visitor.rs (157 additions, new file)

@@ -0,0 +1,157 @@
+use std::rc::Rc;
+
+use builtin::{PrefixOp, BinOp};
+use ast::*;
+
+pub fn dispatch<V: ASTVisitor>(visitor: &mut V, ast: &AST) {
+  for statement in ast.0.iter() {
+    match statement {
+      Statement::ExpressionStatement(e) => {
+        dispatch_expression(visitor, e);
+        visitor.expression(e);
+      },
+      Statement::Declaration(decl) => {
+        dispatch_declaration(visitor, decl);
+        visitor.declaration(decl);
+      },
+    };
+    visitor.statement(statement);
+  }
+  visitor.ast(ast)
+}
+
+fn dispatch_expression<V: ASTVisitor>(visitor: &mut V, expression: &Expression) {
+  match expression {
+    Expression(expr, maybe_anno) => {
+      match expr {
+        ExpressionType::NatLiteral(n) => visitor.nat_literal(n),
+        ExpressionType::FloatLiteral(f) => visitor.float_literal(f),
+        ExpressionType::StringLiteral(s) => visitor.string_literal(s),
+        ExpressionType::BoolLiteral(b) => visitor.bool_literal(b),
+        ExpressionType::BinExp(binop, lhs, rhs) => visitor.binop(binop, lhs, rhs),
+        ExpressionType::PrefixExp(prefix, expr) => visitor.prefixop(prefix, expr),
+        ExpressionType::TupleLiteral(v) => visitor.tuple_literal(v),
+        ExpressionType::Value(v) => visitor.value(v),
+        ExpressionType::NamedStruct { name, fields } => visitor.named_struct(name, fields),
+        ExpressionType::Call { f, arguments } => visitor.call(f, arguments),
+        ExpressionType::Index { indexee, indexers } => visitor.index(indexee, indexers),
+        ExpressionType::IfExpression { discriminator, body } => visitor.if_expression(discriminator, body),
+        ExpressionType::WhileExpression { condition, body } => visitor.while_expresssion(condition, body),
+        ExpressionType::ForExpression { enumerators, body } => visitor.for_expression(enumerators, body),
+        ExpressionType::Lambda { params, type_anno, body } => visitor.lambda_expression(params, type_anno, body),
+        ExpressionType::ListLiteral(items) => visitor.list_literal(items),
+      }
+      visitor.anno_expr(maybe_anno);
+      visitor.expr_kind(expr);
+    }
+  }
+}
+
+fn dispatch_declaration<V: ASTVisitor>(visitor: &mut V, declaration: &Declaration) {
+  match declaration {
+    Declaration::FuncSig(sig) => visitor.func_signature(sig),
+    Declaration::FuncDecl(sig, block) => visitor.func_declaration(sig, block),
+    Declaration::TypeDecl { name, body, mutable } => visitor.type_declaration(name, body, mutable),
+    Declaration::TypeAlias(alias, name) => visitor.type_alias(alias, name),
+    Declaration::Binding { name, constant, expr} => visitor.binding(name, constant, expr),
+    Declaration::Impl { type_name, interface_name, block } => visitor.impl_block(type_name, interface_name, block),
+    Declaration::Interface { name, signatures } => visitor.interface(name, signatures),
+  }
+}
+
+pub trait ASTVisitor {
+  fn ast(&mut self, _ast: &AST) { }
+  fn statement(&mut self, _stmt: &Statement) { }
+  fn expression(&mut self, _expr: &Expression) { }
+  fn anno_expr(&mut self, &Option<TypeIdentifier>) { }
+  fn expr_kind(&mut self, _expr: &ExpressionType) { }
+
+  fn nat_literal(&mut self, _n: &u64) { }
+  fn float_literal(&mut self, _f: &f64) { }
+  fn string_literal(&mut self, _s: &Rc<String>) { }
+  fn bool_literal(&mut self, _bool: &bool) { }
+  fn binop(&mut self, _binop: &BinOp, _lhs: &Expression, _rhs: &Expression) { }
+  fn prefixop(&mut self, prefix: &PrefixOp, _expr: &Expression) { }
+  fn tuple_literal(&mut self, _v: &Vec<Expression>) { }
+  fn value(&mut self, _v: &Rc<String>) { }
+  fn named_struct(&mut self, _name: &Rc<String>, _values: &Vec<(Rc<String>, Expression)>) { }
+  fn call(&mut self, _f: &Box<Expression>, _arguments: &Vec<Expression>) { }
+  fn index(&mut self, _indexee: &Box<Expression>, _indexers: &Vec<Expression>) { }
+  fn if_expression(&mut self, _discriminator: &Discriminator, _body: &IfExpressionBody) { }
+  fn while_expresssion(&mut self, _condition: &Option<Box<Expression>>, body: &Block) { }
+  fn for_expression(&mut self, _enumerators: &Vec<Enumerator>, _body: &Box<ForBody>) { }
+  fn lambda_expression(&mut self, _params: &Vec<FormalParam>, type_anno: &Option<TypeIdentifier>, body: &Block) { }
+  fn list_literal(&mut self, _items: &Vec<Expression>) { }
+
+  fn declaration(&mut self, _decl: &Declaration) { }
+  fn func_signature(&mut self, _sig: &Signature) { }
+  fn func_declaration(&mut self, _sig: &Signature, _block: &Vec<Statement>) { }
+  fn type_declaration(&mut self, _name: &TypeSingletonName, _body: &TypeBody, _mutable: &bool) { }
+  fn type_alias(&mut self, _alias: &Rc<String>, _name: &Rc<String>) { }
+  fn binding(&mut self, _name: &Rc<String>, _constant: &bool, _expr: &Expression) { }
+  fn impl_block(&mut self, _type_name: &TypeIdentifier, _interface_name: &Option<InterfaceName>, _block: &Vec<Declaration>) { }
+  fn interface(&mut self, name: &Rc<String>, signatures: &Vec<Signature>) { }
+}
+
+#[derive(Clone)]
+struct SchalaPrinter {
+  s: String
+}
+
+impl SchalaPrinter {
+  fn new() -> SchalaPrinter {
+    SchalaPrinter {
+      s: format!("Schala source code:\n"),
+    }
+  }
+
+  fn done(self) -> String {
+    self.s
+  }
+}
+
+impl ASTVisitor for SchalaPrinter {
+  fn statement(&mut self, _: &Statement) {
+    self.s.push_str("\n");
+  }
+
+  fn expression(&mut self, _: &Expression) {
+    self.s.push_str("some_expr");
+  }
+
+  fn binding(&mut self, name: &Rc<String>, constant: &bool, _expr: &Expression) {
+    self.s.push_str(&format!("let{} {} = {}",
+      if *constant { "" } else { " mut" },
+      name,
+      "some_expr"));
+  }
+}
+
+#[cfg(test)]
+mod visitor_tests {
+  use ::tokenizing::{Token, tokenize};
+  use ::parsing::ParseResult;
+  use ::ast::AST;
+  use super::*;
+
+  fn parse(input: &str) -> ParseResult<AST> {
+    let tokens = tokenize(input);
+    let mut parser = ::parsing::Parser::new(tokens);
+    parser.parse()
+  }
+
+  #[test]
+  fn test() {
+    let ast = parse("let a = 1 + 2; let b = 2 + 44;foo()").unwrap();
+    let mut pp = SchalaPrinter::new();
+    dispatch(&mut pp, &ast);
+    let result = pp.done();
+    assert_eq!(result, r#"Schala source code:
+let a = 1 + 2
+let b = 2 + 44
+foo()
+"#);
+  }
+}
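Since every method on the new `ASTVisitor` trait has an empty default body, a concrete visitor only overrides the hooks it cares about, and `dispatch` walks the tree and invokes them. As a hedged illustration of a second visitor alongside `SchalaPrinter` (this sketch assumes it lives inside `ast_visitor.rs`, next to the code above, and is not part of the actual diff), a simple node counter could look like this:

// Sketch: another visitor against the ASTVisitor trait introduced above.
// Only the two hooks we care about are overridden; everything else keeps
// the trait's empty default implementation.
struct NodeCounter {
  expressions: usize,
  declarations: usize,
}

impl ASTVisitor for NodeCounter {
  fn expression(&mut self, _expr: &Expression) {
    self.expressions += 1;
  }
  fn declaration(&mut self, _decl: &Declaration) {
    self.declarations += 1;
  }
}

// Usage would mirror the visitor_tests module above:
//   let ast = parse("let a = 1 + 2; foo()").unwrap();
//   let mut counter = NodeCounter { expressions: 0, declarations: 0 };
//   dispatch(&mut counter, &ast);
//   // counter.expressions and counter.declarations now hold per-kind totals.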
schala-lang/language/src/builtin.rs

@@ -2,7 +2,7 @@ use std::rc::Rc;
 use std::collections::HashMap;
 use std::fmt;
 
-use tokenizing::TokenType;
+use tokenizing::TokenKind;
 use self::BuiltinTypeSpecifier::*;
 use self::BuiltinTConst::*;
 
@@ -40,8 +40,8 @@ impl BinOp {
   pub fn sigil(&self) -> &Rc<String> {
     &self.sigil
   }
-  pub fn from_sigil_token(tok: &TokenType) -> Option<BinOp> {
-    use self::TokenType::*;
+  pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
+    use self::TokenKind::*;
     let s = match tok {
       Operator(op) => op,
       Period => ".",
@@ -62,8 +62,8 @@ impl BinOp {
   pub fn min_precedence() -> i32 {
     i32::min_value()
   }
-  pub fn get_precedence_from_token(op: &TokenType) -> Option<i32> {
-    use self::TokenType::*;
+  pub fn get_precedence_from_token(op: &TokenKind) -> Option<i32> {
+    use self::TokenKind::*;
     let s = match op {
       Operator(op) => op,
       Period => ".",
schala-lang/language/src/eval.rs

@@ -7,7 +7,7 @@ use std::io;
 use itertools::Itertools;
 
 use util::ScopeStack;
-use reduced_ast::{ReducedAST, Stmt, Expr, Lit, Func, Alternative};
+use reduced_ast::{BoundVars, ReducedAST, Stmt, Expr, Lit, Func, Alternative, Subpattern};
 use symbol_table::{SymbolSpec, Symbol, SymbolTable};
 
 pub struct State<'a> {
@@ -21,7 +21,6 @@ macro_rules! builtin_binding {
   }
 }
 
-//TODO add a more concise way of getting a new frame
 impl<'a> State<'a> {
   pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> State<'a> {
     let mut values = ScopeStack::new(Some(format!("global")));
@@ -34,6 +33,19 @@ impl<'a> State<'a> {
   pub fn debug_print(&self) -> String {
     format!("Values: {:?}", self.values)
   }
+
+  fn new_frame(&'a self, items: &'a Vec<Node>, bound_vars: &BoundVars) -> State<'a> {
+    let mut inner_state = State {
+      values: self.values.new_scope(None),
+      symbol_table_handle: self.symbol_table_handle.clone(),
+    };
+    for (bound_var, val) in bound_vars.iter().zip(items.iter()) {
+      if let Some(bv) = bound_var.as_ref() {
+        inner_state.values.insert(bv.clone(), ValueEntry::Binding { constant: true, val: val.clone() });
+      }
+    }
+    inner_state
+  }
 }
 
 #[derive(Debug, Clone)]
@@ -72,6 +84,12 @@ impl Node {
       Node::PrimTuple { items } => format!("{}", paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
     }
   }
+  fn is_true(&self) -> bool {
+    match self {
+      Node::Expr(Expr::Lit(::reduced_ast::Lit::Bool(true))) => true,
+      _ => false,
+    }
+  }
 }
 
 #[derive(Debug)]
@@ -116,6 +134,23 @@ impl Expr {
       _ => format!("{:?}", self),
     }
   }
+
+  fn replace_conditional_target_sigil(self, replacement: &Expr) -> Expr {
+    use self::Expr::*;
+
+    match self {
+      ConditionalTargetSigilValue => replacement.clone(),
+      Unit | Lit(_) | Func(_) | Val(_) | Constructor { .. } |
+        CaseMatch { .. } | UnimplementedSigilValue => self,
+      Tuple(exprs) => Tuple(exprs.into_iter().map(|e| e.replace_conditional_target_sigil(replacement)).collect()),
+      Call { f, args } => {
+        let new_args = args.into_iter().map(|e| e.replace_conditional_target_sigil(replacement)).collect();
+        Call { f, args: new_args }
+      },
+      Conditional { .. } => panic!("Dunno if I need this, but if so implement"),
+      Assign { .. } => panic!("I'm pretty sure I don't need this"),
+    }
+  }
 }
 
 impl<'a> State<'a> {
@@ -194,7 +229,8 @@ impl<'a> State<'a> {
       Assign { box val, box expr } => self.assign_expression(val, expr),
      Unit => Ok(Node::Expr(Unit)),
       CaseMatch { box cond, alternatives } => self.case_match_expression(cond, alternatives),
-      UnimplementedSigilValue => Err(format!("Sigil value eval not implemented"))
+      ConditionalTargetSigilValue => Ok(Node::Expr(ConditionalTargetSigilValue)),
+      UnimplementedSigilValue => Err(format!("Sigil value eval not implemented")),
     }
   }
 }
@@ -351,52 +387,76 @@ impl<'a> State<'a> {
     Ok(Node::Expr(Expr::Unit))
   }
 
-  fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>) -> EvalResult<Node> {
-    match self.expression(Node::Expr(cond))? {
-      Node::PrimObject { tag, items, .. } => {
-        for alt in alternatives {
-          if alt.tag.map(|t| t == tag).unwrap_or(true) {
-            let mut inner_state = State {
-              values: self.values.new_scope(None),
-              symbol_table_handle: self.symbol_table_handle.clone(),
-            };
-            for (bound_var, val) in alt.bound_vars.iter().zip(items.iter()) {
-              if let Some(bv) = bound_var.as_ref() {
-                inner_state.values.insert(bv.clone(), ValueEntry::Binding { constant: true, val: val.clone() });
-              }
-            }
-
-            if let Some(guard_expr) = alt.guard {
-              let evaled_guard = inner_state.expression(guard_expr.to_node());
-              println!("EVALED GUARD: {:?}", evaled_guard);
-              //continue
-            }
-
-            return inner_state.block(alt.item)
-          }
-        }
-        return Err(format!("PrimObject failed pattern match"));
-      },
-      Node::PrimTuple { .. } => Err(format!("Tuples not implemented")), //TODO make a distinction between not yet implemented and an actual runtime error
-      Node::Expr(_e) => {
-        for alt in alternatives {
-          match (alt.guard, alt.tag) {
-            (Some(ref guard_expr), None) => {
-              match self.expression(guard_expr.clone().to_node())? {
-                Node::Expr(Expr::Lit(::reduced_ast::Lit::Bool(true))) =>
-                  return self.block(alt.item),
-                _ => continue,
-              }
-            },
-            (None, None) => return self.block(alt.item),
-            _ => return Err(format!("Shouldn't match an expr against a pattern"))
-          }
-        }
-        return Err(format!("Expr Failed pattern match"));
-      }
-    }
-  }
+  fn guard_passes(&mut self, guard: &Option<Expr>, cond: &Node) -> EvalResult<bool> {
+    if let Some(ref guard_expr) = guard {
+      let guard_expr = match cond {
+        Node::Expr(ref e) => guard_expr.clone().replace_conditional_target_sigil(e),
+        _ => guard_expr.clone()
+      };
+      Ok(self.expression(guard_expr.to_node())?.is_true())
+    } else {
+      Ok(true)
+    }
+  }
+
+  fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>) -> EvalResult<Node> {
+
+    //TODO need to handle recursive subpatterns
+    let all_subpatterns_pass = |state: &mut State, subpatterns: &Vec<Option<Subpattern>>, items: &Vec<Node>| -> EvalResult<bool> {
+
+      if subpatterns.len() == 0 {
+        return Ok(true)
+      }
+
+      if items.len() != subpatterns.len() {
+        return Err(format!("Subpattern length isn't correct items {} subpatterns {}", items.len(), subpatterns.len()));
+      }
+
+      for (maybe_subp, cond) in subpatterns.iter().zip(items.iter()) {
+        if let Some(subp) = maybe_subp {
+          if !state.guard_passes(&subp.guard, &cond)? {
+            return Ok(false)
+          }
+        }
+      }
+      Ok(true)
+    };
+
+    let cond = self.expression(Node::Expr(cond))?;
+    for alt in alternatives {
+      // no matter what type of condition we have, ignore alternative if the guard evaluates false
+      if !self.guard_passes(&alt.guard, &cond)? {
+        continue;
+      }
+
+      match cond {
+        Node::PrimObject { ref tag, ref items, .. } => {
+          if alt.tag.map(|t| t == *tag).unwrap_or(true) {
+            let mut inner_state = self.new_frame(items, &alt.bound_vars);
+            if all_subpatterns_pass(&mut inner_state, &alt.subpatterns, items)? {
+              return inner_state.block(alt.item);
+            } else {
+              continue;
+            }
+          }
+        },
+        Node::PrimTuple { ref items } => {
+          let mut inner_state = self.new_frame(items, &alt.bound_vars);
+          if all_subpatterns_pass(&mut inner_state, &alt.subpatterns, items)? {
+            return inner_state.block(alt.item);
+          } else {
+            continue;
+          }
+        },
+        Node::Expr(ref _e) => {
+          if let None = alt.tag {
+            return self.block(alt.item)
+          }
+        }
+      }
+    }
+    Err(format!("{:?} failed pattern match", cond))
+  }
 
   fn value(&mut self, name: Rc<String>) -> EvalResult<Node> {
     use self::ValueEntry::*;
@@ -448,24 +508,20 @@ mod eval_tests {
     parser.parse()
   }
 
-  macro_rules! all_output {
-    ($string:expr) => {
-      {
-        let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
-        let mut state = State::new(symbol_table);
-        let ast = parse(tokenize($string)).unwrap();
-        state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
-        let reduced = ast.reduce(&state.symbol_table_handle.borrow());
-        let all_output = state.evaluate(reduced, true);
-        all_output
-      }
-    }
-  }
+  fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
+    let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
+    let mut state = State::new(symbol_table);
+    let ast = parse(tokenize(input)).unwrap();
+    state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
+    let reduced = ast.reduce(&state.symbol_table_handle.borrow());
+    let all_output = state.evaluate(reduced, true);
+    all_output
+  }
 
   macro_rules! test_in_fresh_env {
     ($string:expr, $correct:expr) => {
       {
-        let all_output = all_output!($string);
+        let all_output = evaluate_all_outputs($string);
         let ref output = all_output.last().unwrap();
         assert_eq!(**output, Ok($correct.to_string()));
       }
@@ -550,6 +606,15 @@ if a { is 15 -> "x", is 10 -> "y" }
     test_in_fresh_env!(source, "\"y\"");
   }
 
+  #[test]
+  fn string_pattern() {
+    let source = r#"
+let a = "foo"
+if a { is "foo" -> "x", is _ -> "y" }
+"#;
+    test_in_fresh_env!(source, "\"x\"");
+  }
+
   #[test]
   fn boolean_pattern() {
     let source = r#"
@@ -581,4 +646,102 @@ if Some(10) {
 "#;
     test_in_fresh_env!(source, "\"hella\"");
   }
+
+  #[test]
+  fn tuple_pattern() {
+    let source = r#"
+if (1, 2) {
+  is (1, x) -> x,
+  is _ -> 99
+}
+"#;
+    test_in_fresh_env!(source, 2);
+  }
+
+  #[test]
+  fn tuple_pattern_2() {
+    let source = r#"
+if (1, 2) {
+  is (10, x) -> x,
+  is (y, x) -> x + y
+}
+"#;
+    test_in_fresh_env!(source, 3);
+  }
+
+  #[test]
+  fn tuple_pattern_3() {
+    let source = r#"
+if (1, 5) {
+  is (10, x) -> x,
+  is (1, x) -> x
+}
+"#;
+    test_in_fresh_env!(source, 5);
+  }
+
+  #[test]
+  fn tuple_pattern_4() {
+    let source = r#"
+if (1, 5) {
+  is (10, x) -> x,
+  is (1, x) -> x,
+}
+"#;
+    test_in_fresh_env!(source, 5);
+  }
+
+  #[test]
+  fn prim_obj_pattern() {
+    let source = r#"
+type Stuff = Mulch(Nat) | Jugs(Nat, String) | Mardok
+let a = Mulch(20)
+let b = Jugs(1, "haha")
+let c = Mardok
+
+let x = if a {
+  is Mulch(20) -> "x",
+  is _ -> "ERR"
+}
+
+let y = if b {
+  is Mulch(n) -> "ERR",
+  is Jugs(2, _) -> "ERR",
+  is Jugs(1, s) -> s,
+  is _ -> "ERR",
+}
+
+let z = if c {
+  is Jugs(_, _) -> "ERR",
+  is Mardok -> "NIGH",
+  is _ -> "ERR",
+}
+
+(x, y, z)
+"#;
+    test_in_fresh_env!(source, r#"("x", "haha", "NIGH")"#);
+  }
+
+  #[test]
+  fn basic_lambda_syntax() {
+    let source = r#"
+let q = \(x, y) { x * y }
+let x = q(5,2)
+let y = \(m, n, o) { m + n + o }(1,2,3)
+(x, y)
+"#;
+    test_in_fresh_env!(source, r"(10, 6)");
+  }
+
+  #[test]
+  fn lambda_syntax_2() {
+    let source = r#"
+fn milta() {
+  \(x) { x + 33 }
+}
+milta()(10)
+"#;
+    test_in_fresh_env!(source, "43");
+  }
 }
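The reworked `case_match_expression` above now applies one uniform strategy per alternative: skip it if its guard fails, then dispatch on the shape of the scrutinee (tagged object, tuple, or plain expression), then require all subpatterns to pass before committing to that alternative's block, and otherwise fall through to the next alternative. The standalone model below mirrors only that control flow, with deliberately simplified stand-in types (pre-evaluated boolean guards, literal subpatterns); it is an illustration, not the eval.rs code, which threads interpreter state through each step.

// Standalone model of the control flow in the new case_match_expression.
#[derive(Clone, Debug, PartialEq)]
enum Value {
    Nat(u64),
    Tuple(Vec<Value>),
}

struct Alternative {
    guard: Option<bool>,            // None means "no guard", like alt.guard
    subpatterns: Vec<Option<u64>>,  // one slot per tuple item, None is a wildcard
    result: &'static str,
}

fn all_subpatterns_pass(subpatterns: &[Option<u64>], items: &[Value]) -> Result<bool, String> {
    if subpatterns.is_empty() {
        return Ok(true);
    }
    if items.len() != subpatterns.len() {
        return Err(format!("Subpattern length isn't correct items {} subpatterns {}", items.len(), subpatterns.len()));
    }
    Ok(subpatterns.iter().zip(items).all(|(sp, item)| match (sp, item) {
        (None, _) => true,
        (Some(want), Value::Nat(n)) => want == n,
        _ => false,
    }))
}

fn case_match(cond: &Value, alternatives: &[Alternative]) -> Result<&'static str, String> {
    for alt in alternatives {
        // 1. No matter the shape of the condition, a failing guard skips the alternative.
        if alt.guard == Some(false) {
            continue;
        }
        // 2. Dispatch on the shape of the condition, then require subpatterns to pass.
        match cond {
            Value::Tuple(items) => {
                if all_subpatterns_pass(&alt.subpatterns, items)? {
                    return Ok(alt.result);
                }
            }
            Value::Nat(_) => {
                if alt.subpatterns.is_empty() {
                    return Ok(alt.result);
                }
            }
        }
        // 3. Otherwise fall through and try the next alternative.
    }
    Err(format!("{:?} failed pattern match", cond))
}

fn main() {
    // Mirrors the tuple_pattern_3 test: (1, 5) skips `is (10, x)` and matches `is (1, x)`.
    let alts = vec![
        Alternative { guard: None, subpatterns: vec![Some(10), None], result: "first arm" },
        Alternative { guard: None, subpatterns: vec![Some(1), None], result: "second arm" },
    ];
    let outcome = case_match(&Value::Tuple(vec![Value::Nat(1), Value::Nat(5)]), &alts);
    assert_eq!(outcome, Ok("second arm"));
    println!("{:?}", outcome);
}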
schala-lang/language/src/lib.rs

@@ -2,12 +2,16 @@
 #![feature(custom_attribute)]
 #![feature(unrestricted_attribute_tokens)]
 #![feature(slice_patterns, box_patterns, box_syntax)]
+
+//! `schala-lang` is where the Schala programming language is actually implemented.
+//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
+//! `ProgrammingLanguageInterface` and the chain of compiler passes for it.
+
 extern crate itertools;
 #[macro_use]
 extern crate lazy_static;
 #[macro_use]
 extern crate maplit;
-#[macro_use]
 extern crate schala_repl;
 #[macro_use]
 extern crate schala_repl_codegen;
@@ -25,9 +29,11 @@ macro_rules! bx {
 }
 
 mod util;
+mod source_map;
 mod builtin;
 mod tokenizing;
 mod ast;
+mod ast_visitor;
 mod parsing;
 mod symbol_table;
 mod typechecking;
@@ -41,9 +47,13 @@ mod eval;
 #[PipelineSteps(tokenizing, parsing(compact,expanded,trace), symbol_table, typechecking, ast_reducing, eval)]
 #[DocMethod = get_doc]
 #[HandleCustomInterpreterDirectives = handle_custom_interpreter_directives]
+/// All bits of state necessary to parse and execute a Schala program are stored in this struct
+/// `state` represents the execution state for the AST-walking interpreter, the other fields
+/// should be self-explanatory.
 pub struct Schala {
   state: eval::State<'static>,
   symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
+  type_context: typechecking::TypeContext<'static>,
   active_parser: Option<parsing::Parser>,
 }
 
@@ -58,21 +68,21 @@ impl Schala {
   }
 
 impl Schala {
+  /// Creates a new Schala environment *without* any prelude.
   fn new_blank_env() -> Schala {
     let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
     Schala {
       symbol_table: symbols.clone(),
       state: eval::State::new(symbols),
+      type_context: typechecking::TypeContext::new(),
       active_parser: None,
     }
   }
 
+  /// Creates a new Schala environment with the standard prelude, which is defined as ordinary
+  /// Schala code in the file `prelude.schala`
   pub fn new() -> Schala {
-    let prelude = r#"
-type Option<T> = Some(T) | None
-type Color = Red | Green | Blue
-type Ord = LT | EQ | GT
-"#;
+    let prelude = include_str!("prelude.schala");
     let mut s = Schala::new_blank_env();
     s.execute_pipeline(prelude, &EvalOptions::default());
     s
@@ -82,7 +92,7 @@ type Ord = LT | EQ | GT
 fn tokenizing(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
   let tokens = tokenizing::tokenize(input);
   comp.map(|comp| {
-    let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
+    let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.kind, t.offset.0, t.offset.1)).join(", ");
     comp.add_artifact(TraceArtifact::new("tokens", token_string));
   });
 
@@ -131,7 +141,14 @@ fn symbol_table(handle: &mut Schala, input: ast::AST, comp: Option<&mut Unfinish
   }
 }
 
-fn typechecking(_handle: &mut Schala, input: ast::AST, _comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
+fn typechecking(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
+  let result = handle.type_context.typecheck(&input);
+
+  comp.map(|comp| {
+    let artifact = TraceArtifact::new("type", format!("{:?}", result));
+    comp.add_artifact(artifact);
+  });
+
   Ok(input)
 }
 
schala-lang/language/src/parsing.rs

@@ -4,8 +4,9 @@ use std::vec::IntoIter;
 
 use tokenizing::*;
 use tokenizing::Kw::*;
-use tokenizing::TokenType::*;
+use tokenizing::TokenKind::*;
 
+use source_map::{SourceMap, SourceData};
 use ast::*;
 
 use builtin::{BinOp, PrefixOp};
@@ -35,7 +36,7 @@ pub struct ParseRecord {
 }
 
 pub struct Parser {
-  tokens: Peekable<IntoIter<Token>>,
+  token_handler: TokenHandler,
   parse_record: Vec<ParseRecord>,
   parse_level: u32,
   restrictions: ParserRestrictions,
@@ -45,26 +46,57 @@ struct ParserRestrictions {
   no_struct_literal: bool
 }
 
+struct TokenHandler {
+  tokens: Peekable<IntoIter<Token>>,
+}
+
+impl TokenHandler {
+  fn new(tokens: Vec<Token>) -> TokenHandler {
+    let tokens = tokens.into_iter().peekable();
+    TokenHandler { tokens }
+  }
+
+  fn peek(&mut self) -> TokenKind {
+    self.tokens.peek().map(|ref t| { t.kind.clone() }).unwrap_or(TokenKind::EOF)
+  }
+  fn peek_with_token_offset(&mut self) -> Token {
+    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, offset: (0,0)})
+  }
+  fn next(&mut self) -> TokenKind {
+    self.tokens.next().map(|ref t| { t.kind.clone() }).unwrap_or(TokenKind::EOF)
+  }
+}
+
 impl Parser {
   pub fn new(initial_input: Vec<Token>) -> Parser {
     Parser {
-      tokens: initial_input.into_iter().peekable(),
+      token_handler: TokenHandler::new(initial_input),
       parse_record: vec![],
       parse_level: 0,
       restrictions: ParserRestrictions { no_struct_literal: false }
    }
  }
 
-  fn peek(&mut self) -> TokenType {
-    self.tokens.peek().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
+  fn peek(&mut self) -> TokenKind {
+    self.token_handler.peek()
   }
   fn peek_with_token_offset(&mut self) -> Token {
-    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { token_type: TokenType::EOF, offset: (0,0)})
+    self.token_handler.peek_with_token_offset()
   }
-  fn next(&mut self) -> TokenType {
-    self.tokens.next().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
+  fn next(&mut self) -> TokenKind {
+    self.token_handler.next()
   }
+
+  /*
+  fn next_mapped(&mut self) -> SourceMap<TokenKind> {
+    let tt = self.next();
+    SourceMap {
+      node: tt,
+      data: Some(SourceData { line_number: 420, char_idx: 69 })
+    }
+  }
+  */
+
   pub fn parse(&mut self) -> ParseResult<AST> {
     self.program()
   }
@@ -165,8 +197,9 @@ typed_identifier := IDENTIFIER type_anno
 /* Declaration - Functions */
 
 func_declaration := func_signature func_body
-func_body := ε | '{' (statement delimiter)* '}'
-func_signature := 'fn' func_name formal_param_list func_body
+func_body := ε | nonempty_func_body
+nonempty_func_body := '{' (statement delimiter)* '}'
+func_signature := 'fn' func_name formal_param_list type_anno+
 func_name := IDENTIFIER | operator
 formal_param_list := '(' (formal_param ',')* ')'
 formal_param := IDENTIFIER type_anno+
@@ -200,13 +233,13 @@ prefix_op := '+' | '-' | '!' | '~'
 call_expr := index_expr ( '(' expr_list ')' )*
 expr_list := expression (',' expression)* | ε
 index_expr := primary ( '[' (expression (',' (expression)* | ε) ']' )*
-primary := literal | paren_expr | if_expr | for_expr | while_expr | identifier_expr | curly_brace_expr | list_expr
+primary := literal | paren_expr | if_expr | for_expr | while_expr | identifier_expr | lambda_expr | anonymous_struct | list_expr
 
 /* Primary Expressions */
 
-curly_brace_expr := lambda_expr | anonymous_struct //TODO
 list_expr := '[' (expression, ',')* ']'
-lambda_expr := '{' '|' (formal_param ',')* '|' (type_anno)* (statement delimiter)* '}'
+lambda_expr := '\' lambda_param_list type_anno+ nonempty_func_body
+lambda_param_list := formal_param_list | formal_param
 paren_expr := LParen paren_inner RParen
 paren_inner := (expression ',')*
 identifier_expr := named_struct | IDENTIFIER
@@ -368,7 +401,7 @@ impl Parser {
   fn func_declaration(&mut self) -> ParseResult<Declaration> {
     let signature = self.func_signature()?;
     if let LCurlyBrace = self.peek() {
-      let statements = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
+      let statements = self.nonempty_func_body()?;
       Ok(Declaration::FuncDecl(signature, statements))
     } else {
       Ok(Declaration::FuncSig(signature))
@@ -386,7 +419,7 @@ impl Parser {
       },
       _ => (self.identifier()?, false)
     };
-    let params = delimited!(self, LParen, formal_param, Comma, RParen);
+    let params = self.formal_param_list()?;
     let type_anno = match self.peek() {
       Colon => Some(self.type_anno()?),
       _ => None,
@@ -394,6 +427,16 @@ impl Parser {
     Ok(Signature { name, operator, params, type_anno })
   }
 
+  #[recursive_descent_method]
+  fn nonempty_func_body(&mut self) -> ParseResult<Vec<Statement>> {
+    Ok(delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict))
+  }
+
+  #[recursive_descent_method]
+  fn formal_param_list(&mut self) -> ParseResult<Vec<FormalParam>> {
+    Ok(delimited!(self, LParen, formal_param, Comma, RParen))
+  }
+
   #[recursive_descent_method]
   fn formal_param(&mut self) -> ParseResult<FormalParam> {
     let name = self.identifier()?;
@@ -555,13 +598,13 @@ impl Parser {
 
   #[recursive_descent_method]
   fn call_expr(&mut self) -> ParseResult<Expression> {
-    let index = self.index_expr()?;
-    Ok(if let LParen = self.peek() {
+    let mut expr = self.index_expr()?;
+    while let LParen = self.peek() {
       let arguments = delimited!(self, LParen, expression, Comma, RParen);
-      Expression(ExpressionType::Call { f: bx!(index), arguments }, None) //TODO fix this none
-    } else {
-      index
-    })
+      expr = Expression(ExpressionType::Call { f: bx!(expr), arguments }, None); //TODO none is incorrect
+    }
+
+    Ok(expr)
   }
 
   #[recursive_descent_method]
@@ -582,6 +625,7 @@ impl Parser {
   fn primary(&mut self) -> ParseResult<Expression> {
     match self.peek() {
       LCurlyBrace => self.curly_brace_expr(),
+      Backslash => self.lambda_expr(),
      LParen => self.paren_expr(),
       LSquareBracket => self.list_expr(),
       Keyword(Kw::If) => self.if_expr(),
@@ -600,26 +644,29 @@ impl Parser {
 
   #[recursive_descent_method]
   fn curly_brace_expr(&mut self) -> ParseResult<Expression> {
-    self.lambda_expr()
+    ParseError::new("Not implemented")
   }
 
   #[recursive_descent_method]
   fn lambda_expr(&mut self) -> ParseResult<Expression> {
-    expect!(self, LCurlyBrace);
-    let params = delimited!(self, Pipe, formal_param, Comma, Pipe);
-    let mut body = Vec::new();
-    loop {
-      match self.peek() {
-        EOF | RCurlyBrace => break,
-        Newline | Semicolon => {
-          self.next();
-          continue;
-        },
-        _ => body.push(self.statement()?),
-      }
-    }
-    expect!(self, RCurlyBrace);
-    Ok(Expression(ExpressionType::Lambda { params, body }, None)) //TODO need to handle types somehow
+    expect!(self, Backslash);
+    let params = self.lambda_param_list()?;
+    let type_anno = match self.peek() {
+      Colon => Some(self.type_anno()?),
+      _ => None,
+    };
+    let body = self.nonempty_func_body()?;
+    Ok(Expression(ExpressionType::Lambda { params, type_anno, body }, None)) //TODO need to handle types somehow
+  }
+
+  #[recursive_descent_method]
+  fn lambda_param_list(&mut self) -> ParseResult<Vec<FormalParam>> {
+    if let LParen = self.peek() {
+      self.formal_param_list()
+    } else {
+      let single_param = self.formal_param()?;
+      Ok(vec![single_param])
+    }
   }
 
   #[recursive_descent_method]
@@ -728,19 +775,26 @@ impl Parser {
     //TODO - delimited! isn't sophisticated enough to do thisa
     //let guards = delimited!(self, LCurlyBrace, guard_arm, Comma, RCurlyBrace);
     expect!(self, LCurlyBrace);
+
     let mut guards = vec![];
     loop {
-      while let Newline = self.peek() {
-        self.next();
-      }
-      let guard_arm = self.guard_arm()?;
-      guards.push(guard_arm);
-      while let Newline = self.peek() {
-        self.next();
-      }
       match self.peek() {
-        Comma => {self.next(); continue },
-        _ => break
+        RCurlyBrace | EOF => break,
+        Semicolon | Newline => { self.next(); continue},
+        _ => {
+          let guard_arm = self.guard_arm()?;
+          guards.push(guard_arm);
+          loop {
+            match self.peek() {
+              Semicolon | Newline => { self.next(); continue; },
+              _ => break,
+            }
+          }
+          if let RCurlyBrace = self.peek() {
+            break;
+          }
+          expect!(self, Comma);
+        }
       }
     }
     expect!(self, RCurlyBrace);
@@ -991,7 +1045,7 @@ impl Parser {
   fn float_literal(&mut self) -> ParseResult<Expression> {
     use self::ExpressionType::*;
     let mut digits = self.digits()?;
-    if let TokenType::Period = self.peek() {
+    if let Period = self.peek() {
       self.next();
       digits.push_str(".");
       digits.push_str(&self.digits()?);
@@ -1071,7 +1125,8 @@ mod parse_tests {
   use super::Variant::*;
   use super::ForBody::*;
 
-  fn parse(tokens: Vec<::tokenizing::Token>) -> ParseResult<AST> {
+  fn parse(input: &str) -> ParseResult<AST> {
+    let tokens: Vec<::tokenizing::Token> = tokenize(input);
     let mut parser = super::Parser::new(tokens);
     parser.parse()
   }
@@ -1080,18 +1135,17 @@ mod parse_tests {
     ($string:tt) => { Rc::new(stringify!($string).to_string()) }
   }
   macro_rules! parse_test {
-    ($string:expr, $correct:expr) => { assert_eq!(parse(tokenize($string)).unwrap(), $correct) }
+    ($string:expr, $correct:expr) => { assert_eq!(parse($string).unwrap(), $correct) };
+  }
+  macro_rules! parse_test_wrap_ast {
+    ($string:expr, $correct:expr) => { parse_test!($string, AST(vec![$correct])) }
   }
   macro_rules! parse_error {
-    ($string:expr) => { assert!(parse(tokenize($string)).is_err()) }
+    ($string:expr) => { assert!(parse($string).is_err()) }
   }
   macro_rules! val {
     ($var:expr) => { Value(Rc::new($var.to_string())) }
   }
-  macro_rules! exprstatement {
-    ($expr_type:expr) => { Statement::ExpressionStatement(Expression($expr_type, None)) };
-    ($expr_type:expr, $type_anno:expr) => { Statement::ExpressionStatement(Expression($expr_type, Some($type_anno))) };
-  }
   macro_rules! ty {
     ($name:expr) => { Singleton(tys!($name)) }
   }
@@ -1099,16 +1153,17 @@ mod parse_tests {
     ($name:expr) => { TypeSingletonName { name: Rc::new($name.to_string()), params: vec![] } };
   }
 
-  /* new style of test macros */
-
-  macro_rules! single_expr {
-    ($exprtype:expr) => { AST(vec![Statement::ExpressionStatement(Expression($exprtype, None))]) };
-    ($exprtype:expr, $type:expr) => { AST(vec![Statement::ExpressionStatement(Expression($exprtype, $type))]) }
-  }
   macro_rules! ex {
-    ($expr_type:expr) => { Expression($expr_type, None) }
+    ($expr_type:expr) => { Expression($expr_type, None) };
+    (s $expr_text:expr) => {
+      {
+        let tokens: Vec<::tokenizing::Token> = tokenize($expr_text);
+        let mut parser = super::Parser::new(tokens);
+        parser.expression().unwrap()
+      }
+    };
   }
 
   macro_rules! binexp {
     ($op:expr, $lhs:expr, $rhs:expr) => { BinExp(BinOp::from_sigil($op), bx!(Expression($lhs, None)), bx!(Expression($rhs, None))) }
   }
@@ -1119,59 +1174,66 mod parse_tests {
     ($expr_type:expr) => { Statement::ExpressionStatement(Expression($expr_type, None)) };
     ($expr_type:expr, $type_anno:expr) => { Statement::ExpressionStatement(Expression($expr_type, Some($type_anno))) };
     ($op:expr, $lhs:expr, $rhs:expr) => { Statement::ExpressionStatement(ex!(binexp!($op, $lhs, $rhs))) };
+    (s $statement_text:expr) => {
+      {
+        let tokens: Vec<::tokenizing::Token> = tokenize($statement_text);
+        let mut parser = super::Parser::new(tokens);
+        parser.statement().unwrap()
+      }
+    }
   }
 
   #[test]
   fn parsing_number_literals_and_binexps() {
-    parse_test! { ".2", single_expr!(FloatLiteral(0.2)) };
-    parse_test! { "8.1", single_expr!(FloatLiteral(8.1)) };
+    parse_test_wrap_ast! { ".2", exst!(FloatLiteral(0.2)) };
+    parse_test_wrap_ast! { "8.1", exst!(FloatLiteral(8.1)) };
 
-    parse_test! { "0b010", single_expr!(NatLiteral(2)) };
-    parse_test! { "0b0_1_0_", single_expr!(NatLiteral(2)) }
+    parse_test_wrap_ast! { "0b010", exst!(NatLiteral(2)) };
+    parse_test_wrap_ast! { "0b0_1_0_", exst!(NatLiteral(2)) }
 
-    parse_test! {"0xff", single_expr!(NatLiteral(255)) };
-    parse_test! {"0xf_f_", single_expr!(NatLiteral(255)) };
+    parse_test_wrap_ast! {"0xff", exst!(NatLiteral(255)) };
+    parse_test_wrap_ast! {"0xf_f_", exst!(NatLiteral(255)) };
 
-    parse_test!("0xf_f_+1", AST(vec![exprstatement!(binexp!("+", NatLiteral(255), NatLiteral(1)))]));
+    parse_test_wrap_ast! {"0xf_f_+1", exst!(binexp!("+", NatLiteral(255), NatLiteral(1))) };
 
     parse_test! {"3; 4; 4.3", AST(
-      vec![exprstatement!(NatLiteral(3)), exprstatement!(NatLiteral(4)),
-        exprstatement!(FloatLiteral(4.3))])
+      vec![exst!(NatLiteral(3)), exst!(NatLiteral(4)),
        exst!(FloatLiteral(4.3))])
     };
 
     parse_test!("1 + 2 * 3", AST(vec!
      [
-        exprstatement!(binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))))
+        exst!(binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))))
      ]));
 
     parse_test!("1 * 2 + 3", AST(vec!
      [
-        exprstatement!(binexp!("+", binexp!("*", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))
+        exst!(binexp!("+", binexp!("*", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))
      ]));
 
-    parse_test!("1 && 2", AST(vec![exprstatement!(binexp!("&&", NatLiteral(1), NatLiteral(2)))]));
+    parse_test!("1 && 2", AST(vec![exst!(binexp!("&&", NatLiteral(1), NatLiteral(2)))]));
 
-    parse_test!("1 + 2 * 3 + 4", AST(vec![exprstatement!(
+    parse_test!("1 + 2 * 3 + 4", AST(vec![exst!(
       binexp!("+",
         binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))),
         NatLiteral(4)))]));
 
     parse_test!("(1 + 2) * 3", AST(vec!
-      [exprstatement!(binexp!("*", binexp!("+", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))]));
+      [exst!(binexp!("*", binexp!("+", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))]));
 
-    parse_test!(".1 + .2", AST(vec![exprstatement!(binexp!("+", FloatLiteral(0.1), FloatLiteral(0.2)))]));
-    parse_test!("1 / 2", AST(vec![exprstatement!(binexp!("/", NatLiteral(1), NatLiteral(2)))]));
+    parse_test!(".1 + .2", AST(vec![exst!(binexp!("+", FloatLiteral(0.1), FloatLiteral(0.2)))]));
+    parse_test!("1 / 2", AST(vec![exst!(binexp!("/", NatLiteral(1), NatLiteral(2)))]));
   }
 
   #[test]
   fn parsing_tuples() {
-    parse_test!("()", AST(vec![exprstatement!(TupleLiteral(vec![]))]));
-    parse_test!("(\"hella\", 34)", AST(vec![exprstatement!(
+    parse_test!("()", AST(vec![exst!(TupleLiteral(vec![]))]));
+    parse_test!("(\"hella\", 34)", AST(vec![exst!(
       TupleLiteral(
-        vec![ex!(StringLiteral(rc!(hella))), ex!(NatLiteral(34))]
+        vec![ex!(s r#""hella""#), ex!(s "34")]
       )
     )]));
-    parse_test!("((1+2), \"slough\")", AST(vec![exprstatement!(TupleLiteral(vec![
+    parse_test!("((1+2), \"slough\")", AST(vec![exst!(TupleLiteral(vec![
       ex!(binexp!("+", NatLiteral(1), NatLiteral(2))),
       ex!(StringLiteral(rc!(slough))),
     ]))]))
@@ -1179,41 +1241,42 @@ mod parse_tests {
 
   #[test]
   fn parsing_identifiers() {
-    parse_test!("a", AST(vec![exprstatement!(val!("a"))]));
-    parse_test!("a + b", AST(vec![exprstatement!(binexp!("+", val!("a"), val!("b")))]));
+    parse_test!("a", AST(vec![exst!(val!("a"))]));
+    parse_test!("some_value", AST(vec![exst!(val!("some_value"))]));
+    parse_test!("a + b", AST(vec![exst!(binexp!("+", val!("a"), val!("b")))]));
     //parse_test!("a[b]", AST(vec![Expression(
     //parse_test!("a[]", <- TODO THIS NEEDS TO FAIL
     //parse_test!(damn()[a] ,<- TODO needs to succeed
-    parse_test!("a[b,c]", AST(vec![exprstatement!(Index { indexee: bx!(ex!(val!("a"))), indexers: vec![ex!(val!("b")), ex!(val!("c"))]} )]));
+    parse_test!("a[b,c]", AST(vec![exst!(Index { indexee: bx!(ex!(val!("a"))), indexers: vec![ex!(val!("b")), ex!(val!("c"))]} )]));
 
-    parse_test!("None", AST(vec![exprstatement!(val!("None"))]));
+    parse_test!("None", AST(vec![exst!(val!("None"))]));
     parse_test!("Pandas { a: x + y }", AST(vec![
-      exprstatement!(NamedStruct { name: rc!(Pandas), fields: vec![(rc!(a), ex!(binexp!("+", val!("x"), val!("y"))))]})
+      exst!(NamedStruct { name: rc!(Pandas), fields: vec![(rc!(a), ex!(binexp!("+", val!("x"), val!("y"))))]})
|
||||||
]));
|
]));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parsing_complicated_operators() {
|
fn parsing_complicated_operators() {
|
||||||
parse_test!("a <- b", AST(vec![exprstatement!(binexp!("<-", val!("a"), val!("b")))]));
|
parse_test!("a <- b", AST(vec![exst!(binexp!("<-", val!("a"), val!("b")))]));
|
||||||
parse_test!("a || b", AST(vec![exprstatement!(binexp!("||", val!("a"), val!("b")))]));
|
parse_test!("a || b", AST(vec![exst!(binexp!("||", val!("a"), val!("b")))]));
|
||||||
parse_test!("a<>b", AST(vec![exprstatement!(binexp!("<>", val!("a"), val!("b")))]));
|
parse_test!("a<>b", AST(vec![exst!(binexp!("<>", val!("a"), val!("b")))]));
|
||||||
parse_test!("a.b.c.d", AST(vec![exprstatement!(binexp!(".",
|
parse_test!("a.b.c.d", AST(vec![exst!(binexp!(".",
|
||||||
binexp!(".",
|
binexp!(".",
|
||||||
binexp!(".", val!("a"), val!("b")),
|
binexp!(".", val!("a"), val!("b")),
|
||||||
val!("c")),
|
val!("c")),
|
||||||
val!("d")))]));
|
val!("d")))]));
|
||||||
parse_test!("-3", AST(vec![exprstatement!(prefexp!("-", NatLiteral(3)))]));
|
parse_test!("-3", AST(vec![exst!(prefexp!("-", NatLiteral(3)))]));
|
||||||
parse_test!("-0.2", AST(vec![exprstatement!(prefexp!("-", FloatLiteral(0.2)))]));
|
parse_test!("-0.2", AST(vec![exst!(prefexp!("-", FloatLiteral(0.2)))]));
|
||||||
parse_test!("!3", AST(vec![exprstatement!(prefexp!("!", NatLiteral(3)))]));
|
parse_test!("!3", AST(vec![exst!(prefexp!("!", NatLiteral(3)))]));
|
||||||
parse_test!("a <- -b", AST(vec![exprstatement!(binexp!("<-", val!("a"), prefexp!("-", val!("b"))))]));
|
parse_test!("a <- -b", AST(vec![exst!(binexp!("<-", val!("a"), prefexp!("-", val!("b"))))]));
|
||||||
parse_test!("a <--b", AST(vec![exprstatement!(binexp!("<--", val!("a"), val!("b")))]));
|
parse_test!("a <--b", AST(vec![exst!(binexp!("<--", val!("a"), val!("b")))]));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parsing_functions() {
|
fn parsing_functions() {
|
||||||
parse_test!("fn oi()", AST(vec![Declaration(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }))]));
|
parse_test!("fn oi()", AST(vec![Declaration(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }))]));
|
||||||
parse_test!("oi()", AST(vec![exprstatement!(Call { f: bx!(ex!(val!("oi"))), arguments: vec![] })]));
|
parse_test!("oi()", AST(vec![exst!(Call { f: bx!(ex!(val!("oi"))), arguments: vec![] })]));
|
||||||
parse_test!("oi(a, 2 + 2)", AST(vec![exprstatement!(Call
|
parse_test!("oi(a, 2 + 2)", AST(vec![exst!(Call
|
||||||
{ f: bx!(ex!(val!("oi"))),
|
{ f: bx!(ex!(val!("oi"))),
|
||||||
arguments: vec![ex!(val!("a")), ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))]
|
arguments: vec![ex!(val!("a")), ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))]
|
||||||
})]));
|
})]));
|
||||||
@@ -1227,10 +1290,10 @@ mod parse_tests {
|
|||||||
|
|
||||||
parse_test!("fn a(x) { x() }", AST(vec![Declaration(
|
parse_test!("fn a(x) { x() }", AST(vec![Declaration(
|
||||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
||||||
vec![exprstatement!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })]))]));
|
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })]))]));
|
||||||
parse_test!("fn a(x) {\n x() }", AST(vec![Declaration(
|
parse_test!("fn a(x) {\n x() }", AST(vec![Declaration(
|
||||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
||||||
vec![exprstatement!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })]))]));
|
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })]))]));
|
||||||
|
|
||||||
let multiline = r#"
|
let multiline = r#"
|
||||||
fn a(x) {
|
fn a(x) {
|
||||||
@@ -1239,7 +1302,7 @@ fn a(x) {
|
|||||||
"#;
|
"#;
|
||||||
parse_test!(multiline, AST(vec![Declaration(
|
parse_test!(multiline, AST(vec![Declaration(
|
||||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
||||||
vec![exprstatement!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })]))]));
|
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })]))]));
|
||||||
let multiline2 = r#"
|
let multiline2 = r#"
|
||||||
fn a(x) {
|
fn a(x) {
|
||||||
|
|
||||||
@@ -1249,19 +1312,18 @@ fn a(x) {
|
|||||||
"#;
|
"#;
|
||||||
parse_test!(multiline2, AST(vec![Declaration(
|
parse_test!(multiline2, AST(vec![Declaration(
|
||||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
||||||
vec![exprstatement!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })]))]));
|
vec![exst!(s "x()")]))]));
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parsing_bools() {
|
fn parsing_bools() {
|
||||||
parse_test!("false", AST(vec![exprstatement!(BoolLiteral(false))]));
|
parse_test!("false", AST(vec![exst!(BoolLiteral(false))]));
|
||||||
parse_test!("true", AST(vec![exprstatement!(BoolLiteral(true))]));
|
parse_test!("true", AST(vec![exst!(BoolLiteral(true))]));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parsing_strings() {
|
fn parsing_strings() {
|
||||||
parse_test!(r#""hello""#, AST(vec![exprstatement!(StringLiteral(rc!(hello)))]));
|
parse_test!(r#""hello""#, AST(vec![exst!(StringLiteral(rc!(hello)))]));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -1304,14 +1366,14 @@ fn a(x) {
|
|||||||
#[test]
|
#[test]
|
||||||
fn parsing_block_expressions() {
|
fn parsing_block_expressions() {
|
||||||
parse_test! {
|
parse_test! {
|
||||||
"if a() then { b(); c() }", AST(vec![exprstatement!(
|
"if a() then { b(); c() }", AST(vec![exst!(
|
||||||
IfExpression {
|
IfExpression {
|
||||||
discriminator: bx! {
|
discriminator: bx! {
|
||||||
Discriminator::Simple(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]}))
|
Discriminator::Simple(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]}))
|
||||||
},
|
},
|
||||||
body: bx! {
|
body: bx! {
|
||||||
IfExpressionBody::SimpleConditional(
|
IfExpressionBody::SimpleConditional(
|
||||||
vec![exprstatement!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exprstatement!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||||
None
|
None
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -1320,16 +1382,16 @@ fn a(x) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
parse_test! {
|
parse_test! {
|
||||||
"if a() then { b(); c() } else { q }", AST(vec![exprstatement!(
|
"if a() then { b(); c() } else { q }", AST(vec![exst!(
|
||||||
IfExpression {
|
IfExpression {
|
||||||
discriminator: bx! {
|
discriminator: bx! {
|
||||||
Discriminator::Simple(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]}))
|
Discriminator::Simple(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]}))
|
||||||
},
|
},
|
||||||
body: bx! {
|
body: bx! {
|
||||||
IfExpressionBody::SimpleConditional(
|
IfExpressionBody::SimpleConditional(
|
||||||
vec![exprstatement!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exprstatement!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||||
Some(
|
Some(
|
||||||
vec![exprstatement!(val!("q"))],
|
vec![exst!(val!("q"))],
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -1338,9 +1400,9 @@ fn a(x) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
/*
|
/*
|
||||||
parse_test!("if a() then { b(); c() }", AST(vec![exprstatement!(
|
parse_test!("if a() then { b(); c() }", AST(vec![exst!(
|
||||||
IfExpression(bx!(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})),
|
IfExpression(bx!(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})),
|
||||||
vec![exprstatement!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exprstatement!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||||
None)
|
None)
|
||||||
)]));
|
)]));
|
||||||
parse_test!(r#"
|
parse_test!(r#"
|
||||||
@@ -1350,21 +1412,21 @@ fn a(x) {
|
|||||||
} else {
|
} else {
|
||||||
c
|
c
|
||||||
}"#,
|
}"#,
|
||||||
AST(vec![exprstatement!(IfExpression(bx!(ex!(BoolLiteral(true))),
|
AST(vec![exst!(IfExpression(bx!(ex!(BoolLiteral(true))),
|
||||||
vec![Declaration(Binding { name: rc!(a), constant: true, expr: ex!(NatLiteral(10)) }),
|
vec![Declaration(Binding { name: rc!(a), constant: true, expr: ex!(NatLiteral(10)) }),
|
||||||
exprstatement!(val!(rc!(b)))],
|
exst!(val!(rc!(b)))],
|
||||||
Some(vec![exprstatement!(val!(rc!(c)))])))])
|
Some(vec![exst!(val!(rc!(c)))])))])
|
||||||
);
|
);
|
||||||
|
|
||||||
parse_test!("if a { b } else { c }", AST(vec![exprstatement!(
|
parse_test!("if a { b } else { c }", AST(vec![exst!(
|
||||||
IfExpression(bx!(ex!(val!("a"))),
|
IfExpression(bx!(ex!(val!("a"))),
|
||||||
vec![exprstatement!(val!("b"))],
|
vec![exst!(val!("b"))],
|
||||||
Some(vec![exprstatement!(val!("c"))])))]));
|
Some(vec![exst!(val!("c"))])))]));
|
||||||
|
|
||||||
parse_test!("if (A {a: 1}) { b } else { c }", AST(vec![exprstatement!(
|
parse_test!("if (A {a: 1}) { b } else { c }", AST(vec![exst!(
|
||||||
IfExpression(bx!(ex!(NamedStruct { name: rc!(A), fields: vec![(rc!(a), ex!(NatLiteral(1)))]})),
|
IfExpression(bx!(ex!(NamedStruct { name: rc!(A), fields: vec![(rc!(a), ex!(NatLiteral(1)))]})),
|
||||||
vec![exprstatement!(val!("b"))],
|
vec![exst!(val!("b"))],
|
||||||
Some(vec![exprstatement!(val!("c"))])))]));
|
Some(vec![exst!(val!("c"))])))]));
|
||||||
|
|
||||||
parse_error!("if A {a: 1} { b } else { c }");
|
parse_error!("if A {a: 1} { b } else { c }");
|
||||||
*/
|
*/
|
||||||
@@ -1418,21 +1480,21 @@ fn a(x) {
|
|||||||
Expression(val!("b"), Some(ty!("Int"))) })]));
|
Expression(val!("b"), Some(ty!("Int"))) })]));
|
||||||
|
|
||||||
parse_test!("a : Int", AST(vec![
|
parse_test!("a : Int", AST(vec![
|
||||||
exprstatement!(val!("a"), ty!("Int"))
|
exst!(val!("a"), ty!("Int"))
|
||||||
]));
|
]));
|
||||||
|
|
||||||
parse_test!("a : Option<Int>", AST(vec![
|
parse_test!("a : Option<Int>", AST(vec![
|
||||||
exprstatement!(val!("a"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Int")] }))
|
exst!(val!("a"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Int")] }))
|
||||||
]));
|
]));
|
||||||
|
|
||||||
parse_test!("a : KoreanBBQSpecifier<Kimchi, Option<Bulgogi> >", AST(vec![
|
parse_test!("a : KoreanBBQSpecifier<Kimchi, Option<Bulgogi> >", AST(vec![
|
||||||
exprstatement!(val!("a"), Singleton(TypeSingletonName { name: rc!(KoreanBBQSpecifier), params: vec![
|
exst!(val!("a"), Singleton(TypeSingletonName { name: rc!(KoreanBBQSpecifier), params: vec![
|
||||||
ty!("Kimchi"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Bulgogi")] })
|
ty!("Kimchi"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Bulgogi")] })
|
||||||
] }))
|
] }))
|
||||||
]));
|
]));
|
||||||
|
|
||||||
parse_test!("a : (Int, Yolo<a>)", AST(vec![
|
parse_test!("a : (Int, Yolo<a>)", AST(vec![
|
||||||
exprstatement!(val!("a"), Tuple(
|
exst!(val!("a"), Tuple(
|
||||||
vec![ty!("Int"), Singleton(TypeSingletonName {
|
vec![ty!("Int"), Singleton(TypeSingletonName {
|
||||||
name: rc!(Yolo), params: vec![ty!("a")]
|
name: rc!(Yolo), params: vec![ty!("a")]
|
||||||
})]))]));
|
})]))]));
|
||||||
@@ -1440,28 +1502,80 @@ fn a(x) {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parsing_lambdas() {
|
fn parsing_lambdas() {
|
||||||
parse_test! { "{|x| x + 1}", single_expr!(
|
parse_test_wrap_ast! { r#"\(x) { x + 1}"#, exst!(
|
||||||
Lambda { params: vec![(rc!(x), None)], body: vec![exst!("+", val!("x"), NatLiteral(1))] }
|
Lambda { params: vec![(rc!(x), None)], type_anno: None, body: vec![exst!(s "x + 1")] }
|
||||||
) }
|
)
|
||||||
|
}
|
||||||
|
|
||||||
parse_test!("{ |x: Int, y| a;b;c;}", AST(vec![
|
parse_test!(r#"\ (x: Int, y) { a;b;c;}"#, AST(vec![
|
||||||
exprstatement!(Lambda {
|
exst!(Lambda {
|
||||||
params: vec![(rc!(x), Some(ty!("Int"))), (rc!(y), None)],
|
params: vec![(rc!(x), Some(ty!("Int"))), (rc!(y), None)],
|
||||||
body: vec![exst!(val!("a")), exst!(val!("b")), exst!(val!("c"))]
|
type_anno: None,
|
||||||
|
body: vec![exst!(s "a"), exst!(s "b"), exst!(s "c")]
|
||||||
})
|
})
|
||||||
]));
|
]));
|
||||||
|
|
||||||
parse_test!("{|x| y}(1)", AST(vec![
|
parse_test!(r#"\(x){y}(1)"#, AST(vec![
|
||||||
exprstatement!(Call { f: bx!(ex!(
|
exst!(Call { f: bx!(ex!(
|
||||||
Lambda { params: vec![(rc!(x), None)], body: vec![exprstatement!(val!("y"))] })),
|
Lambda {
|
||||||
|
params: vec![(rc!(x), None)],
|
||||||
|
type_anno: None,
|
||||||
|
body: vec![exst!(s "y")] }
|
||||||
|
)),
|
||||||
arguments: vec![ex!(NatLiteral(1))] })]));
|
arguments: vec![ex!(NatLiteral(1))] })]));
|
||||||
|
|
||||||
|
parse_test_wrap_ast! {
|
||||||
|
r#"\(x: Int): String { "q" }"#,
|
||||||
|
exst!(Lambda {
|
||||||
|
params: vec![(rc!(x), Some(ty!("Int")))],
|
||||||
|
type_anno: Some(ty!("String")),
|
||||||
|
body: vec![exst!(s r#""q""#)]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn single_param_lambda() {
|
||||||
|
parse_test_wrap_ast! {
|
||||||
|
r"\x { x + 10 }",
|
||||||
|
exst!(Lambda {
|
||||||
|
params: vec![(rc!(x), None)],
|
||||||
|
type_anno: None,
|
||||||
|
body: vec![exst!(s r"x + 10")]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
parse_test_wrap_ast! {
|
||||||
|
r"\x: Nat { x + 10 }",
|
||||||
|
exst!(Lambda {
|
||||||
|
params: vec![(rc!(x), Some(ty!("Nat")))],
|
||||||
|
type_anno: None,
|
||||||
|
body: vec![exst!(s r"x + 10")]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn more_advanced_lambdas() {
|
||||||
|
parse_test! {
|
||||||
|
r#"fn wahoo() { let a = 10; \(x) { x + a } };
|
||||||
|
wahoo()(3) "#, AST(vec![
|
||||||
|
exst!(s r"fn wahoo() { let a = 10; \(x) { x + a } }"),
|
||||||
|
exst! {
|
||||||
|
Call {
|
||||||
|
f: bx!(ex!(Call { f: bx!(ex!(val!("wahoo"))), arguments: vec![] })),
|
||||||
|
arguments: vec![ex!(s "3")],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
])
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn list_literals() {
|
fn list_literals() {
|
||||||
parse_test! {
|
parse_test! {
|
||||||
"[1,2]", AST(vec![
|
"[1,2]", AST(vec![
|
||||||
exprstatement!(ListLiteral(vec![ex!(NatLiteral(1)), ex!(NatLiteral(2))]))])
|
exst!(ListLiteral(vec![ex!(NatLiteral(1)), ex!(NatLiteral(2))]))])
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1469,12 +1583,12 @@ fn a(x) {
|
|||||||
fn while_expr() {
|
fn while_expr() {
|
||||||
parse_test! {
|
parse_test! {
|
||||||
"while { }", AST(vec![
|
"while { }", AST(vec![
|
||||||
exprstatement!(WhileExpression { condition: None, body: vec![] })])
|
exst!(WhileExpression { condition: None, body: vec![] })])
|
||||||
}
|
}
|
||||||
|
|
||||||
parse_test! {
|
parse_test! {
|
||||||
"while a == b { }", AST(vec![
|
"while a == b { }", AST(vec![
|
||||||
exprstatement!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![] })])
|
exst!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![] })])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1482,120 +1596,110 @@ fn a(x) {
|
|||||||
fn for_expr() {
|
fn for_expr() {
|
||||||
parse_test! {
|
parse_test! {
|
||||||
"for { a <- maybeValue } return 1", AST(vec![
|
"for { a <- maybeValue } return 1", AST(vec![
|
||||||
exprstatement!(ForExpression {
|
exst!(ForExpression {
|
||||||
enumerators: vec![Enumerator { id: rc!(a), generator: ex!(val!("maybeValue")) }],
|
enumerators: vec![Enumerator { id: rc!(a), generator: ex!(val!("maybeValue")) }],
|
||||||
body: bx!(MonadicReturn(ex!(NatLiteral(1))))
|
body: bx!(MonadicReturn(ex!(s "1")))
|
||||||
})])
|
})])
|
||||||
}
|
}
|
||||||
|
|
||||||
parse_test! {
|
parse_test! {
|
||||||
"for n <- someRange { f(n); }", AST(vec![
|
"for n <- someRange { f(n); }", AST(vec![
|
||||||
exprstatement!(ForExpression { enumerators: vec![Enumerator { id: rc!(n), generator: ex!(val!("someRange"))}],
|
exst!(ForExpression { enumerators: vec![Enumerator { id: rc!(n), generator: ex!(val!("someRange"))}],
|
||||||
body: bx!(ForBody::StatementBlock(vec![exprstatement!(Call { f: bx![ex!(val!("f"))], arguments: vec![ex!(val!("n"))] })]))
|
body: bx!(ForBody::StatementBlock(vec![exst!(s "f(n)")]))
|
||||||
})])
|
})])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn patterns() {
|
fn patterns() {
|
||||||
parse_test! {
|
parse_test_wrap_ast! {
|
||||||
"if x is Some(a) then { 4 } else { 9 }", AST(vec![
|
"if x is Some(a) then { 4 } else { 9 }", exst!(
|
||||||
exprstatement!(
|
IfExpression {
|
||||||
IfExpression {
|
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||||
discriminator: bx!(Discriminator::Simple(ex!(Value(rc!(x))))),
|
body: bx!(IfExpressionBody::SimplePatternMatch(Pattern::TupleStruct(rc!(Some), vec![Pattern::Literal(PatternLiteral::VarPattern(rc!(a)))]), vec![exst!(s "4")], Some(vec![exst!(s "9")]))) }
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch(Pattern::TupleStruct(rc!(Some), vec![Pattern::Literal(PatternLiteral::VarPattern(rc!(a)))]), vec![exprstatement!(NatLiteral(4))], Some(vec![exprstatement!(NatLiteral(9))]))) }
|
)
|
||||||
)
|
|
||||||
])
|
|
||||||
}
|
}
|
||||||
|
|
||||||
parse_test! {
|
parse_test_wrap_ast! {
|
||||||
"if x is Some(a) then 4 else 9", AST(vec![
|
"if x is Some(a) then 4 else 9", exst!(
|
||||||
exprstatement!(
|
IfExpression {
|
||||||
IfExpression {
|
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||||
discriminator: bx!(Discriminator::Simple(ex!(Value(rc!(x))))),
|
body: bx!(IfExpressionBody::SimplePatternMatch(Pattern::TupleStruct(rc!(Some), vec![Pattern::Literal(PatternLiteral::VarPattern(rc!(a)))]), vec![exst!(s "4")], Some(vec![exst!(s "9")]))) }
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch(Pattern::TupleStruct(rc!(Some), vec![Pattern::Literal(PatternLiteral::VarPattern(rc!(a)))]), vec![exprstatement!(NatLiteral(4))], Some(vec![exprstatement!(NatLiteral(9))]))) }
|
)
|
||||||
)
|
|
||||||
])
|
|
||||||
}
|
}
|
||||||
|
|
||||||
parse_test! {
|
parse_test_wrap_ast! {
|
||||||
"if x is Something { a, b: x } then { 4 } else { 9 }", AST(vec![
|
"if x is Something { a, b: x } then { 4 } else { 9 }", exst!(
|
||||||
exprstatement!(
|
IfExpression {
|
||||||
IfExpression {
|
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||||
discriminator: bx!(Discriminator::Simple(ex!(Value(rc!(x))))),
|
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
|
||||||
Pattern::Record(rc!(Something), vec![
|
Pattern::Record(rc!(Something), vec![
|
||||||
(rc!(a),Pattern::Literal(PatternLiteral::StringPattern(rc!(a)))),
|
(rc!(a),Pattern::Literal(PatternLiteral::StringPattern(rc!(a)))),
|
||||||
(rc!(b),Pattern::Literal(PatternLiteral::VarPattern(rc!(x))))
|
(rc!(b),Pattern::Literal(PatternLiteral::VarPattern(rc!(x))))
|
||||||
]),
|
]),
|
||||||
vec![exprstatement!(NatLiteral(4))], Some(vec![exprstatement!(NatLiteral(9))])))
|
vec![exst!(s "4")], Some(vec![exst!(s "9")])))
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
])
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn pattern_literals() {
|
fn pattern_literals() {
|
||||||
parse_test! {
|
parse_test_wrap_ast! {
|
||||||
"if x is -1 then 1 else 2", AST(vec![
|
"if x is -1 then 1 else 2",
|
||||||
exprstatement!(
|
exst!(
|
||||||
IfExpression {
|
IfExpression {
|
||||||
discriminator: bx!(Discriminator::Simple(ex!(Value(rc!(x))))),
|
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||||
Pattern::Literal(PatternLiteral::NumPattern { neg: true, num: NatLiteral(1) }),
|
Pattern::Literal(PatternLiteral::NumPattern { neg: true, num: NatLiteral(1) }),
|
||||||
vec![exprstatement!(NatLiteral(1))],
|
vec![exst!(NatLiteral(1))],
|
||||||
Some(vec![exprstatement!(NatLiteral(2))]),
|
Some(vec![exst!(NatLiteral(2))]),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
])
|
|
||||||
}
|
}
|
||||||
|
|
||||||
parse_test! {
|
parse_test_wrap_ast! {
|
||||||
"if x is 1 then 1 else 2", AST(vec![
|
"if x is 1 then 1 else 2",
|
||||||
exprstatement!(
|
exst!(
|
||||||
IfExpression {
|
IfExpression {
|
||||||
discriminator: bx!(Discriminator::Simple(ex!(Value(rc!(x))))),
|
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||||
Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1) }),
|
Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1) }),
|
||||||
vec![exprstatement!(NatLiteral(1))],
|
vec![exst!(s "1")],
|
||||||
Some(vec![exprstatement!(NatLiteral(2))]),
|
Some(vec![exst!(s "2")]),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
])
|
|
||||||
}
|
}
|
||||||
|
|
||||||
parse_test! {
|
parse_test! {
|
||||||
"if x is true then 1 else 2", AST(vec![
|
"if x is true then 1 else 2", AST(vec![
|
||||||
exprstatement!(
|
exst!(
|
||||||
IfExpression {
|
IfExpression {
|
||||||
discriminator: bx!(Discriminator::Simple(ex!(Value(rc!(x))))),
|
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||||
Pattern::Literal(PatternLiteral::BoolPattern(true)),
|
Pattern::Literal(PatternLiteral::BoolPattern(true)),
|
||||||
vec![exprstatement!(NatLiteral(1))],
|
vec![exst!(NatLiteral(1))],
|
||||||
Some(vec![exprstatement!(NatLiteral(2))]),
|
Some(vec![exst!(NatLiteral(2))]),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
])
|
])
|
||||||
}
|
}
|
||||||
|
|
||||||
parse_test! {
|
parse_test_wrap_ast! {
|
||||||
"if x is \"gnosticism\" then 1 else 2", AST(vec![
|
"if x is \"gnosticism\" then 1 else 2",
|
||||||
exprstatement!(
|
exst!(
|
||||||
IfExpression {
|
IfExpression {
|
||||||
discriminator: bx!(Discriminator::Simple(ex!(Value(rc!(x))))),
|
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||||
Pattern::Literal(PatternLiteral::StringPattern(rc!(gnosticism))),
|
Pattern::Literal(PatternLiteral::StringPattern(rc!(gnosticism))),
|
||||||
vec![exprstatement!(NatLiteral(1))],
|
vec![exst!(s "1")],
|
||||||
Some(vec![exprstatement!(NatLiteral(2))]),
|
Some(vec![exst!(s "2")]),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
])
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
schala-lang/language/src/prelude.schala (Normal file, 13 lines)
@@ -0,0 +1,13 @@

type Option<T> = Some(T) | None
type Color = Red | Green | Blue
type Ord = LT | EQ | GT


fn map(input: Option<T>, func: Func): Option<T> {
  if input {
    is Some(x) -> Some(func(x)),
    is None -> None,
  }
}
@@ -1,6 +1,6 @@
use std::rc::Rc;

use ast::*;
use symbol_table::{Symbol, SymbolSpec, SymbolTable};
use builtin::{BinOp, PrefixOp};

@@ -48,6 +48,7 @@ pub enum Expr {
    then_clause: Vec<Stmt>,
    else_clause: Vec<Stmt>,
  },
  ConditionalTargetSigilValue,
  CaseMatch {
    cond: Box<Expr>,
    alternatives: Vec<Alternative>
@@ -55,19 +56,23 @@ pub enum Expr {
  UnimplementedSigilValue
}

pub type BoundVars = Vec<Option<Rc<String>>>; //remember that order matters here

#[derive(Debug, Clone)]
pub struct Alternative {
  pub tag: Option<usize>,
  pub subpatterns: Vec<Option<Subpattern>>,
  pub guard: Option<Expr>,
  pub bound_vars: BoundVars,
  pub item: Vec<Stmt>,
}

#[derive(Debug, Clone)]
pub struct Subpattern {
  pub tag: Option<usize>,
  pub subpatterns: Vec<Option<Subpattern>>,
  pub bound_vars: BoundVars,
  pub guard: Option<Expr>,
}

#[derive(Debug, Clone)]
@@ -135,11 +140,24 @@ impl Expression {
      },
      TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| e.reduce(symbol_table)).collect()),
      IfExpression { discriminator, body } => reduce_if_expression(discriminator, body, symbol_table),
      Lambda { params, body, .. } => reduce_lambda(params, body, symbol_table),
      NamedStruct { .. } => Expr::UnimplementedSigilValue,
      Index { .. } => Expr::UnimplementedSigilValue,
      WhileExpression { .. } => Expr::UnimplementedSigilValue,
      ForExpression { .. } => Expr::UnimplementedSigilValue,
      ListLiteral { .. } => Expr::UnimplementedSigilValue,
    }
  }
}

fn reduce_lambda(params: &Vec<FormalParam>, body: &Block, symbol_table: &SymbolTable) -> Expr {
  Expr::Func(Func::UserDefined {
    name: None,
    params: params.iter().map(|param| param.0.clone()).collect(),
    body: body.iter().map(|stmt| stmt.reduce(symbol_table)).collect(),
  })
}

fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody, symbol_table: &SymbolTable) -> Expr {
  let cond = Box::new(match *discriminator {
    Discriminator::Simple(ref expr) => expr.reduce(symbol_table),
@@ -162,8 +180,14 @@ fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody,
  };

  let alternatives = vec![
    pat.to_alternative(then_clause, symbol_table),
    Alternative {
      tag: None,
      subpatterns: vec![],
      bound_vars: vec![],
      guard: None,
      item: else_clause
    },
  ];

  Expr::CaseMatch {
@@ -177,7 +201,7 @@ fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody,
      match arm.guard {
        Guard::Pat(ref p) => {
          let item = arm.body.iter().map(|expr| expr.reduce(symbol_table)).collect();
          let alt = p.to_alternative(item, symbol_table);
          alternatives.push(alt);
        },
        Guard::HalfExpr(HalfExpr { op: _, expr: _ }) => {
@@ -193,104 +217,134 @@ fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody,
 * x is SomeBigOldEnum(_, x, Some(t))
 */

fn handle_symbol(symbol: Option<&Symbol>, inner_patterns: &Vec<Pattern>, symbol_table: &SymbolTable) -> Subpattern {
  use self::Pattern::*;
  let tag = symbol.map(|symbol| match symbol.spec {
    SymbolSpec::DataConstructor { index, .. } => index.clone(),
    _ => panic!("Symbol is not a data constructor - this should've been caught in type-checking"),
  });
  let bound_vars = inner_patterns.iter().map(|p| match p {
    Literal(PatternLiteral::VarPattern(var)) => Some(var.clone()),
    _ => None,
  }).collect();

  let subpatterns = inner_patterns.iter().map(|p| match p {
    Ignored => None,
    Literal(PatternLiteral::VarPattern(_)) => None,
    Literal(other) => Some(other.to_subpattern(symbol_table)),
    tp @ TuplePattern(_) => Some(tp.to_subpattern(symbol_table)),
    ts @ TupleStruct(_, _) => Some(ts.to_subpattern(symbol_table)),
    Record(..) => unimplemented!(),
  }).collect();

  let guard = None;
  /*
  let guard_equality_exprs: Vec<Expr> = subpatterns.iter().map(|p| match p {
    Literal(lit) => match lit {
      _ => unimplemented!()
    },
    _ => unimplemented!()
  }).collect();
  */

  Subpattern {
    tag,
    subpatterns,
    guard,
    bound_vars,
  }
}

impl Pattern {
  fn to_alternative(&self, item: Vec<Stmt>, symbol_table: &SymbolTable) -> Alternative {
    let s = self.to_subpattern(symbol_table);
    Alternative {
      tag: s.tag,
      subpatterns: s.subpatterns,
      bound_vars: s.bound_vars,
      guard: s.guard,
      item
    }
  }

  fn to_subpattern(&self, symbol_table: &SymbolTable) -> Subpattern {
    use self::Pattern::*;
    match self {
      TupleStruct(name, inner_patterns) => {
        let symbol = symbol_table.lookup_by_name(name).expect(&format!("Symbol {} not found", name));
        handle_symbol(Some(symbol), inner_patterns, symbol_table)
      },
      TuplePattern(inner_patterns) => handle_symbol(None, inner_patterns, symbol_table),
      Record(_name, _pairs) => {
        unimplemented!()
      },
      Ignored => Subpattern { tag: None, subpatterns: vec![], guard: None, bound_vars: vec![] },
      Literal(lit) => lit.to_subpattern(symbol_table),
    }
  }
}

impl PatternLiteral {
  fn to_subpattern(&self, symbol_table: &SymbolTable) -> Subpattern {
    use self::PatternLiteral::*;
    match self {
      NumPattern { neg, num } => {
        let comparison = Expr::Lit(match (neg, num) {
          (false, ExpressionType::NatLiteral(n)) => Lit::Nat(*n),
          (false, ExpressionType::FloatLiteral(f)) => Lit::Float(*f),
          (true, ExpressionType::NatLiteral(n)) => Lit::Int(-1*(*n as i64)),
          (true, ExpressionType::FloatLiteral(f)) => Lit::Float(-1.0*f),
          _ => panic!("This should never happen")
        });
        let guard = Some(Expr::Call {
          f: Box::new(Expr::Func(Func::BuiltIn(Rc::new("==".to_string())))),
          args: vec![comparison, Expr::ConditionalTargetSigilValue],
        });
        Subpattern {
          tag: None,
          subpatterns: vec![],
          guard,
          bound_vars: vec![],
        }
      },
      StringPattern(s) => {
        let guard = Some(Expr::Call {
          f: Box::new(Expr::Func(Func::BuiltIn(Rc::new("==".to_string())))),
          args: vec![Expr::Lit(Lit::StringLit(s.clone())), Expr::ConditionalTargetSigilValue]
        });

        Subpattern {
          tag: None,
          subpatterns: vec![],
          guard,
          bound_vars: vec![],
        }
      },
      BoolPattern(b) => {
        let guard = Some(if *b {
          Expr::ConditionalTargetSigilValue
        } else {
          Expr::Call {
            f: Box::new(Expr::Func(Func::BuiltIn(Rc::new("!".to_string())))),
            args: vec![Expr::ConditionalTargetSigilValue]
          }
        });
        Subpattern {
          tag: None,
          subpatterns: vec![],
          guard,
          bound_vars: vec![],
        }
      },
      VarPattern(var) => match symbol_table.lookup_by_name(var) {
        Some(symbol) => handle_symbol(Some(symbol), &vec![], symbol_table),
        None => Subpattern {
          tag: None,
          subpatterns: vec![],
          guard: None,
          bound_vars: vec![Some(var.clone())],
        }
      }
    }
  }
}
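For orientation, here is a minimal sketch (not part of the diff) of the two Alternatives that reduce_if_expression would build for an expression like `if x is Some(a) then { ... } else { ... }`. The tag index, the bound variable name, and the empty item lists are made-up illustrative values, and the sketch assumes the Alternative definition above is in scope.

// Illustrative only: the pattern arm binds `a`, so it carries one bound variable and no
// nested subpatterns; the hypothetical tag 0 stands in for the Some constructor's index.
fn example_alternatives() -> Vec<Alternative> {
  vec![
    Alternative {
      tag: Some(0),
      subpatterns: vec![],
      guard: None,
      bound_vars: vec![Some(Rc::new("a".to_string()))],
      item: vec![], // the reduced then-clause statements would go here
    },
    // catch-all arm produced for the else-clause
    Alternative {
      tag: None,
      subpatterns: vec![],
      guard: None,
      bound_vars: vec![],
      item: vec![], // the reduced else-clause statements would go here
    },
  ]
}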
schala-lang/language/src/source_map.rs (Normal file, 27 lines)
@@ -0,0 +1,27 @@

#[derive(Debug, Clone)]
pub struct SourceMap<T> {
  pub node: T,
  pub data: Option<SourceData>
}

impl<T> SourceMap<T> {
  pub fn get(&self) -> &T {
    &self.node
  }

  /*
  pub fn get_source_data(&self) -> Option<SourceData> {
    self.data.clone()
  }
  */
}

#[derive(Debug, Clone)]
pub struct SourceData {
  pub line_number: usize,
  pub char_idx: usize
}
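A small usage sketch for the new SourceMap type, assuming the definitions above are in scope; the node payload and the positions are made-up values.

fn source_map_example() {
  let mapped = SourceMap {
    node: "some AST node",
    data: Some(SourceData { line_number: 3, char_idx: 14 }),
  };
  // get() returns a reference to the wrapped node; the positional data rides alongside it.
  assert_eq!(*mapped.get(), "some AST node");
}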
@@ -5,14 +5,14 @@ use std::iter::{Iterator, Peekable};
use std::fmt;

#[derive(Debug, PartialEq, Clone)]
pub enum TokenKind {
  Newline, Semicolon,

  LParen, RParen,
  LSquareBracket, RSquareBracket,
  LAngleBracket, RAngleBracket,
  LCurlyBrace, RCurlyBrace,
  Pipe, Backslash,

  Comma, Period, Colon, Underscore,
  Slash,
@@ -27,9 +27,9 @@ pub enum TokenType {

  Error(String),
}
use self::TokenKind::*;

impl fmt::Display for TokenKind {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      &Operator(ref s) => write!(f, "Operator({})", **s),
@@ -87,19 +87,28 @@ lazy_static! {

#[derive(Debug, Clone)]
pub struct Token {
  pub kind: TokenKind,
  pub offset: (usize, usize),
}

#[derive(Debug, Clone)]
pub struct TokenMetadata {
  pub offset: (usize, usize)
}

impl Token {
  pub fn get_error(&self) -> Option<String> {
    match self.kind {
      TokenKind::Error(ref s) => Some(s.clone()),
      _ => None,
    }
  }
  pub fn to_string_with_metadata(&self) -> String {
    format!("{}(L:{},c:{})", self.kind, self.offset.0, self.offset.1)
  }

  pub fn get_kind(&self) -> TokenKind {
    self.kind.clone()
  }
}

@@ -121,7 +130,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
    .peekable();

  while let Some((line_idx, ch_idx, c)) = input.next() {
    let cur_tok_kind = match c {
      '/' => match input.peek().map(|t| t.2) {
        Some('/') => {
          while let Some((_, _, c)) = input.next() {
@@ -157,17 +166,18 @@ pub fn tokenize(input: &str) -> Vec<Token> {
      '{' => LCurlyBrace, '}' => RCurlyBrace,
      '[' => LSquareBracket, ']' => RSquareBracket,
      '"' => handle_quote(&mut input),
      '\\' => Backslash,
      c if c.is_digit(10) => handle_digit(c, &mut input),
      c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input), //TODO I'll probably have to rewrite this if I care about types being uppercase, also type parameterization
      c if is_operator(&c) => handle_operator(c, &mut input),
      unknown => Error(format!("Unexpected character: {}", unknown)),
    };
    tokens.push(Token { kind: cur_tok_kind, offset: (line_idx, ch_idx) });
  }
  tokens
}

fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
  if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
    input.next();
    let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
@@ -182,7 +192,7 @@ fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) ->
  }
}

fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
  let mut buf = String::new();
  loop {
    match input.next().map(|(_, _, c)| { c }) {
@@ -201,22 +211,22 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType
        }
      },
      Some(c) => buf.push(c),
      None => return TokenKind::Error(format!("Unclosed string")),
    }
  }
  TokenKind::StrLiteral(Rc::new(buf))
}

fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
  let mut buf = String::new();
  buf.push(c);
  if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
    return TokenKind::Underscore
  }

  loop {
    match input.peek().map(|&(_, _, c)| { c }) {
      Some(c) if c.is_alphanumeric() || c == '_' => {
        input.next();
        buf.push(c);
      },
@@ -225,12 +235,12 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>
  }

  match KEYWORDS.get(buf.as_str()) {
    Some(kw) => TokenKind::Keyword(*kw),
    None => TokenKind::Identifier(Rc::new(buf)),
  }
}

fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
  match c {
    '<' | '>' | '|' | '.' => {
      let ref next = input.peek().map(|&(_, _, c)| { c });
@@ -275,7 +285,7 @@ fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>)
      }
    }
  }
  TokenKind::Operator(Rc::new(buf))
}

#[cfg(test)]
@@ -290,26 +300,29 @@ mod schala_tokenizer_tests {
  #[test]
  fn tokens() {
    let a = tokenize("let a: A<B> = c ++ d");
    let token_kinds: Vec<TokenKind> = a.into_iter().map(move |t| t.kind).collect();
    assert_eq!(token_kinds, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
      LAngleBracket, ident!("B"), RAngleBracket, op!("="), ident!("c"), op!("++"), ident!("d")]);
  }

  #[test]
  fn underscores() {
    let token_kinds: Vec<TokenKind> = tokenize("4_8").into_iter().map(move |t| t.kind).collect();
    assert_eq!(token_kinds, vec![digit!("4"), Underscore, digit!("8")]);

    let token_kinds2: Vec<TokenKind> = tokenize("aba_yo").into_iter().map(move |t| t.kind).collect();
    assert_eq!(token_kinds2, vec![ident!("aba_yo")]);
  }

  #[test]
  fn comments() {
    let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.kind).collect();
    assert_eq!(token_kinds, vec![digit!("1"), op!("+"), digit!("2")]);
  }

  #[test]
  fn backtick_operators() {
    let token_kinds: Vec<TokenKind> = tokenize("1 `plus` 2").into_iter().map(move |t| t.kind).collect();
    assert_eq!(token_kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
  }
}
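A test the diff does not add, sketched here as a possible follow-up, would exercise the new Backslash variant together with the underscore handling; it assumes tokenize, TokenKind, and the test module's ident! macro from above are in scope.

  #[test]
  fn backslash_and_underscored_identifier() {
    // '\' now becomes its own token, and identifiers may contain underscores.
    let kinds: Vec<TokenKind> = tokenize(r"\param_1").into_iter().map(|t| t.kind).collect();
    assert_eq!(kinds, vec![Backslash, ident!("param_1")]);
  }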
@@ -1,3 +1,254 @@
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
|
use ast::*;
|
||||||
|
use util::ScopeStack;
|
||||||
|
|
||||||
pub type TypeName = Rc<String>;
|
pub type TypeName = Rc<String>;
|
||||||
|
|
||||||
|
pub struct TypeContext<'a> {
|
||||||
|
variable_map: ScopeStack<'a, Rc<String>, Type<TVar>>,
|
||||||
|
evar_count: u32
|
||||||
|
}
|
||||||
|
|
||||||
|
/// `InferResult` is the monad in which type inference takes place.
|
||||||
|
type InferResult<T> = Result<T, TypeError>;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
struct TypeError { msg: String }
|
||||||
|
|
||||||
|
impl TypeError {
|
||||||
|
fn new<A>(msg: &str) -> InferResult<A> {
|
||||||
|
Err(TypeError { msg: msg.to_string() })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// `Type` is parameterized by whether the type variables can be just universal, or universal or
|
||||||
|
/// existential.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
enum Type<A> {
|
||||||
|
Var(A),
|
||||||
|
Const(TConst),
|
||||||
|
Arrow(Box<Type<A>>, Box<Type<A>>),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
enum TVar {
|
||||||
|
Univ(UVar),
|
||||||
|
Exist(ExistentialVar)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
struct UVar(Rc<String>);
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
struct ExistentialVar(u32);
|
||||||
|
|
||||||
|
impl Type<UVar> {
|
||||||
|
fn to_tvar(&self) -> Type<TVar> {
|
||||||
|
match self {
|
||||||
|
Type::Var(UVar(name)) => Type::Var(TVar::Univ(UVar(name.clone()))),
|
||||||
|
Type::Const(ref c) => Type::Const(c.clone()),
|
||||||
|
Type::Arrow(a, b) => Type::Arrow(
|
||||||
|
Box::new(a.to_tvar()),
|
||||||
|
Box::new(b.to_tvar())
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Type<TVar> {
|
||||||
|
fn skolemize(&self) -> Type<UVar> {
|
||||||
|
match self {
|
||||||
|
Type::Var(TVar::Univ(uvar)) => Type::Var(uvar.clone()),
|
||||||
|
Type::Var(TVar::Exist(_)) => Type::Var(UVar(Rc::new(format!("sk")))),
|
||||||
|
Type::Const(ref c) => Type::Const(c.clone()),
|
||||||
|
Type::Arrow(a, b) => Type::Arrow(
|
||||||
|
Box::new(a.skolemize()),
|
||||||
|
Box::new(b.skolemize())
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TypeIdentifier {
|
||||||
|
fn to_monotype(&self) -> Type<UVar> {
|
||||||
|
match self {
|
||||||
|
TypeIdentifier::Tuple(_) => Type::Const(TConst::Nat),
|
||||||
|
TypeIdentifier::Singleton(TypeSingletonName { name, .. }) => {
|
||||||
|
match &name[..] {
|
||||||
|
"Nat" => Type::Const(TConst::Nat),
|
||||||
|
"Int" => Type::Const(TConst::Int),
|
||||||
|
"Float" => Type::Const(TConst::Float),
|
||||||
|
"Bool" => Type::Const(TConst::Bool),
|
||||||
|
"String" => Type::Const(TConst::StringT),
|
||||||
|
_ => Type::Const(TConst::Nat),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
enum TConst {
|
||||||
|
User(Rc<String>),
|
||||||
|
Unit,
|
||||||
|
Nat,
|
||||||
|
Int,
|
||||||
|
Float,
|
||||||
|
StringT,
|
||||||
|
Bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TConst {
|
||||||
|
fn user(name: &str) -> TConst {
|
||||||
|
TConst::User(Rc::new(name.to_string()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> TypeContext<'a> {
|
||||||
|
pub fn new() -> TypeContext<'a> {
|
||||||
|
TypeContext {
|
||||||
|
variable_map: ScopeStack::new(None),
|
||||||
|
evar_count: 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn typecheck(&mut self, ast: &AST) -> Result<String, String> {
|
||||||
|
match self.infer_ast(ast) {
|
||||||
|
Ok(t) => Ok(format!("{:?}", t)),
|
||||||
|
Err(err) => Err(format!("Type error: {:?}", err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> TypeContext<'a> {
|
||||||
|
fn infer_ast(&mut self, ast: &AST) -> InferResult<Type<UVar>> {
|
||||||
|
self.infer_block(&ast.0)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn infer_statement(&mut self, stmt: &Statement) -> InferResult<Type<UVar>> {
|
||||||
|
match stmt {
|
||||||
|
Statement::ExpressionStatement(ref expr) => self.infer_expr(expr),
|
||||||
|
Statement::Declaration(ref decl) => self.infer_decl(decl),
|
||||||
|
}
|
||||||
|
}
  fn infer_expr(&mut self, expr: &Expression) -> InferResult<Type<UVar>> {
    match expr {
      Expression(expr_type, Some(type_anno)) => {
        let tx = self.infer_expr_type(expr_type)?;
        let ty = type_anno.to_monotype();
        self.unify(&ty.to_tvar(), &tx.to_tvar()).map(|x| x.skolemize())
      },
      Expression(expr_type, None) => self.infer_expr_type(expr_type)
    }
  }

  fn infer_decl(&mut self, _decl: &Declaration) -> InferResult<Type<UVar>> {
    Ok(Type::Const(TConst::user("unimplemented")))
  }

  fn infer_expr_type(&mut self, expr_type: &ExpressionType) -> InferResult<Type<UVar>> {
    use self::ExpressionType::*;
    Ok(match expr_type {
      NatLiteral(_) => Type::Const(TConst::Nat),
      FloatLiteral(_) => Type::Const(TConst::Float),
      StringLiteral(_) => Type::Const(TConst::StringT),
      BoolLiteral(_) => Type::Const(TConst::Bool),
      Value(name) => {
        //TODO handle the distinction between 0-arg constructors and variables at some point
        // need symbol table for that
        match self.variable_map.lookup(name) {
          Some(ty) => ty.clone().skolemize(),
          None => return TypeError::new(&format!("Variable {} not found", name))
        }
      },
      IfExpression { discriminator, body } => self.infer_if_expr(discriminator, body)?,
      Call { f, arguments } => {
        let tf = self.infer_expr(f)?; //has to be an Arrow Type
        let targ = self.infer_expr(&arguments[0])?; // TODO make this work with functions with more than one arg
        match tf {
          Type::Arrow(t1, t2) => {
            self.unify(&t1.to_tvar(), &targ.to_tvar())?;
            *t2.clone()
          },
          _ => return TypeError::new("not a function")
        }
      },
      Lambda { params, .. } => {
        let _arg_type = match &params[0] {
          (_, Some(type_anno)) => type_anno.to_monotype().to_tvar(),
          (_, None) => self.allocate_existential(),
        };
        //let _result_type = unimplemented!();
        return TypeError::new("Unimplemented");

        //Type::Arrow(Box::new(arg_type), Box::new(result_type))
      }
      _ => Type::Const(TConst::user("unimplemented"))
    })
  }

  fn infer_if_expr(&mut self, discriminator: &Discriminator, body: &IfExpressionBody) -> InferResult<Type<UVar>> {
    let _test = match discriminator {
      Discriminator::Simple(expr) => expr,
      _ => return TypeError::new("Dame desu")
    };

    let (_then_clause, _maybe_else_clause) = match body {
      IfExpressionBody::SimpleConditional(a, b) => (a, b),
      _ => return TypeError::new("Dont work")
    };

    TypeError::new("Not implemented")
  }

  fn infer_block(&mut self, block: &Block) -> InferResult<Type<UVar>> {
    let mut output = Type::Const(TConst::Unit);
    for statement in block.iter() {
      output = self.infer_statement(statement)?;
    }
    Ok(output)
  }

  fn unify(&mut self, _t1: &Type<TVar>, _t2: &Type<TVar>) -> InferResult<Type<TVar>> {
    TypeError::new("not implemented")
  }

  fn allocate_existential(&mut self) -> Type<TVar> {
    let n = self.evar_count;
    self.evar_count += 1;
    Type::Var(TVar::Exist(ExistentialVar(n)))
  }
}
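
`unify` is still a stub in this commit. For orientation only, here is one possible shape a structural unifier over this `Type<TVar>` representation could take. It is an illustration, not the author's implementation: it keeps no substitution, performs no occurs check, and assumes `Type`, `TVar`, and `TConst` derive `Debug` and `Clone` as shown above.

  // Illustrative sketch only -- not part of this commit.
  fn unify_sketch(t1: &Type<TVar>, t2: &Type<TVar>) -> Result<Type<TVar>, String> {
    match (t1, t2) {
      // A variable unifies with anything; a real unifier would also record the
      // binding in a substitution and perform an occurs check.
      (Type::Var(_), t) | (t, Type::Var(_)) => Ok(t.clone()),
      // Arrows unify component-wise.
      (Type::Arrow(a1, b1), Type::Arrow(a2, b2)) => Ok(Type::Arrow(
        Box::new(unify_sketch(a1, a2)?),
        Box::new(unify_sketch(b1, b2)?),
      )),
      // Constants must match exactly; Debug output stands in for PartialEq,
      // which the TConst in this diff does not derive.
      (Type::Const(a), Type::Const(b)) if format!("{:?}", a) == format!("{:?}", b) =>
        Ok(Type::Const(a.clone())),
      _ => Err(format!("Cannot unify {:?} with {:?}", t1, t2)),
    }
  }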

#[cfg(test)]
mod tests {
  use super::*;

  fn parse(input: &str) -> AST {
    let tokens: Vec<::tokenizing::Token> = ::tokenizing::tokenize(input);
    let mut parser = ::parsing::Parser::new(tokens);
    parser.parse().unwrap()
  }

  // Not yet exercised: basic_inference() below is still empty.
  macro_rules! type_test {
    ($input:expr, $correct:expr) => {
      {
        let mut tc = TypeContext::new();
        let ast = parse($input);
        tc.add_symbols(&ast);
        assert_eq!($correct, tc.typecheck(&ast).unwrap())
      }
    }
  }

  #[test]
  fn basic_inference() {
  }
}

@@ -1,4 +1,5 @@
 #![feature(trace_macros)]
+#![recursion_limit="128"]
 extern crate proc_macro;
 extern crate proc_macro2;
 #[macro_use]
@@ -66,6 +67,65 @@ fn get_attribute_identifier(attr_name: &str, attrs: &Vec<Attribute>) -> Option<p
   })
 }
+
+/* a pass_chain function signature looks like:
+ * fn(&mut ProgrammingLanguageInterface, A, Option<&mut DebugHandler>) -> Result<B, String>
+ *
+ * TODO use some kind of failure-handling library to make this better
+ */
+fn generate_pass_chain(idents: Vec<Ident>) -> proc_macro2::TokenStream {
+  let final_return = quote! {
+    {
+      let final_output: FinishedComputation = unfinished_computation.finish(Ok(input_to_next_stage));
+      final_output
+    }
+  };
+
+  let nested_passes = idents.iter()
+    .rev()
+    .fold(final_return, |later_fragment, pass_name| {
+      quote! {
+        {
+          let pass_name = stringify!(#pass_name);
+          let (output, duration) = {
+            let ref debug_map = eval_options.debug_passes;
+            let debug_handle = match debug_map.get(pass_name) {
+              Some(PassDebugOptionsDescriptor { opts }) => {
+                let ptr = &mut unfinished_computation;
+                ptr.cur_debug_options = opts.clone();
+                Some(ptr)
+              }
+              _ => None
+            };
+            let start = time::Instant::now();
+            let pass_output = #pass_name(self, input_to_next_stage, debug_handle);
+            let elapsed = start.elapsed();
+            (pass_output, elapsed)
+          };
+          if eval_options.debug_timing {
+            unfinished_computation.durations.push(duration);
+          }
+          match output {
+            Ok(input_to_next_stage) => #later_fragment,
+            //TODO this error type needs to be guaranteed to provide a useable string
+            Err(err) => return unfinished_computation.output(Err(format!("Pass {} failed:\n{}", pass_name, err))),
+          }
+        }
+      }
+    });
+
+  quote! {
+    {
+      use std::time;
+      use schala_repl::PassDebugOptionsDescriptor;
+
+      let eval_options = options;
+      let input_to_next_stage = input;
+      let mut unfinished_computation = UnfinishedComputation::default();
+      #nested_passes
+    }
+  }
+}
+
 #[proc_macro_derive(ProgrammingLanguageInterface,
 attributes(LanguageName, SourceFileExtension, PipelineSteps, DocMethod, HandleCustomInterpreterDirectives))]
 pub fn derive_programming_language_interface(input: TokenStream) -> TokenStream {
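
The interesting part of `generate_pass_chain` is the right fold: passes are visited in reverse, so each pass's generated block wraps the block for the passes after it, with `final_return` innermost. A toy illustration of the same nesting, using plain strings instead of `TokenStream`s (not code from this repository):

  // Illustrative only -- shows the nesting produced by .rev().fold(...).
  let passes = ["tokenizing", "parsing", "evaluation"];
  let nested = passes.iter().rev().fold("final_return".to_string(), |later, pass| {
    format!("{} {{ {} }}", pass, later)
  });
  assert_eq!(nested, "tokenizing { parsing { evaluation { final_return } } }");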
@@ -112,6 +172,8 @@ pub fn derive_programming_language_interface(input: TokenStream) -> TokenStream
     }
   });
+
+  let pass_chain = generate_pass_chain(pass_idents.collect());
+
   let tokens = quote! {
     use schala_repl::PassDescriptor;
     impl ProgrammingLanguageInterface for #name {
@@ -122,8 +184,7 @@ pub fn derive_programming_language_interface(input: TokenStream) -> TokenStream
         #file_ext.to_string()
       }
       fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
-        let mut chain = pass_chain![self, options; #(#pass_idents),* ];
-        chain(input)
+        #pass_chain
       }
       fn get_passes(&self) -> Vec<PassDescriptor> {
         vec![ #(#pass_descriptors),* ]

@@ -173,61 +173,3 @@ pub trait ProgrammingLanguageInterface {
     None
   }
 }
-
-/* a pass_chain function signature looks like:
- * fn(&mut ProgrammingLanguageInterface, A, Option<&mut DebugHandler>) -> Result<B, String>
- *
- * TODO use some kind of failure-handling library to make this better
- */
-
-#[macro_export]
-macro_rules! pass_chain {
-  ($state:expr, $eval_options:expr; $($pass:path), *) => {
-    |text_input| {
-      let mut comp = UnfinishedComputation::default();
-      pass_chain_helper! { ($state, comp, $eval_options); text_input $(, $pass)* }
-    }
-  };
-}
-
-#[macro_export]
-macro_rules! pass_chain_helper {
-  (($state:expr, $comp:expr, $eval_options:expr); $input:expr, $pass:path $(, $rest:path)*) => {
-    {
-      use std::time;
-      use schala_repl::PassDebugOptionsDescriptor;
-      let pass_name = stringify!($pass);
-      let (output, duration) = {
-        let ref debug_map = $eval_options.debug_passes;
-        let debug_handle = match debug_map.get(pass_name) {
-          Some(PassDebugOptionsDescriptor { opts }) => {
-            let ptr = &mut $comp;
-            ptr.cur_debug_options = opts.clone();
-            Some(ptr)
-          }
-          _ => None
-        };
-        let start = time::Instant::now();
-        let pass_output = $pass($state, $input, debug_handle);
-        let elapsed = start.elapsed();
-        (pass_output, elapsed)
-      };
-      if $eval_options.debug_timing {
-        $comp.durations.push(duration);
-      }
-      match output {
-        Ok(result) => pass_chain_helper! { ($state, $comp, $eval_options); result $(, $rest)* },
-        Err(err) => { //TODO this error type needs to be guaranteed to provide a useable string
-          $comp.output(Err(format!("Pass {} failed:\n{}", pass_name, err)))
-        }
-      }
-    }
-  };
-  // Done
-  (($state:expr, $comp:expr, $eval_options:expr); $final_output:expr) => {
-    {
-      let final_output: FinishedComputation = $comp.finish(Ok($final_output));
-      final_output
-    }
-  };
-}