Compare commits

1 commit

Author: greg
SHA1: 1e7f5bbd25
Message: WIP
Date: 2018-08-05 13:37:49 -07:00

67 changed files with 5067 additions and 11061 deletions

.gitignore (vendored) - 2 changed lines

@@ -1,4 +1,4 @@
Cargo.lock
target
.schala_repl
.schala_history
rusty-tags.vi

Cargo.lock (generated) - 1258 changed lines

File diff suppressed because it is too large.

View File

@@ -2,17 +2,15 @@
name = "schala" name = "schala"
version = "0.1.0" version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"] authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2018"
resolver = "2"
[dependencies] [dependencies]
getopts = "0.2.21"
schala-repl = { path = "schala-repl" } schala-repl = { path = "schala-repl" }
schala-codegen = { path = "schala-codegen" }
maaru-lang = { path = "maaru" }
rukka-lang = { path = "rukka" }
robo-lang = { path = "robo" }
schala-lang = { path = "schala-lang" } schala-lang = { path = "schala-lang" }
# maaru-lang = { path = "maaru" }
# rukka-lang = { path = "rukka" }
# robo-lang = { path = "robo" }
[build-dependencies] [build-dependencies]
includedir_codegen = "0.2.0" includedir_codegen = "0.2.0"

Grammar (new file) - 31 lines

@@ -0,0 +1,31 @@
<program> := <statements> EOF
<statements> := <statement>
| <statement> SEP <statements>
<statement> := let <id> = <expr>
| <expr>
| <fn_block>
<fn_block> := fn <id> ( <arg_list> ) <statements> end
<arg_list> := e
| <id>
| <id> , <arg_list>
<expr> := if <expr> then <statements> end
| if <expr> then <statements> else <statements> end
| while <expr> SEP <statements> end
| ( <expr> )
| <binop>
<binop> := <simple_expr>
| <simple_expr> <id> <binop>
<simple_expr> := <id>
| <number>
| <string>
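
As a rough illustration of the productions above (the file header says 31 lines, so the grammar shown here may be truncated), a small hypothetical program that should parse under these rules, assuming a newline serves as SEP and that operator symbols like `+` lex as an `<id>`:

```
fn add(a, b)
  a + b
end

let x = 1 + 2
if x then
  while x
    x - 1
  end
end
```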

README.md - 100 changed lines

@@ -1,46 +1,21 @@
# Schala - a programming language meta-interpreter

(removed)
Schala is a Rust framework written to make it easy to create and experiment
with multiple toy programming languages. It provides a cross-language REPL and
provisions for tokenizing text, parsing tokens, evaluating an abstract syntax
tree, and other tasks that are common to all programming languages, as well as
sharing state between multiple programming languages.

Schala is implemented as a Rust library `schala-repl`, which provides a `Repl`
data structure that takes in a value implementing the
`ProgrammingLanguageInterface` trait. Individual programming language
implementations are Rust types that implement `ProgrammingLanguageInterface`
and store whatever persistent state is relevant to that language.

(added)
Schala is a Rust framework written to make it easy to
create and experiment with toy programming languages. It provides
a common REPL, and a trait `ProgrammingLanguage` with provisions
for tokenizing text, parsing tokens, evaluating an abstract syntax tree,
and other tasks that are common to all programming languages.

Schala is implemented as a Rust library `schala_lib`, which provides a
`schala_main` function. This function serves as the main loop of the REPL, if run
interactively, or otherwise reads and interprets programming language source
files. It expects as input a vector of `PLIGenerator`, which is a type representing
a closure that returns a boxed trait object that implements the `ProgrammingLanguage` trait,
and stores any persistent state relevant to that programming language. The ability
to share state between different programming languages is in the works.
## About
## Running
Run schala with the normal `cargo run`. This will drop you into a REPL
environment. Type `:help` for more information, or type in text in any
supported programming language (currently only `schala-lang`) to evaluate it in
the REPL.
### Examples
Try running the following `schala-lang` code example in the REPL:
```
>> 1 + 1
(Total time)=> 736.368µs
=> 2
>> fn foo(x) { x + 10 }
(Total time)=> 772.496µs
=>
>> foo(0)
(Total time)=> 593.591µs
=> 10
>> 5 + foo(1)
(Total time)=> 1.119916ms
=> 16
>>
```
## History
Schala started out life as an experiment in writing a Javascript-like
programming language that would never encounter any kind of runtime value
@@ -58,18 +33,18 @@ creating a language name confusingly close to Scala. The naming scheme for
languages implemented with the Schala meta-interpreter is Chrono Trigger
characters.

(removed)
Schala and languages implemented with it are incomplete alpha software and are
not ready for public release.

(added)
Schala is incomplete alpha software and is not ready for public release.

## Languages implemented using the meta-interpreter

(removed)
* The eponymous *Schala* language is a work-in-progress general purpose
programming language with static typing and algebraic data types. Its design
goals include having a very straightforward implementation and being syntactically
minimal.
* *Maaru* is a very simple dynamically-typed scripting language, with the semantics
that all runtime errors return a `null` value rather than fail.

(added)
* The eponymous *Schala* language is an interpreted/compiled scripting language,
designed to be relatively simple, but with a reasonably sophisticated type
system.
* *Maaru* was the original Schala (since renamed to free up the name *Schala*
for the above language), a very simple dynamically-typed scripting language
such that all possible runtime errors result in null rather than program
failure.

* *Robo* is an experiment in creating a lazy, functional, strongly-typed language
much like Haskell
@@ -81,30 +56,23 @@ much like Haskell
Here's a partial list of resources I've made use of in the process
of learning how to write a programming language.

### General
* http://thume.ca/2019/04/18/writing-a-compiler-in-rust/

### Type-checking
* https://skillsmatter.com/skillscasts/10868-inside-the-rust-compiler
* https://www.youtube.com/watch?v=il3gD7XMdmA
* http://dev.stephendiehl.com/fun/006_hindley_milner.html
* https://rust-lang-nursery.github.io/rustc-guide/type-inference.html
* https://eli.thegreenplace.net/2018/unification/
* https://eli.thegreenplace.net/2018/type-inference/
* http://smallcultfollowing.com/babysteps/blog/2017/03/25/unification-in-chalk-part-1/
* http://reasonableapproximation.net/2019/05/05/hindley-milner.html
* https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html

### Evaluation
* _Understanding Computation_, Tom Stuart, O'Reilly 2013
* _Basics of Compiler Design_, Torben Mogensen

### Parsing
* http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
* https://soc.github.io/languages/unified-condition-syntax
* [Crafting Interpreters](http://www.craftinginterpreters.com/)

### LLVM
* http://blog.ulysse.io/2016/07/03/llvm-getting-started.html

### Rust resources
* https://thefullsnack.com/en/rust-for-the-web.html
* https://rocket.rs/guide/getting-started/

TODO.md - 241 changed lines

@@ -1,177 +1,11 @@
# Immediate TODOs / General Code Cleanup
## Parsing
# TODO Items
* cf. https://siraben.dev/2022/03/22/tree-sitter-linter.html write a tree-sitter parser for Schala
* Create a macro system, perhaps c.f. Crystal's?
* Macro system should be able to implement:
* printf-style variadic arguments
* something like the Rust/Haskell `Derive` construct
* doing useful things with all variants of an enum
* (e.g. what https://matklad.github.io//2022/03/26/self-modifying-code.html tries to solve)
## Testing
- https://nshipster.com/never/
-https://cranelift.readthedocs.io/en/latest/?badge=latest<Paste>
* Make an automatic (macro-based?) system for numbering compiler errors, this should be every type of error
## Symbols
* Add some good printf-debugging impls for SymbolTable-related items
* the symbol table should probably *only* be for global definitions (maybe rename it to reflect this?)
* dealing with variable lookup w/in functions/closures should probably happen in AST -> ReducedAST
* b/c that's where we go from a string name to a canonical ID (for e.g. 2nd param in 3rd enclosing scope)
* In fact to prove this works, the symbol table should _parallelize_ the process of checking subscopes for local items
* Old notes on a plan of attack:
1. modify visitor so it can handle scopes
-this is needed both to handle import scope correctly
-and also to support making FQSNs aware of function parameters
2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away
## Typechecking
* make a type to represent types rather than relying on string comparisons
* look at https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html
## General code cleanup
* standardize on an error type that isn't String
* implement a visitor pattern for the use of scope_resolver
* maybe implement this twice: 1) the value-returning, no-default one in the haoyi blogpost,
* look at
* https://gitlab.haskell.org/ghc/ghc/wikis/pattern-synonyms
* the non-value-returning, default one like in rustc (cf. https://github.com/rust-unofficial/patterns/blob/master/patterns/visitor.md)
# Longer-term Ideas
## Language Syntax
* the `type` declaration should have some kind of GADT-like syntax
* syntactic sugar for typestates? (cf. https://rustype.github.io/notes/notes/rust-typestate-series/rust-typestate-part-1.html )
* use `let` sigil to indicate a variable in a pattern explicitly:
```
q is MyStruct(let a, Chrono::Trigga) then {
// a is in scope here
}
```
* if you have a pattern-match where one variant has a variable and the other
lacks it instead of treating this as a type error, promote the bound variable
to an option type
* what if there was something like React jsx syntax built in? i.e. a way to
automatically transform some kind of markup into a function call, cf. `<h1
prop="arg">` -> h1(prop=arg)
* implement and test open/use statements
* Include extensible scala-style `html"string ${var}"` string interpolations
* A neat idea for pattern matching optimization would be if you could match on
one of several things in a list
ex:
```
if x {
is (comp, LHSPat, RHSPat) if comp in ["==", "<"] -> ...
}
```
* Schala should have both currying *and* default arguments!
```
fn a(b: Int, c:Int, d:Int = 1) -> Int
a(1,2) : Int
a(1,2,d=2): Int
a(_,1,3) : Int -> Int
a(1,2, c=_): Int -> Int
a(_,_,_) : Int -> Int -> Int -> Int
```
* scoped types - be able to define a quick enum type scoped to a function or other type for
something, that only is meant to be used as a quick bespoke interface between
two other things
ex.
```
type enum {
type enum MySubVariant {
SubVariant1, SubVariant2, etc.
}
Variant1(MySubVariant),
Variant2(...),
}
```
* inclusive/exclusive range syntax like .. vs ..=
* Nameable patterns/ pattern synonyms cf. https://gitlab.haskell.org/ghc/ghc/-/wikis/pattern-synonyms
## Typechecking
* cf. the notation mentioned in the cardelli paper, the debug information for the `typechecking` pass should
* print the generated type variable for every subexpression in an expression
* think about idris-related ideas of multiple implementations of a type for an interface (+ vs * impl for monoids, for preorder/inorder/postorder for Foldable)
* should have an Idris-like `cast To From` function
* something like the swift `Never` type ( https://nshipster.com/never/ ) in the stdlib
## Compilation
* look into Inkwell for rust LLVM bindings
* https://cranelift.readthedocs.io/en/latest/?badge=latest<Paste>
* look at https://gluon-lang.org/doc/nightly/book/embedding-api.html
# Syntax Playground
## Trying if-syntax again
```
//simple if expr
if x == 10 then "a" else "z"
//complex if expr
if x == 10 then {
let a = 1
let b = 2
a + b
} else {
55
}
// different comparison ops
if x {
== 1 then "a"
.isPrime() then "b"
else "c"
}
/* for now disallow `if x == { 1 then ... }`, b/c hard to parse
//simple pattern-matching
if x is Person("Ivan", age) then age else 0
//match-block equivalent
if x {
is Person("Ivan", _) then "Ivan"
is Person(_, age) if age > 13 then "barmitzvah'd"
else "foo"
}
```
## (OLD) Playing around with conditional syntax ideas
-consult http://gluon-lang.org/book/embedding-api.html
- if/match playground
@@ -211,3 +45,70 @@ if the only two guard patterns are true and false, then the abbreviated syntax:
`'if' discriminator 'then' block_or_expr 'else' block_or_expr`
can replace `'if' discriminator '{' 'true' 'then' block_or_expr; 'false' 'then' block_or_expr '}'`
- Next priorities: - get ADTs working, get matches working
- inclusive/exclusive range syntax like .. vs ..=
- sketch of an idea for the REPL:
-each compiler pass should be a (procedural?) macro like
compiler_pass!("parse", dataproducts: ["ast", "parse_tree"], {
match parsing::parse(INPUT) {
Ok(
PASS.add_artifact(
}
-should have an Idris-like `cast To From` function
- REPL:
- want to be able to do things like `:doc Identifier`, and have the language load up these definitions to the REPL
* change 'trait' to 'interface'
-think about idris-related ideas of multiple implementations of a type for an interface (+ vs * impl for monoids, for preorder/inorder/postorder for Foldable)
* Share state between programming languages
* idea for Schala - scoped types - be able to define a quick enum type scoped to a function ro something, that only is meant to be used as a quick bespoke interface between two other things
* another idea, allow:
type enum {
type enum MySubVariant {
SubVariant1, SubVariant2, etc.
}
Variant1(MySubVariant),
Variant2(...),
}
* idea for Schala: both currying *and* default arguments!
ex. fn a(b: Int, c:Int, d:Int = 1) -> Int
a(1,2) : Int
a(1,2,d=2): Int
a(_,1,3) : Int -> Int
a(1,2, c=_): Int -> Int
a(_,_,_) : Int -> Int -> Int -> Int
- AST : maybe replace the Expression type with "Ascription(TypeName, Box<Expression>) nodes??
- parser: add a "debug" field to the Parser struct for all debug-related things
-scala-style html"dfasfsadf${}" string interpolations!
*Compiler passes architecture
-ProgrammingLanguageInterface defines a evaluate_in_repl() and evaluate_no_repl() functions
-these take in a vec of CompilerPasses
struct CompilerPass {
name: String,
run: fn(PrevPass) -> NextPass
}
-change "Type...." names in parser.rs to "Anno..." for non-collision with names in typechecking.rs
-get rid of code pertaining to compilation specifically, have a more general notion of "execution type"

maaru/src/compilation.rs (new file) - 279 lines

@@ -0,0 +1,279 @@
extern crate llvm_sys;
use std::collections::HashMap;
use self::llvm_sys::prelude::*;
use self::llvm_sys::{LLVMIntPredicate};
use parser::{AST, Statement, Function, Prototype, Expression, BinOp};
use schala_repl::LLVMCodeString;
use schala_repl::llvm_wrap as LLVMWrap;
type VariableMap = HashMap<String, LLVMValueRef>;
struct CompilationData {
context: LLVMContextRef,
module: LLVMModuleRef,
builder: LLVMBuilderRef,
variables: VariableMap,
main_function: LLVMValueRef,
current_function: Option<LLVMValueRef>,
}
pub fn compile_ast(ast: AST) -> LLVMCodeString {
println!("Compiling!");
let names: VariableMap = HashMap::new();
let context = LLVMWrap::create_context();
let module = LLVMWrap::module_create_with_name("example module");
let builder = LLVMWrap::CreateBuilderInContext(context);
let program_return_type = LLVMWrap::Int64TypeInContext(context);
let main_function_type = LLVMWrap::FunctionType(program_return_type, Vec::new(), false);
let main_function: LLVMValueRef = LLVMWrap::AddFunction(module, "main", main_function_type);
let mut data = CompilationData {
context: context,
builder: builder,
module: module,
variables: names,
main_function: main_function,
current_function: None,
};
let bb = LLVMWrap::AppendBasicBlockInContext(data.context, data.main_function, "entry");
LLVMWrap::PositionBuilderAtEnd(builder, bb);
let value = ast.codegen(&mut data);
LLVMWrap::BuildRet(builder, value);
let ret = LLVMWrap::PrintModuleToString(module);
// Clean up. Values created in the context mostly get cleaned up there.
LLVMWrap::DisposeBuilder(builder);
LLVMWrap::DisposeModule(module);
LLVMWrap::ContextDispose(context);
LLVMCodeString(ret)
}
trait CodeGen {
fn codegen(&self, &mut CompilationData) -> LLVMValueRef;
}
impl CodeGen for AST {
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
let int_type = LLVMWrap::Int64TypeInContext(data.context);
let mut ret = LLVMWrap::ConstInt(int_type, 0, false);
for statement in self {
ret = statement.codegen(data);
}
ret
}
}
impl CodeGen for Statement {
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
use self::Statement::*;
match self {
&ExprNode(ref expr) => expr.codegen(data),
&FuncDefNode(ref func) => func.codegen(data),
}
}
}
impl CodeGen for Function {
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
/* should have a check here for function already being defined */
let function = self.prototype.codegen(data);
let ref body = self.body;
data.current_function = Some(function);
let return_type = LLVMWrap::Int64TypeInContext(data.context);
let mut ret = LLVMWrap::ConstInt(return_type, 0, false);
let block = LLVMWrap::AppendBasicBlockInContext(data.context, function, "entry");
LLVMWrap::PositionBuilderAtEnd(data.builder, block);
//insert function params into variables
for value in LLVMWrap::GetParams(function) {
let name = LLVMWrap::GetValueName(value);
data.variables.insert(name, value);
}
for expr in body {
ret = expr.codegen(data);
}
LLVMWrap::BuildRet(data.builder, ret);
// get basic block of main
let main_bb = LLVMWrap::GetBasicBlocks(data.main_function).get(0).expect("Couldn't get first block of main").clone();
LLVMWrap::PositionBuilderAtEnd(data.builder, main_bb);
data.current_function = None;
ret
}
}
impl CodeGen for Prototype {
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
let num_args = self.parameters.len();
let return_type = LLVMWrap::Int64TypeInContext(data.context);
let mut arguments: Vec<LLVMTypeRef> = vec![];
for _ in 0..num_args {
arguments.push(LLVMWrap::Int64TypeInContext(data.context));
}
let function_type =
LLVMWrap::FunctionType(return_type,
arguments,
false);
let function = LLVMWrap::AddFunction(data.module,
&*self.name,
function_type);
let function_params = LLVMWrap::GetParams(function);
for (index, param) in function_params.iter().enumerate() {
let name = self.parameters.get(index).expect(&format!("Failed this check at index {}", index));
let new = *param;
LLVMWrap::SetValueName(new, name);
}
function
}
}
impl CodeGen for Expression {
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
use self::BinOp::*;
use self::Expression::*;
let int_type = LLVMWrap::Int64TypeInContext(data.context);
let zero = LLVMWrap::ConstInt(int_type, 0, false);
match *self {
Variable(ref name) => *data.variables.get(&**name).expect(&format!("Can't find variable {}", name)),
BinExp(Assign, ref left, ref right) => {
if let Variable(ref name) = **left {
let new_value = right.codegen(data);
data.variables.insert((**name).clone(), new_value);
new_value
} else {
panic!("Bad variable assignment")
}
}
BinExp(ref op, ref left, ref right) => {
let lhs = left.codegen(data);
let rhs = right.codegen(data);
op.codegen_with_ops(data, lhs, rhs)
}
Number(ref n) => {
let native_val = *n as u64;
let int_value: LLVMValueRef = LLVMWrap::ConstInt(int_type, native_val, false);
int_value
}
Conditional(ref test, ref then_expr, ref else_expr) => {
let condition_value = test.codegen(data);
let is_nonzero =
LLVMWrap::BuildICmp(data.builder,
LLVMIntPredicate::LLVMIntNE,
condition_value,
zero,
"ifcond");
let func = LLVMWrap::GetBasicBlockParent(LLVMWrap::GetInsertBlock(data.builder));
let mut then_block =
LLVMWrap::AppendBasicBlockInContext(data.context, func, "then_block");
let mut else_block =
LLVMWrap::AppendBasicBlockInContext(data.context, func, "else_block");
let merge_block =
LLVMWrap::AppendBasicBlockInContext(data.context, func, "ifcont");
// add conditional branch to ifcond block
LLVMWrap::BuildCondBr(data.builder, is_nonzero, then_block, else_block);
// start inserting into then block
LLVMWrap::PositionBuilderAtEnd(data.builder, then_block);
// then-block codegen
let then_return = then_expr.codegen(data);
LLVMWrap::BuildBr(data.builder, merge_block);
// update then block b/c recursive codegen() call may have changed the notion of
// the current block
then_block = LLVMWrap::GetInsertBlock(data.builder);
// then do the same stuff again for the else branch
//
LLVMWrap::PositionBuilderAtEnd(data.builder, else_block);
let else_return = match *else_expr {
Some(ref e) => e.codegen(data),
None => zero,
};
LLVMWrap::BuildBr(data.builder, merge_block);
else_block = LLVMWrap::GetInsertBlock(data.builder);
LLVMWrap::PositionBuilderAtEnd(data.builder, merge_block);
let phi = LLVMWrap::BuildPhi(data.builder, int_type, "phinode");
let values = vec![then_return, else_return];
let blocks = vec![then_block, else_block];
LLVMWrap::AddIncoming(phi, values, blocks);
phi
}
Block(ref exprs) => {
let mut ret = zero;
for e in exprs.iter() {
ret = e.codegen(data);
}
ret
}
ref e => {
println!("Unimplemented {:?}", e);
unimplemented!()
}
}
}
}
impl BinOp {
fn codegen_with_ops(&self, data: &CompilationData, lhs: LLVMValueRef, rhs: LLVMValueRef) -> LLVMValueRef {
use self::BinOp::*;
macro_rules! simple_binop {
($fnname: expr, $name: expr) => {
$fnname(data.builder, lhs, rhs, $name)
}
}
let int_type = LLVMWrap::Int64TypeInContext(data.context);
match *self {
Add => simple_binop!(LLVMWrap::BuildAdd, "addtemp"),
Sub => simple_binop!(LLVMWrap::BuildSub, "subtemp"),
Mul => simple_binop!(LLVMWrap::BuildMul, "multemp"),
Div => simple_binop!(LLVMWrap::BuildUDiv, "divtemp"),
Mod => simple_binop!(LLVMWrap::BuildSRem, "remtemp"),
Less => {
let pred: LLVMValueRef =
LLVMWrap::BuildICmp(data.builder, LLVMIntPredicate::LLVMIntULT, lhs, rhs, "tmp");
LLVMWrap::BuildZExt(data.builder, pred, int_type, "temp")
}
Greater => {
let pred: LLVMValueRef =
LLVMWrap::BuildICmp(data.builder, LLVMIntPredicate::LLVMIntUGT, lhs, rhs, "tmp");
LLVMWrap::BuildZExt(data.builder, pred, int_type, "temp")
}
ref unknown => panic!("Bad operator {:?}", unknown),
}
}
}
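
For orientation, here is a hypothetical driver for this module, as it might appear in the crate's `lib.rs`, mirroring the commented-out `compile` method in maaru's `lib.rs` below; the function name and error handling are invented for illustration:

```
// Sketch: tokenize and parse a Maaru source string, then hand the AST to
// compilation::compile_ast() above to get back the textual LLVM IR.
fn compile_source(input: &str) -> schala_repl::LLVMCodeString {
    let tokens = tokenizer::tokenize(input).expect("tokenization failed");
    let ast = parser::parse(&tokens, &[]).expect("parse failed");
    compilation::compile_ast(ast)
}
```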

View File

@@ -5,6 +5,9 @@ extern crate schala_repl;
mod tokenizer;
mod parser;
mod eval;
mod compilation;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, UnfinishedComputation, FinishedComputation, TraceArtifact};
#[derive(Debug)]
pub struct TokenError {
@@ -31,8 +34,15 @@ impl<'a> Maaru<'a> {
}
}
/* impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> Result<String, String> { fn get_language_name(&self) -> String {
"Maaru".to_string()
}
fn get_source_file_suffix(&self) -> String {
format!("maaru")
}
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
let mut output = UnfinishedComputation::default();
let tokens = match tokenizer::tokenize(input) {
@@ -62,17 +72,32 @@ fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> Result<Str
for s in self.evaluator.run(ast).iter() {
evaluation_output.push_str(s);
}
Ok(evaluation_output)
output.finish(Ok(evaluation_output))
} }
*/
/* TODO make this work with new framework */
/* /*
impl<'a> ProgrammingLanguageInterface for Maaru<'a> { fn can_compile(&self) -> bool {
fn get_language_name(&self) -> String { true
"Maaru".to_string()
} }
fn get_source_file_suffix(&self) -> String {
format!("maaru") fn compile(&mut self, input: &str) -> LLVMCodeString {
let tokens = match tokenizer::tokenize(input) {
Ok(tokens) => tokens,
Err(err) => {
let msg = format!("Tokenization error: {:?}\n", err.msg);
panic!("{}", msg);
} }
};
let ast = match parser::parse(&tokens, &[]) {
Ok(ast) => ast,
Err(err) => {
let msg = format!("Parse error: {:?}\n", err.msg);
panic!("{}", msg);
}
};
compilation::compile_ast(ast)
} }
*/ */
}

View File

@@ -4,7 +4,7 @@ extern crate itertools;
extern crate schala_repl;
use itertools::Itertools;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions};
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, FinishedComputation, UnfinishedComputation};

pub struct Robo {
}

@@ -154,5 +154,17 @@ impl ProgrammingLanguageInterface for Robo {
fn get_source_file_suffix(&self) -> String {
format!("robo")
}
fn execute_pipeline(&mut self, input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
let output = UnfinishedComputation::default();
let tokens = match tokenize(input) {
Ok(tokens) => tokens,
Err(e) => {
return output.finish(Err(format!("Tokenize error: {:?}", e)));
}
};
output.finish(Ok(format!("{:?}", tokens)))
}
}

View File

@@ -4,7 +4,7 @@ extern crate itertools;
extern crate schala_repl;
use itertools::Itertools;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions};
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, UnfinishedComputation, FinishedComputation};
use std::iter::Peekable;
use std::vec::IntoIter;
use std::str::Chars;

@@ -72,6 +72,24 @@ impl ProgrammingLanguageInterface for Rukka {
fn get_source_file_suffix(&self) -> String {
format!("rukka")
}
fn execute_pipeline(&mut self, input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
let output = UnfinishedComputation::default();
let sexps = match read(input) {
Err(err) => {
return output.finish(Err(format!("Error: {}", err)));
},
Ok(sexps) => sexps
};
let output_str: String = sexps.into_iter().enumerate().map(|(i, sexp)| {
match self.state.eval(sexp) {
Ok(result) => format!("{}: {}", i, result.print()),
Err(err) => format!("{} Error: {}", i, err),
}
}).intersperse(format!("\n")).collect();
output.finish(Ok(output_str))
}
}
impl EvaluatorState {

View File

@@ -1,2 +0,0 @@
[toolchain]
channel = "nightly"

View File

@@ -1,8 +0,0 @@
max_width = 110
use_small_heuristics = "max"
imports_indent = "block"
imports_granularity = "crate"
group_imports = "stdexternalcrate"
match_arm_blocks = false
where_single_line = true

schala-codegen/Cargo.toml (new file) - 12 lines

@@ -0,0 +1,12 @@
[package]
name = "schala-codegen"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]
[dependencies]
syn = { version = "0.13.1", features = ["full", "extra-traits"] }
quote = "0.5"
schala-repl = { path = "../schala-repl" }
[lib]
proc-macro = true

schala-codegen/src/lib.rs (new file) - 103 lines

@@ -0,0 +1,103 @@
#![feature(trace_macros)]
#![feature(proc_macro)]
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate syn;
extern crate schala_repl;
use proc_macro::TokenStream;
use syn::{Ident, Attribute, DeriveInput};
fn extract_attribute_arg_by_name(name: &str, attrs: &Vec<Attribute>) -> Option<String> {
use syn::{Meta, Lit, MetaNameValue};
attrs.iter().map(|attr| attr.interpret_meta()).find(|meta| {
match meta {
&Some(Meta::NameValue(MetaNameValue { ident, .. })) if ident.as_ref() == name => true,
_ => false
}
}).and_then(|meta| {
match meta {
Some(Meta::NameValue(MetaNameValue { lit: Lit::Str(litstr), .. })) => Some(litstr.value()),
_ => None,
}
})
}
fn extract_attribute_list(name: &str, attrs: &Vec<Attribute>) -> Option<Vec<(Ident, Option<Vec<Ident>>)>> {
use syn::{Meta, MetaList, NestedMeta};
attrs.iter().find(|attr| {
match attr.path.segments.iter().nth(0) {
Some(segment) if segment.ident.as_ref() == name => true,
_ => false
}
}).and_then(|attr| {
match attr.interpret_meta() {
Some(Meta::List(MetaList { nested, .. })) => {
Some(nested.iter().map(|nested_meta| match nested_meta {
&NestedMeta::Meta(Meta::Word(ident)) => (ident, None),
&NestedMeta::Meta(Meta::List(MetaList { ident, nested: ref nested2, .. })) => {
let own_args = nested2.iter().map(|nested_meta2| match nested_meta2 {
&NestedMeta::Meta(Meta::Word(ident)) => ident,
_ => panic!("Bad format for doubly-nested attribute list")
}).collect();
(ident, Some(own_args))
},
_ => panic!("Bad format for nested list")
}).collect())
},
_ => panic!("{} must be a comma-delimited list surrounded by parens", name)
}
})
}
#[proc_macro_derive(ProgrammingLanguageInterface, attributes(LanguageName, SourceFileExtension, PipelineSteps))]
pub fn derive_programming_language_interface(input: TokenStream) -> TokenStream {
use schala_repl::PassDescriptor;
let ast: DeriveInput = syn::parse(input).unwrap();
let name = &ast.ident;
let attrs = &ast.attrs;
let language_name: String = extract_attribute_arg_by_name("LanguageName", attrs).expect("LanguageName is required");
let file_ext = extract_attribute_arg_by_name("SourceFileExtension", attrs).expect("SourceFileExtension is required");
let passes = extract_attribute_list("PipelineSteps", attrs).expect("PipelineSteps are required");
let pass_idents = passes.iter().map(|x| x.0);
//let pass_names: Vec<String> = passes.iter().map(|pass| pass.0.to_string()).collect();
let pass_descriptors = passes.iter().map(|pass| {
let name = pass.0.to_string();
let opts: Vec<String> = match &pass.1 {
None => vec![],
Some(opts) => opts.iter().map(|o| o.to_string()).collect(),
};
quote! {
PassDescriptor {
name: #name.to_string(),
debug_options: vec![#(format!(#opts)),*]
}
}
});
let tokens = quote! {
use schala_repl::PassDescriptor;
impl ProgrammingLanguageInterface for #name {
fn get_language_name(&self) -> String {
#language_name.to_string()
}
fn get_source_file_suffix(&self) -> String {
#file_ext.to_string()
}
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
let mut chain = pass_chain![self, options; #(#pass_idents),* ];
chain(input)
}
fn get_passes(&self) -> Vec<PassDescriptor> {
vec![ #(#pass_descriptors),* ]
//vec![ #(PassDescriptor { name: #pass_names.to_string(), debug_options: vec![] }),* ]
}
}
};
tokens.into()
}
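
To make the macro's intent concrete, here is a hypothetical invocation; the attribute names match what the macro reads above, while the struct name and pass names are invented for illustration:

```
#[derive(ProgrammingLanguageInterface)]
#[LanguageName = "Example"]
#[SourceFileExtension = "ex"]
#[PipelineSteps(tokenizing, parsing(compact, trace), evaluation)]
struct ExampleLang {
    // whatever persistent interpreter state the language keeps
    state: (),
}
```

The generated impl would report "Example" and "ex" from the first two attributes, wire `tokenizing`, `parsing`, and `evaluation` into a `pass_chain!` for `execute_pipeline`, and expose `compact` and `trace` as debug options on the parsing pass via `get_passes`.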

View File

@@ -2,26 +2,12 @@
name = "schala-lang" name = "schala-lang"
version = "0.1.0" version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"] authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2021"
[dependencies] [dependencies]
itertools = "0.10" itertools = "0.5.8"
take_mut = "0.2.2" take_mut = "0.1.3"
failure = "0.1.5" maplit = "*"
ena = "0.11.0" lazy_static = "0.2.8"
stopwatch = "0.0.7"
derivative = "2.2.0"
colored = "1.8"
radix_trie = "0.1.5"
assert_matches = "1.5"
#peg = "0.7.0"
peg = "0.8.1"
nom = "7.1.0"
nom_locate = "4.0.0"
schala-repl = { path = "../schala-repl" }
schala-codegen = { path = "../schala-codegen" }
[dev-dependencies]
test-case = "1.2.0"
pretty_assertions = "1.0.0"

View File

@@ -1,22 +0,0 @@
let _SCHALA_VERSION = "0.1.0"
type Option<T> = Some(T) | None
type Ord = LT | EQ | GT
@register_builtin(print)
fn print(arg) { }
@register_builtin(println)
fn println(arg) { }
@register_builtin(getline)
fn getline(arg) { }
fn map(input: Option<T>, func: Func): Option<T> {
if input {
is Option::Some(x) then Option::Some(func(x))
is Option::None then Option::None
}
}
type Complicated = Sunrise | Metal { black: bool, norwegian: bool } | Fella(String, Int)

schala-lang/src/ast.rs (new file) - 176 lines

@@ -0,0 +1,176 @@
use std::rc::Rc;
use builtin::{BinOp, PrefixOp};
#[derive(Debug, PartialEq)]
pub struct AST(pub Vec<Statement>);
#[derive(Debug, PartialEq, Clone)]
pub enum Statement {
ExpressionStatement(Expression),
Declaration(Declaration),
}
pub type Block = Vec<Statement>;
pub type ParamName = Rc<String>;
pub type InterfaceName = Rc<String>; //should be a singleton I think??
pub type FormalParam = (ParamName, Option<TypeName>);
#[derive(Debug, PartialEq, Clone)]
pub enum Declaration {
FuncSig(Signature),
FuncDecl(Signature, Block),
TypeDecl {
name: TypeSingletonName,
body: TypeBody,
mutable: bool
},
TypeAlias(Rc<String>, Rc<String>), //should have TypeSingletonName in it, or maybe just String, not sure
Binding {
name: Rc<String>,
constant: bool,
expr: Expression,
},
Impl {
type_name: TypeName,
interface_name: Option<InterfaceName>,
block: Vec<Declaration>,
},
Interface {
name: Rc<String>,
signatures: Vec<Signature>
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct Signature {
pub name: Rc<String>,
pub params: Vec<FormalParam>,
pub type_anno: Option<TypeName>,
}
#[derive(Debug, PartialEq, Clone)]
pub struct TypeBody(pub Vec<Variant>);
#[derive(Debug, PartialEq, Clone)]
pub enum Variant {
UnitStruct(Rc<String>),
TupleStruct(Rc<String>, Vec<TypeName>),
Record(Rc<String>, Vec<(Rc<String>, TypeName)>),
}
#[derive(Debug, PartialEq, Clone)]
pub struct Expression(pub ExpressionType, pub Option<TypeName>);
#[derive(Debug, PartialEq, Clone)]
pub enum TypeName {
Tuple(Vec<TypeName>),
Singleton(TypeSingletonName)
}
#[derive(Debug, PartialEq, Clone)]
pub struct TypeSingletonName {
pub name: Rc<String>,
pub params: Vec<TypeName>,
}
#[derive(Debug, PartialEq, Clone)]
pub enum ExpressionType {
NatLiteral(u64),
FloatLiteral(f64),
StringLiteral(Rc<String>),
BoolLiteral(bool),
BinExp(BinOp, Box<Expression>, Box<Expression>),
PrefixExp(PrefixOp, Box<Expression>),
TupleLiteral(Vec<Expression>),
Value(Rc<String>),
NamedStruct {
name: Rc<String>,
fields: Vec<(Rc<String>, Expression)>,
},
Call {
f: Box<Expression>,
arguments: Vec<Expression>,
},
Index {
indexee: Box<Expression>,
indexers: Vec<Expression>,
},
IfExpression {
discriminator: Box<Discriminator>,
body: Box<IfExpressionBody>,
},
WhileExpression {
condition: Option<Box<Expression>>,
body: Block,
},
ForExpression {
enumerators: Vec<Enumerator>,
body: Box<ForBody>,
},
Lambda {
params: Vec<FormalParam>,
body: Block,
},
ListLiteral(Vec<Expression>),
}
#[derive(Debug, PartialEq, Clone)]
pub enum Discriminator {
Simple(Expression),
BinOp(Expression, BinOp)
}
#[derive(Debug, PartialEq, Clone)]
pub enum IfExpressionBody {
SimpleConditional(Block, Option<Block>),
SimplePatternMatch(Pattern, Block, Option<Block>),
GuardList(Vec<GuardArm>)
}
#[derive(Debug, PartialEq, Clone)]
pub struct GuardArm {
pub guard: Guard,
pub body: Block,
}
#[derive(Debug, PartialEq, Clone)]
pub enum Guard {
Pat(Pattern),
HalfExpr(HalfExpr)
}
#[derive(Debug, PartialEq, Clone)]
pub struct HalfExpr {
pub op: Option<BinOp>,
pub expr: ExpressionType,
}
#[derive(Debug, PartialEq, Clone)]
pub enum Pattern {
Ignored,
TuplePattern(Vec<Pattern>),
Literal(PatternLiteral),
TupleStruct(Rc<String>, Vec<Pattern>),
Record(Rc<String>, Vec<(Rc<String>, Pattern)>),
}
#[derive(Debug, PartialEq, Clone)]
pub enum PatternLiteral {
NumPattern(ExpressionType),
StringPattern(Rc<String>),
BoolPattern(bool),
VarPattern(Rc<String>)
}
#[derive(Debug, PartialEq, Clone)]
pub struct Enumerator {
pub id: Rc<String>,
pub generator: Expression,
}
#[derive(Debug, PartialEq, Clone)]
pub enum ForBody {
MonadicReturn(Expression),
StatementBlock(Block),
}
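
As a small orientation aid (not part of the diff), the expression `1 + 2` would be represented with these types roughly as follows, assuming `BinOp::from_sigil` from the `builtin` module:

```
let one_plus_two = Expression(
    ExpressionType::BinExp(
        BinOp::from_sigil("+"),
        Box::new(Expression(ExpressionType::NatLiteral(1), None)),
        Box::new(Expression(ExpressionType::NatLiteral(2), None)),
    ),
    None, // no type annotation attached
);
```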

View File

@@ -1,345 +0,0 @@
#![allow(clippy::upper_case_acronyms)]
#![allow(clippy::enum_variant_names)]
use std::{
convert::{AsRef, From},
fmt,
rc::Rc,
};
mod operators;
mod visitor;
mod visualize;
pub use operators::{BinOp, PrefixOp};
pub use visitor::*;
use crate::{
derivative::Derivative,
identifier::{define_id_kind, Id},
parsing::Location,
util::delim_wrapped,
};
define_id_kind!(ASTItem);
pub type ItemId = Id<ASTItem>;
#[derive(Derivative, Debug)]
#[derivative(PartialEq)]
pub struct AST {
#[derivative(PartialEq = "ignore")]
pub id: ItemId,
pub statements: Block,
}
impl fmt::Display for AST {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", visualize::render_ast(self))
}
}
#[derive(Derivative, Debug, Clone)]
#[derivative(PartialEq)]
pub struct Statement<K> {
#[derivative(PartialEq = "ignore")]
pub id: ItemId,
#[derivative(PartialEq = "ignore")]
pub location: Location,
pub kind: K,
}
#[derive(Debug, PartialEq, Clone)]
pub enum StatementKind {
Expression(Expression),
Declaration(Declaration),
Import(ImportSpecifier),
Flow(FlowControl),
}
#[derive(Debug, Clone, PartialEq)]
pub enum FlowControl {
Continue,
Break,
Return(Option<Expression>),
}
#[derive(Debug, Clone, PartialEq, Default)]
pub struct Block {
pub statements: Vec<Statement<StatementKind>>,
}
impl From<Vec<Statement<StatementKind>>> for Block {
fn from(statements: Vec<Statement<StatementKind>>) -> Self {
Self { statements }
}
}
impl From<Statement<StatementKind>> for Block {
fn from(statement: Statement<StatementKind>) -> Self {
Self { statements: vec![statement] }
}
}
impl AsRef<[Statement<StatementKind>]> for Block {
fn as_ref(&self) -> &[Statement<StatementKind>] {
self.statements.as_ref()
}
}
pub type ParamName = Rc<String>;
#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub struct QualifiedName {
#[derivative(PartialEq = "ignore")]
pub id: ItemId,
pub components: Vec<Rc<String>>,
}
impl fmt::Display for QualifiedName {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self.components[..] {
[] => write!(f, "[<empty>]"),
[name] => write!(f, "{}", name),
[name, rest @ ..] => {
write!(f, "{}", name)?;
for c in rest {
write!(f, "::{}", c)?;
}
Ok(())
}
}
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct FormalParam {
pub name: ParamName,
pub default: Option<Expression>,
pub anno: Option<TypeIdentifier>,
}
#[derive(Debug, PartialEq, Clone)]
pub enum Declaration {
FuncSig(Signature),
FuncDecl(Signature, Block),
TypeDecl {
name: TypeSingletonName,
body: TypeBody,
mutable: bool,
},
//TODO TypeAlias `original` needs to be a more complex type definition
TypeAlias {
alias: Rc<String>,
original: Rc<String>,
},
Binding {
name: Rc<String>,
constant: bool,
type_anno: Option<TypeIdentifier>,
expr: Expression,
},
Impl {
type_name: TypeIdentifier,
interface_name: Option<TypeSingletonName>,
block: Vec<Statement<Declaration>>,
},
Interface {
name: Rc<String>,
signatures: Vec<Signature>,
},
//TODO need to limit the types of statements that can be annotated
Annotation {
name: Rc<String>,
arguments: Vec<Expression>,
inner: Box<Statement<StatementKind>>,
},
Module {
name: Rc<String>,
items: Block,
},
}
#[derive(Debug, PartialEq, Clone)]
pub struct Signature {
pub name: Rc<String>,
pub operator: bool,
pub params: Vec<FormalParam>,
pub type_anno: Option<TypeIdentifier>,
}
//TODO I can probably get rid of TypeBody
#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub enum TypeBody {
Variants(Vec<Variant>),
ImmediateRecord {
#[derivative(PartialEq = "ignore")]
id: ItemId,
fields: Vec<(Rc<String>, TypeIdentifier)>,
},
}
#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub struct Variant {
#[derivative(PartialEq = "ignore")]
pub id: ItemId,
pub name: Rc<String>,
pub kind: VariantKind,
}
#[derive(Debug, PartialEq, Clone)]
pub enum VariantKind {
UnitStruct,
TupleStruct(Vec<TypeIdentifier>),
Record(Vec<(Rc<String>, TypeIdentifier)>),
}
#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub struct Expression {
#[derivative(PartialEq = "ignore")]
pub id: ItemId,
pub kind: ExpressionKind,
//TODO this should only allow singletons, not tuples
pub type_anno: Option<TypeIdentifier>,
}
impl Expression {
pub fn new(id: ItemId, kind: ExpressionKind) -> Expression {
Expression { id, kind, type_anno: None }
}
}
#[derive(Debug, PartialEq, Clone)]
pub enum TypeIdentifier {
Tuple(Vec<TypeIdentifier>),
Singleton(TypeSingletonName),
}
impl fmt::Display for TypeIdentifier {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TypeIdentifier::Tuple(items) =>
write!(f, "{}", delim_wrapped('(', ')', items.iter().map(|item| item.to_string()))),
TypeIdentifier::Singleton(tsn) => {
write!(f, "{}", tsn.name)?;
if !tsn.params.is_empty() {
write!(f, "{}", delim_wrapped('<', '>', tsn.params.iter().map(|item| item.to_string())))?;
}
Ok(())
}
}
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct TypeSingletonName {
pub name: Rc<String>,
pub params: Vec<TypeIdentifier>,
}
#[derive(Debug, PartialEq, Clone)]
pub enum ExpressionKind {
NatLiteral(u64),
FloatLiteral(f64),
StringLiteral { prefix: Option<Rc<String>>, s: Rc<String> },
BoolLiteral(bool),
BinExp(BinOp, Box<Expression>, Box<Expression>),
PrefixExp(PrefixOp, Box<Expression>),
TupleLiteral(Vec<Expression>),
Value(QualifiedName),
SelfValue,
NamedStruct { name: QualifiedName, fields: Vec<(Rc<String>, Expression)> },
Call { f: Box<Expression>, arguments: Vec<InvocationArgument> },
Index { indexee: Box<Expression>, indexers: Vec<Expression> },
IfExpression { discriminator: Option<Box<Expression>>, body: Box<IfExpressionBody> },
WhileExpression { condition: Option<Box<Expression>>, body: Block },
ForExpression { enumerators: Vec<Enumerator>, body: Box<ForBody> },
Lambda { params: Vec<FormalParam>, type_anno: Option<TypeIdentifier>, body: Block },
Access { name: Rc<String>, expr: Box<Expression> },
ListLiteral(Vec<Expression>),
}
#[derive(Debug, PartialEq, Clone)]
pub enum InvocationArgument {
Positional(Expression),
Keyword { name: Rc<String>, expr: Expression },
Ignored,
}
#[derive(Debug, PartialEq, Clone)]
pub enum IfExpressionBody {
SimpleConditional { then_case: Block, else_case: Option<Block> },
SimplePatternMatch { pattern: Pattern, then_case: Block, else_case: Option<Block> },
CondList(Vec<ConditionArm>),
}
#[derive(Debug, PartialEq, Clone)]
pub struct ConditionArm {
pub condition: Condition,
pub guard: Option<Expression>,
pub body: Block,
}
#[derive(Debug, PartialEq, Clone)]
pub enum Condition {
Pattern(Pattern),
TruncatedOp(BinOp, Expression),
//Expression(Expression), //I'm pretty sure I don't actually want this
Else,
}
#[derive(Debug, PartialEq, Clone)]
pub enum Pattern {
Ignored,
TuplePattern(Vec<Pattern>),
Literal(PatternLiteral),
TupleStruct(QualifiedName, Vec<Pattern>),
Record(QualifiedName, Vec<(Rc<String>, Pattern)>),
VarOrName(QualifiedName),
}
#[derive(Debug, PartialEq, Clone)]
pub enum PatternLiteral {
NumPattern { neg: bool, num: ExpressionKind },
StringPattern(Rc<String>),
BoolPattern(bool),
}
#[derive(Debug, PartialEq, Clone)]
pub struct Enumerator {
pub identifier: Rc<String>,
pub generator: Expression,
pub assignment: bool, //true if `=`, false if `<-`
}
#[derive(Debug, PartialEq, Clone)]
pub enum ForBody {
MonadicReturn(Expression),
StatementBlock(Block),
}
#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub struct ImportSpecifier {
#[derivative(PartialEq = "ignore")]
pub id: ItemId,
pub path_components: Vec<Rc<String>>,
pub imported_names: ImportedNames,
}
#[derive(Debug, PartialEq, Clone)]
pub enum ImportedNames {
All,
LastOfPath,
List(Vec<Rc<String>>),
}
#[derive(Debug, PartialEq, Clone)]
pub struct ModuleSpecifier {
pub name: Rc<String>,
pub contents: Block,
}

View File

@@ -1,61 +0,0 @@
use std::rc::Rc;
#[derive(Debug, PartialEq, Clone)]
pub struct PrefixOp {
sigil: Rc<String>,
}
impl PrefixOp {
pub fn from_sigil(sigil: &str) -> PrefixOp {
PrefixOp { sigil: Rc::new(sigil.to_string()) }
}
pub fn sigil(&self) -> &str {
&self.sigil
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct BinOp {
sigil: Rc<String>,
}
impl BinOp {
pub fn from_sigil(sigil: &str) -> BinOp {
BinOp { sigil: Rc::new(sigil.to_string()) }
}
pub fn sigil(&self) -> &str {
&self.sigil
}
pub fn min_precedence() -> i32 {
i32::min_value()
}
pub fn get_precedence(&self) -> i32 {
binop_precedences(self.sigil.as_ref())
}
}
fn binop_precedences(s: &str) -> i32 {
let default = 10_000_000;
match s {
"+" => 10,
"-" => 10,
"*" => 20,
"/" => 20,
"%" => 20,
"++" => 30,
"^" => 30,
"&" => 20,
"|" => 20,
">" => 20,
">=" => 20,
"<" => 20,
"<=" => 20,
"==" => 40,
"<=>" => 30,
"=" => 5, // Assignment shoudl have highest precedence
_ => default,
}
}
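
A small hypothetical check of how this table is meant to be read: `*` (precedence 20) binds tighter than `+` (precedence 10), so `1 + 2 * 3` groups as `1 + (2 * 3)` in a precedence-climbing parser.

```
let plus = BinOp::from_sigil("+");
let times = BinOp::from_sigil("*");
// 20 > 10: the parser finishes the `*` subexpression before closing the `+` one.
assert!(times.get_precedence() > plus.get_precedence());
```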

View File

@@ -1,202 +0,0 @@
use crate::ast::*;
#[derive(Debug)]
pub enum Recursion {
Continue,
Stop,
}
pub trait ASTVisitor: Sized {
fn expression(&mut self, _expression: &Expression) -> Recursion {
Recursion::Continue
}
fn declaration(&mut self, _declaration: &Declaration, _id: &ItemId) -> Recursion {
Recursion::Continue
}
fn import(&mut self, _import: &ImportSpecifier) -> Recursion {
Recursion::Continue
}
fn pattern(&mut self, _pat: &Pattern) -> Recursion {
Recursion::Continue
}
}
pub fn walk_ast<V: ASTVisitor>(v: &mut V, ast: &AST) {
walk_block(v, &ast.statements);
}
pub fn walk_block<V: ASTVisitor>(v: &mut V, block: &Block) {
use StatementKind::*;
for statement in block.statements.iter() {
match statement.kind {
StatementKind::Expression(ref expr) => {
walk_expression(v, expr);
}
Declaration(ref decl) => {
walk_declaration(v, decl, &statement.id);
}
Import(ref import_spec) => {
v.import(import_spec);
}
Flow(ref flow_control) =>
if let FlowControl::Return(Some(ref retval)) = flow_control {
walk_expression(v, retval);
},
}
}
}
pub fn walk_declaration<V: ASTVisitor>(v: &mut V, decl: &Declaration, id: &ItemId) {
use Declaration::*;
if let Recursion::Continue = v.declaration(decl, id) {
match decl {
FuncDecl(_sig, block) => {
walk_block(v, block);
}
Binding { name: _, constant: _, type_anno: _, expr } => {
walk_expression(v, expr);
}
Module { name: _, items } => {
walk_block(v, items);
}
_ => (),
};
}
}
pub fn walk_expression<V: ASTVisitor>(v: &mut V, expr: &Expression) {
use ExpressionKind::*;
if let Recursion::Continue = v.expression(expr) {
match &expr.kind {
NatLiteral(_)
| FloatLiteral(_)
| StringLiteral { .. }
| BoolLiteral(_)
| Value(_)
| SelfValue => (),
BinExp(_, lhs, rhs) => {
walk_expression(v, lhs);
walk_expression(v, rhs);
}
PrefixExp(_, arg) => {
walk_expression(v, arg);
}
TupleLiteral(exprs) =>
for expr in exprs {
walk_expression(v, expr);
},
NamedStruct { name: _, fields } =>
for (_, expr) in fields.iter() {
walk_expression(v, expr);
},
Call { f, arguments } => {
walk_expression(v, f);
for arg in arguments.iter() {
match arg {
InvocationArgument::Positional(expr) | InvocationArgument::Keyword { expr, .. } =>
walk_expression(v, expr),
_ => (),
}
}
}
Index { indexee, indexers } => {
walk_expression(v, indexee);
for indexer in indexers.iter() {
walk_expression(v, indexer);
}
}
IfExpression { discriminator, body } => {
if let Some(d) = discriminator.as_ref() {
walk_expression(v, d);
}
walk_if_expr_body(v, body.as_ref());
}
WhileExpression { condition, body } => {
if let Some(d) = condition.as_ref() {
walk_expression(v, d);
}
walk_block(v, body);
}
ForExpression { enumerators, body } => {
for enumerator in enumerators {
walk_expression(v, &enumerator.generator);
}
match body.as_ref() {
ForBody::MonadicReturn(expr) => walk_expression(v, expr),
ForBody::StatementBlock(block) => walk_block(v, block),
};
}
Lambda { params: _, type_anno: _, body } => {
walk_block(v, body);
}
Access { name: _, expr } => {
walk_expression(v, expr);
}
ListLiteral(exprs) =>
for expr in exprs {
walk_expression(v, expr);
},
};
}
}
pub fn walk_if_expr_body<V: ASTVisitor>(v: &mut V, body: &IfExpressionBody) {
use IfExpressionBody::*;
match body {
SimpleConditional { then_case, else_case } => {
walk_block(v, then_case);
if let Some(block) = else_case.as_ref() {
walk_block(v, block)
}
}
SimplePatternMatch { pattern, then_case, else_case } => {
walk_pattern(v, pattern);
walk_block(v, then_case);
if let Some(block) = else_case.as_ref() {
walk_block(v, block)
}
}
CondList(arms) =>
for arm in arms {
match arm.condition {
Condition::Pattern(ref pat) => {
walk_pattern(v, pat);
}
Condition::TruncatedOp(ref _binop, ref expr) => {
walk_expression(v, expr);
}
Condition::Else => (),
}
if let Some(ref guard) = arm.guard {
walk_expression(v, guard);
}
walk_block(v, &arm.body);
},
}
}
pub fn walk_pattern<V: ASTVisitor>(v: &mut V, pat: &Pattern) {
use Pattern::*;
if let Recursion::Continue = v.pattern(pat) {
match pat {
TuplePattern(patterns) =>
for pat in patterns {
walk_pattern(v, pat);
},
TupleStruct(_, patterns) =>
for pat in patterns {
walk_pattern(v, pat);
},
Record(_, name_and_patterns) =>
for (_, pat) in name_and_patterns {
walk_pattern(v, pat);
},
_ => (),
};
}
}
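
A minimal sketch of how this walker API is used; the `ExprCounter` type is hypothetical and not part of this diff. A visitor implements only the hooks it cares about and lets the `walk_*` functions drive the traversal:

```
struct ExprCounter {
    count: usize,
}

impl ASTVisitor for ExprCounter {
    fn expression(&mut self, _expression: &Expression) -> Recursion {
        self.count += 1;
        Recursion::Continue // keep descending into subexpressions
    }
}

fn count_expressions(ast: &AST) -> usize {
    let mut counter = ExprCounter { count: 0 };
    walk_ast(&mut counter, ast);
    counter.count
}
```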

View File

@@ -1,282 +0,0 @@
#![allow(clippy::single_char_add_str)]
use std::fmt::Write;
use super::{
Block, Declaration, Expression, ExpressionKind, FlowControl, ImportSpecifier, InvocationArgument,
Signature, Statement, StatementKind, AST,
};
const LEVEL: usize = 2;
fn do_indent(n: usize, buf: &mut String) {
for _ in 0..n {
buf.push(' ');
}
}
fn newline(buf: &mut String) {
buf.push('\n');
}
pub(super) fn render_ast(ast: &AST) -> String {
let AST { statements, .. } = ast;
let mut buf = "(AST\n".to_string();
render_block(statements, LEVEL, &mut buf);
buf.push(')');
buf
}
fn render_statement(stmt: &Statement<StatementKind>, indent: usize, buf: &mut String) {
use StatementKind::*;
do_indent(indent, buf);
match stmt.kind {
Expression(ref expr) => render_expression(expr, indent, buf),
Declaration(ref decl) => render_declaration(decl, indent, buf),
Import(ref spec) => render_import(spec, indent, buf),
Flow(ref flow_control) => render_flow_control(flow_control, indent, buf),
}
}
fn render_expression(expr: &Expression, indent: usize, buf: &mut String) {
use ExpressionKind::*;
buf.push_str("(Expr ");
match &expr.kind {
SelfValue => write!(buf, "(SelfValue)").unwrap(),
NatLiteral(n) => buf.push_str(&format!("(NatLiteral {})", n)),
FloatLiteral(f) => buf.push_str(&format!("(FloatLiteral {})", f)),
StringLiteral { s, prefix } => buf.push_str(&format!("(StringLiteral prefix: {:?} {})", prefix, s)),
BoolLiteral(b) => buf.push_str(&format!("(BoolLiteral {})", b)),
BinExp(binop, lhs, rhs) => {
let new_indent = indent + LEVEL;
buf.push_str(&format!("Binop {}\n", binop.sigil()));
do_indent(new_indent, buf);
render_expression(lhs, new_indent, buf);
newline(buf);
do_indent(new_indent, buf);
render_expression(rhs, new_indent, buf);
newline(buf);
do_indent(indent, buf);
}
PrefixExp(prefix, expr) => {
let new_indent = indent + LEVEL;
buf.push_str(&format!("PrefixOp {}\n", prefix.sigil()));
do_indent(new_indent, buf);
render_expression(expr, new_indent, buf);
newline(buf);
do_indent(indent, buf);
}
TupleLiteral(..) => (),
Value(name) => {
buf.push_str(&format!("Value {})", name));
}
NamedStruct { name: _, fields: _ } => (),
Call { f, arguments } => {
let new_indent = indent + LEVEL;
buf.push_str("Call ");
render_expression(f, new_indent, buf);
newline(buf);
for arg in arguments {
do_indent(new_indent, buf);
match arg {
InvocationArgument::Positional(expr) => render_expression(expr, new_indent, buf),
InvocationArgument::Keyword { .. } => buf.push_str("<keyword>"),
InvocationArgument::Ignored => buf.push_str("<ignored>"),
}
newline(buf);
do_indent(indent, buf);
}
}
Index { .. } => buf.push_str("<index>"),
IfExpression { .. } => buf.push_str("<if-expr>"),
WhileExpression { .. } => buf.push_str("<while-expr>"),
ForExpression { .. } => buf.push_str("<for-expr>"),
Lambda { params, type_anno: _, body } => {
let new_indent = indent + LEVEL;
buf.push_str("Lambda ");
newline(buf);
do_indent(new_indent, buf);
buf.push_str("(Args ");
for p in params {
buf.push_str(&format!("{} ", p.name));
}
buf.push(')');
newline(buf);
do_indent(new_indent, buf);
buf.push_str("(Body ");
newline(buf);
render_block(body, new_indent + LEVEL, buf);
do_indent(new_indent, buf);
buf.push(')');
newline(buf);
do_indent(indent, buf);
}
Access { .. } => buf.push_str("<access-expr>"),
ListLiteral(..) => buf.push_str("<list-literal>"),
}
buf.push(')');
}
fn render_declaration(decl: &Declaration, indent: usize, buf: &mut String) {
use Declaration::*;
buf.push_str("(Decl ");
match decl {
FuncSig(ref sig) => render_signature(sig, indent, buf),
FuncDecl(ref sig, ref block) => {
let indent = indent + LEVEL;
buf.push_str("Function");
newline(buf);
do_indent(indent, buf);
render_signature(sig, indent, buf);
newline(buf);
do_indent(indent, buf);
buf.push_str("(Body");
newline(buf);
render_block(block, indent + LEVEL, buf);
do_indent(indent, buf);
buf.push_str(")");
newline(buf);
}
TypeDecl { name: _, body: _, .. } => {
buf.push_str("<type-decl>");
}
TypeAlias { alias: _, original: _ } => {
buf.push_str("<type-alias>");
}
Binding { name, constant: _, type_anno: _, expr } => {
let new_indent = indent + LEVEL;
buf.push_str(&format!("Binding {}", name));
newline(buf);
do_indent(new_indent, buf);
render_expression(expr, new_indent, buf);
newline(buf);
}
Module { name, items: _ } => {
write!(buf, "(Module {} <body>)", name).unwrap();
}
_ => (), /*
Impl { type_name: TypeIdentifier, interface_name: Option<TypeSingletonName>, block: Vec<Declaration> },
Interface { name: Rc<String>, signatures: Vec<Signature> },
Annotation { name: Rc<String>, arguments: Vec<Expression> },
*/
}
do_indent(indent, buf);
buf.push(')');
}
fn render_block(block: &Block, indent: usize, buf: &mut String) {
for stmt in block.statements.iter() {
render_statement(stmt, indent, buf);
newline(buf);
}
}
fn render_signature(sig: &Signature, _indent: usize, buf: &mut String) {
buf.push_str(&format!("(Signature {} )", sig.name));
}
fn render_import(_import: &ImportSpecifier, _indent: usize, buf: &mut String) {
buf.push_str("(Import <some import>)");
}
fn render_flow_control(flow: &FlowControl, _indent: usize, buf: &mut String) {
use FlowControl::*;
match flow {
Return(ref _expr) => write!(buf, "return <expr>").unwrap(),
Break => write!(buf, "break").unwrap(),
Continue => write!(buf, "continue").unwrap(),
}
}
#[cfg(test)]
mod test {
use super::render_ast;
use crate::util::quick_ast;
#[test]
fn test_visualization() {
let ast = quick_ast(
r#"
fn test(x) {
let m = 9
1 * 4 <> m |> somemod::output(x)
}
let quincy = \(no, yes, maybe) {
let a = 10
yes * no + a
}
let b = 54
test(b) == 3
"#,
);
let expected_output = r#"(AST
(Decl Function
(Signature test )
(Body
(Decl Binding m
(Expr (NatLiteral 9))
)
(Expr Binop *
(Expr (NatLiteral 1))
(Expr Binop |>
(Expr Binop <>
(Expr (NatLiteral 4))
(Expr Value m))
)
(Expr Call (Expr Value somemod::output))
(Expr Value x))
)
)
)
)
)
(Decl Binding quincy
(Expr Lambda
(Args no yes maybe )
(Body
(Decl Binding a
(Expr (NatLiteral 10))
)
(Expr Binop +
(Expr Binop *
(Expr Value yes))
(Expr Value no))
)
(Expr Value a))
)
)
)
)
(Decl Binding b
(Expr (NatLiteral 54))
)
(Expr Binop ==
(Expr Call (Expr Value test))
(Expr Value b))
)
(Expr (NatLiteral 3))
)
)"#;
let rendered = render_ast(&ast);
assert_eq!(rendered, expected_output);
}
}

View File

@@ -1,130 +1,100 @@

(removed)
use std::{convert::TryFrom, str::FromStr};
use crate::{
ast::{BinOp, PrefixOp},
type_inference::Type,

(added)
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use self::Type::*; use self::TConstOld::*;
//TODO get rid of these types and replace them with the right MonoType or whatever ones later
#[derive(Debug, PartialEq, Clone)]
pub enum Type {
Const(TConstOld),
Func(Box<Type>, Box<Type>),
}
#[derive(Debug, PartialEq, Clone)]
pub enum TConstOld {
Nat,
Int,
Float,
StringT,
Bool,
}
impl fmt::Display for Type {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct BinOp {
sigil: Rc<String>
}
impl BinOp {
pub fn from_sigil(sigil: &str) -> BinOp {
BinOp { sigil: Rc::new(sigil.to_string()) }
}
pub fn sigil(&self) -> &Rc<String> {
&self.sigil
}
pub fn get_type(&self) -> Result<Type, String> {
let s = self.sigil.as_str();
BINOPS.get(s).map(|x| x.0.clone()).ok_or(format!("Binop {} not found", s))
}
pub fn min_precedence() -> i32 {
i32::min_value()
}
pub fn get_precedence(op: &str) -> i32 {
let default = 10_000_000;
BINOPS.get(op).map(|x| x.2.clone()).unwrap_or(default)
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct PrefixOp {
sigil: Rc<String>
}
impl PrefixOp {
pub fn from_sigil(sigil: &str) -> PrefixOp {
PrefixOp { sigil: Rc::new(sigil.to_string()) }
}
pub fn sigil(&self) -> &Rc<String> {
&self.sigil
}
pub fn is_prefix(op: &str) -> bool {
PREFIX_OPS.get(op).is_some()
}
pub fn get_type(&self) -> Result<Type, String> {
let s = self.sigil.as_str();
PREFIX_OPS.get(s).map(|x| x.0.clone()).ok_or(format!("Prefix op {} not found", s))
}
}
lazy_static! {
static ref PREFIX_OPS: HashMap<&'static str, (Type, ())> =
hashmap! {
"+" => (Func(bx!(Const(Int)), bx!(Const(Int))), ()),
"-" => (Func(bx!(Const(Int)), bx!(Const(Int))), ()),
"!" => (Func(bx!(Const(Bool)), bx!(Const(Bool))), ()),
};
}
/// "Builtin" computational operations with some kind of semantics, mostly mathematical operations.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Builtin {
Add,
Increment,
Subtract,
Negate,
Multiply,
Divide,
Quotient,
Modulo,
Exponentiation,
BitwiseAnd,
BitwiseOr,
BooleanAnd,
BooleanOr,
BooleanNot,
Equality,
LessThan,
LessThanOrEqual,
GreaterThan,
GreaterThanOrEqual,
Comparison,
IOPrint,
IOPrintLn,
IOGetLine,
Assignment,
Concatenate,
NotEqual,
}
/* the second tuple member is a placeholder for when I want to make evaluation rules tied to the
 * binop definition */
lazy_static! {
static ref BINOPS: HashMap<&'static str, (Type, (), i32)> =
hashmap! {
"+" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 10),
"-" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 10),
"*" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"/" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Float))))), (), 20),
"//" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20), //TODO change this to `quot`
"%" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"++" => (Func(bx!(Const(StringT)), bx!(Func(bx!(Const(StringT)), bx!(Const(StringT))))), (), 30),
"^" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"&" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"|" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
};
}
impl Builtin {
#[allow(dead_code)]
pub fn get_type(&self) -> Type {
use Builtin::*;
match self {
Add => ty!(Nat -> Nat -> Nat),
Subtract => ty!(Nat -> Nat -> Nat),
Multiply => ty!(Nat -> Nat -> Nat),
Divide => ty!(Nat -> Nat -> Float),
Quotient => ty!(Nat -> Nat -> Nat),
Modulo => ty!(Nat -> Nat -> Nat),
Exponentiation => ty!(Nat -> Nat -> Nat),
BitwiseAnd => ty!(Nat -> Nat -> Nat),
BitwiseOr => ty!(Nat -> Nat -> Nat),
BooleanAnd => ty!(Bool -> Bool -> Bool),
BooleanOr => ty!(Bool -> Bool -> Bool),
BooleanNot => ty!(Bool -> Bool),
Equality => ty!(Nat -> Nat -> Bool),
LessThan => ty!(Nat -> Nat -> Bool),
LessThanOrEqual => ty!(Nat -> Nat -> Bool),
GreaterThan => ty!(Nat -> Nat -> Bool),
GreaterThanOrEqual => ty!(Nat -> Nat -> Bool),
Comparison => ty!(Nat -> Nat -> Ordering),
IOPrint => ty!(Unit),
IOPrintLn => ty!(Unit),
IOGetLine => ty!(StringT),
Assignment => ty!(Unit),
Concatenate => ty!(StringT -> StringT -> StringT),
Increment => ty!(Nat -> Int),
Negate => ty!(Nat -> Int),
NotEqual => ty!(Nat -> Nat -> Bool),
}
}
}
impl TryFrom<&BinOp> for Builtin {
type Error = ();
fn try_from(binop: &BinOp) -> Result<Self, Self::Error> {
FromStr::from_str(binop.sigil())
}
}
impl TryFrom<&PrefixOp> for Builtin {
type Error = ();
fn try_from(prefix_op: &PrefixOp) -> Result<Self, Self::Error> {
use Builtin::*;
match prefix_op.sigil() {
"+" => Ok(Increment),
"-" => Ok(Negate),
"!" => Ok(BooleanNot),
_ => Err(()),
}
}
}
impl FromStr for Builtin {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
use Builtin::*;
Ok(match s {
"+" => Add,
"-" => Subtract,
"*" => Multiply,
"/" => Divide,
"quot" => Quotient,
"%" => Modulo,
"++" => Concatenate,
"^" => Exponentiation,
"&" => BitwiseAnd,
"&&" => BooleanAnd,
"|" => BitwiseOr,
"||" => BooleanOr,
"!" => BooleanNot,
">" => GreaterThan,
">=" => GreaterThanOrEqual,
"<" => LessThan,
"<=" => LessThanOrEqual,
"==" => Equality,
"!=" => NotEqual,
"=" => Assignment,
"<=>" => Comparison,
"print" => IOPrint,
"println" => IOPrintLn,
"getline" => IOGetLine,
_ => return Err(()),
})
}
}
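// Usage sketch (illustrative only, not part of the diff): taken together, these impls
// let the rest of the crate turn an operator sigil into a `Builtin`, e.g.
//   Builtin::try_from(&BinOp::from_sigil("+"))    == Ok(Builtin::Add)
//   Builtin::try_from(&PrefixOp::from_sigil("-")) == Ok(Builtin::Negate)
//   Builtin::from_str("<=>")                      == Ok(Builtin::Comparison)
// while an unrecognized sigil such as "@@" falls through as Err(()).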

View File

@ -1,79 +0,0 @@
use crate::{
parsing::{Location, ParseError},
schala::{SourceReference, Stage},
symbol_table::SymbolError,
type_inference::TypeError,
};
pub struct SchalaError {
errors: Vec<Error>,
}
impl SchalaError {
pub(crate) fn display(&self) -> String {
match self.errors[0] {
Error::Parse(ref parse_err) => parse_err.to_string(),
Error::Standard { ref text, .. } => text.as_ref().cloned().unwrap_or_default(),
}
}
#[allow(dead_code)]
pub(crate) fn from_type_error(err: TypeError) -> Self {
Self {
errors: vec![Error::Standard { location: None, text: Some(err.msg), stage: Stage::Typechecking }],
}
}
pub(crate) fn from_symbol_table(symbol_errs: Vec<SymbolError>) -> Self {
//TODO this could be better
let errors = symbol_errs
.into_iter()
.map(|_symbol_err| Error::Standard {
location: None,
text: Some("symbol table error".to_string()),
stage: Stage::Symbols,
})
.collect();
Self { errors }
}
pub(crate) fn from_string(text: String, stage: Stage) -> Self {
Self { errors: vec![Error::Standard { location: None, text: Some(text), stage }] }
}
pub(crate) fn from_parse_error(parse_error: ParseError, source_reference: &SourceReference) -> Self {
let formatted_parse_error = format_parse_error(parse_error, source_reference);
Self { errors: vec![Error::Parse(formatted_parse_error)] }
}
}
#[allow(dead_code)]
enum Error {
Standard { location: Option<Location>, text: Option<String>, stage: Stage },
Parse(String),
}
fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
let offset = error.location.offset;
let (line_start, line_num, line_from_program) = source_reference.get_line(offset);
let ch = offset - line_start;
let location_pointer = format!("{}^", " ".repeat(ch));
let line_num_digits = format!("{}", line_num).chars().count();
let space_padding = " ".repeat(line_num_digits);
format!(
r#"
{error_msg}
{space_padding} |
{line_num} | {}
{space_padding} | {}
"#,
line_from_program,
location_pointer,
error_msg = error.msg,
space_padding = space_padding,
line_num = line_num,
)
}
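// Illustration only (made-up error message and source line) of the shape that
// format_parse_error produces: the offending line, with a caret under the error offset.
//
//     unexpected token `)`
//       |
//     3 | let x = (1 +
//       |             ^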

375
schala-lang/src/eval.rs Normal file
View File

@ -0,0 +1,375 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::fmt::Write;
use std::io;
use itertools::Itertools;
use util::StateStack;
use reduced_ast::{ReducedAST, Stmt, Expr, Lit, Func};
use symbol_table::{SymbolSpec, Symbol, SymbolTable};
pub struct State<'a> {
values: StateStack<'a, Rc<String>, ValueEntry>,
symbol_table_handle: Rc<RefCell<SymbolTable>>,
}
macro_rules! builtin_binding {
($name:expr, $values:expr) => {
$values.insert(Rc::new(format!($name)), ValueEntry::Binding { constant: true, val: Expr::Func(Func::BuiltIn(Rc::new(format!($name)))) });
}
}
impl<'a> State<'a> {
pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> State<'a> {
let mut values = StateStack::new(Some(format!("global")));
builtin_binding!("print", values);
builtin_binding!("println", values);
builtin_binding!("getline", values);
State { values, symbol_table_handle }
}
pub fn debug_print(&self) -> String {
format!("Values: {:?}", self.values)
}
}
#[derive(Debug)]
enum ValueEntry {
Binding {
constant: bool,
val: /*FullyEvaluatedExpr*/ Expr,
}
}
type EvalResult<T> = Result<T, String>;
impl Expr {
fn to_repl(&self) -> String {
use self::Lit::*;
use self::Func::*;
fn paren_wrapped_vec(exprs: &Vec<Expr>) -> String {
let mut buf = String::new();
write!(buf, "(").unwrap();
for term in exprs.iter().map(|e| Some(e)).intersperse(None) {
match term {
Some(e) => write!(buf, "{}", e.to_repl()).unwrap(),
None => write!(buf, ", ").unwrap(),
};
}
write!(buf, ")").unwrap();
buf
}
match self {
Expr::Lit(ref l) => match l {
Nat(n) => format!("{}", n),
Int(i) => format!("{}", i),
Float(f) => format!("{}", f),
Bool(b) => format!("{}", b),
StringLit(s) => format!("\"{}\"", s),
Custom(name, args) if args.len() == 0 => format!("{}", name),
Custom(name, args) => format!("{}{}", name, paren_wrapped_vec(args)),
},
Expr::Func(f) => match f {
BuiltIn(name) => format!("<built-in function {}>", name),
UserDefined { name: None, .. } => format!("<function>"),
UserDefined { name: Some(name), .. } => format!("<function {}>", name),
},
Expr::Constructor { name } => format!("<constructor {}>", name),
Expr::Tuple(exprs) => paren_wrapped_vec(exprs),
_ => format!("{:?}", self),
}
}
}
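// Illustrative REPL renderings produced by to_repl() above:
//   Lit(Nat(3))                                => 3
//   Lit(StringLit("hi"))                       => "hi"
//   Tuple([Lit(StringLit("a")), Lit(Nat(3))])  => ("a", 3)
//   Func(BuiltIn("print"))                     => <built-in function print>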
impl<'a> State<'a> {
pub fn evaluate(&mut self, ast: ReducedAST, repl: bool) -> Vec<Result<String, String>> {
let mut acc = vec![];
// handle prebindings
for statement in ast.0.iter() {
self.prebinding(statement);
}
for statement in ast.0 {
match self.statement(statement) {
Ok(Some(ref output)) if repl => acc.push(Ok(output.to_repl())),
Ok(_) => (),
Err(error) => {
acc.push(Err(format!("Runtime error: {}", error)));
return acc;
},
}
}
acc
}
fn prebinding(&mut self, stmt: &Stmt) {
match stmt {
Stmt::PreBinding { name, func } => {
let v_entry = ValueEntry::Binding { constant: true, val: Expr::Func(func.clone()) };
self.values.insert(name.clone(), v_entry);
},
Stmt::Expr(_expr) => {
//TODO have this support things like nested function defs
},
_ => ()
}
}
fn statement(&mut self, stmt: Stmt) -> EvalResult<Option<Expr>> {
match stmt {
Stmt::Binding { name, constant, expr } => {
let val = self.expression(expr)?;
self.values.insert(name.clone(), ValueEntry::Binding { constant, val });
Ok(None)
},
Stmt::Expr(expr) => Ok(Some(self.expression(expr)?)),
Stmt::PreBinding {..} | Stmt::Noop => Ok(None),
}
}
fn block(&mut self, stmts: Vec<Stmt>) -> EvalResult<Expr> {
let mut ret = None;
for stmt in stmts {
ret = self.statement(stmt)?;
}
Ok(ret.unwrap_or(Expr::Unit))
}
fn expression(&mut self, expr: Expr) -> EvalResult<Expr> {
use self::Expr::*;
match expr {
literal @ Lit(_) => Ok(literal),
Call { box f, args } => {
match self.expression(f)? {
Constructor {name} => self.apply_data_constructor(name, args),
Func(f) => self.apply_function(f, args),
other => return Err(format!("Tried to call {:?} which is not a function or data constructor", other)),
}
},
Val(v) => self.value(v),
constr @ Constructor { .. } => Ok(constr),
func @ Func(_) => Ok(func),
Tuple(exprs) => Ok(Tuple(exprs.into_iter().map(|expr| self.expression(expr)).collect::<Result<Vec<Expr>,_>>()?)),
Conditional { box cond, then_clause, else_clause } => self.conditional(cond, then_clause, else_clause),
Assign { box val, box expr } => {
let name = match val {
Expr::Val(name) => name,
_ => return Err(format!("Trying to assign to a non-value")),
};
let constant = match self.values.lookup(&name) {
None => return Err(format!("{} is undefined", name)),
Some(ValueEntry::Binding { constant, .. }) => constant.clone(),
};
if constant {
return Err(format!("trying to update {}, a non-mutable binding", name));
}
let val = self.expression(expr)?;
self.values.insert(name.clone(), ValueEntry::Binding { constant: false, val });
Ok(Expr::Unit)
},
e => Err(format!("Expr {:?} eval not implemented", e))
}
}
fn apply_data_constructor(&mut self, name: Rc<String>, args: Vec<Expr>) -> EvalResult<Expr> {
{
let symbol_table = self.symbol_table_handle.borrow();
match symbol_table.values.get(&name) {
Some(Symbol { spec: SymbolSpec::DataConstructor { type_name, type_args }, name }) => {
if args.len() != type_args.len() {
return Err(format!("Data constructor {} requires {} args", name, type_args.len()));
}
()
},
_ => return Err(format!("Bad symbol {}", name))
}
}
let evaled_args = args.into_iter().map(|expr| self.expression(expr)).collect::<Result<Vec<Expr>,_>>()?;
//let evaled_args = vec![];
Ok(Expr::Lit(self::Lit::Custom(name.clone(), evaled_args)))
}
fn apply_function(&mut self, f: Func, args: Vec<Expr>) -> EvalResult<Expr> {
match f {
Func::BuiltIn(sigil) => self.apply_builtin(sigil, args),
Func::UserDefined { params, body, name } => {
if params.len() != args.len() {
return Err(format!("calling a {}-argument function with {} args", params.len(), args.len()))
}
let mut func_state = State {
values: self.values.new_frame(name.map(|n| format!("{}", n))),
symbol_table_handle: self.symbol_table_handle.clone(),
};
for (param, val) in params.into_iter().zip(args.into_iter()) {
let val = func_state.expression(val)?;
func_state.values.insert(param, ValueEntry::Binding { constant: true, val });
}
// TODO figure out function return semantics
func_state.block(body)
}
}
}
fn apply_builtin(&mut self, name: Rc<String>, args: Vec<Expr>) -> EvalResult<Expr> {
use self::Expr::*;
use self::Lit::*;
let evaled_args: Result<Vec<Expr>, String> = args.into_iter().map(|arg| self.expression(arg)).collect();
let evaled_args = evaled_args?;
Ok(match (name.as_str(), evaled_args.as_slice()) {
/* binops */
("+", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l + r)),
("++", &[Lit(StringLit(ref s1)), Lit(StringLit(ref s2))]) => Lit(StringLit(Rc::new(format!("{}{}", s1, s2)))),
("-", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l - r)),
("*", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l * r)),
("/", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Float((l as f64)/ (r as f64))),
("//", &[Lit(Nat(l)), Lit(Nat(r))]) => if r == 0 {
return Err(format!("divide by zero"));
} else {
Lit(Nat(l / r))
},
("%", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l % r)),
("^", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l ^ r)),
("&", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l & r)),
("|", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l | r)),
("==", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l == r)),
("==", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l == r)),
("==", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l == r)),
("==", &[Lit(Bool(l)), Lit(Bool(r))]) => Lit(Bool(l == r)),
("==", &[Lit(StringLit(ref l)), Lit(StringLit(ref r))]) => Lit(Bool(l == r)),
/* prefix ops */
("!", &[Lit(Bool(true))]) => Lit(Bool(false)),
("!", &[Lit(Bool(false))]) => Lit(Bool(true)),
("-", &[Lit(Nat(n))]) => Lit(Int(-1*(n as i64))),
("-", &[Lit(Int(n))]) => Lit(Int(-1*(n as i64))),
("+", &[Lit(Int(n))]) => Lit(Int(n)),
("+", &[Lit(Nat(n))]) => Lit(Nat(n)),
/* builtin functions */
("print", &[ref anything]) => {
print!("{}", anything.to_repl());
Expr::Unit
},
("println", &[ref anything]) => {
println!("{}", anything.to_repl());
Expr::Unit
},
("getline", &[]) => {
let mut buf = String::new();
io::stdin().read_line(&mut buf).expect("Error reading line in 'getline'");
Lit(StringLit(Rc::new(buf.trim().to_string())))
},
(x, args) => return Err(format!("bad or unimplemented builtin {:?} | {:?}", x, args)),
})
}
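// Illustrative dispatches through apply_builtin above:
//   ("+",  [Lit(Nat(2)), Lit(Nat(3))])                     => Lit(Nat(5))
//   ("++", [Lit(StringLit("a")), Lit(StringLit("b"))])     => Lit(StringLit("ab"))
//   ("//", [Lit(Nat(1)), Lit(Nat(0))])                     => runtime error "divide by zero"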
fn conditional(&mut self, cond: Expr, then_clause: Vec<Stmt>, else_clause: Vec<Stmt>) -> EvalResult<Expr> {
let cond = self.expression(cond)?;
Ok(match cond {
Expr::Lit(Lit::Bool(true)) => self.block(then_clause)?,
Expr::Lit(Lit::Bool(false)) => self.block(else_clause)?,
_ => return Err(format!("Conditional with non-boolean condition"))
})
}
fn value(&mut self, name: Rc<String>) -> EvalResult<Expr> {
use self::ValueEntry::*;
use self::Func::*;
//TODO add a layer of indirection here to talk to the symbol table first, and only then look up
//in the values table
let symbol_table = self.symbol_table_handle.borrow();
let value = symbol_table.values.get(&name);
Ok(match value {
Some(Symbol { name, spec }) => match spec {
SymbolSpec::DataConstructor { type_name, type_args } => {
if type_args.len() == 0 {
Expr::Lit(Lit::Custom(name.clone(), vec![]))
} else {
return Err(format!("This data constructor thing not done"))
}
},
SymbolSpec::Func(_) => match self.values.lookup(&name) {
Some(Binding { val: Expr::Func(UserDefined { name, params, body }), .. }) => {
Expr::Func(UserDefined { name: name.clone(), params: params.clone(), body: body.clone() })
},
_ => unreachable!(),
},
},
/* see if it's an ordinary variable TODO make variables go in symbol table */
None => match self.values.lookup(&name) {
Some(Binding { val, .. }) => val.clone(),
None => return Err(format!("Couldn't find value {}", name)),
}
})
}
}
#[cfg(test)]
mod eval_tests {
use std::cell::RefCell;
use std::rc::Rc;
use symbol_table::SymbolTable;
use tokenizing::tokenize;
use parsing::parse;
use eval::State;
macro_rules! fresh_env {
($string:expr, $correct:expr) => {
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
let mut state = State::new(symbol_table);
let ast = parse(tokenize($string)).0.unwrap();
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast);
let reduced = ast.reduce(&state.symbol_table_handle.borrow());
let all_output = state.evaluate(reduced, true);
let ref output = all_output.last().unwrap();
assert_eq!(**output, Ok($correct.to_string()));
}
}
#[test]
fn test_basic_eval() {
fresh_env!("1 + 2", "3");
fresh_env!("let mut a = 1; a = 2", "Unit");
fresh_env!("let mut a = 1; a = 2; a", "2");
fresh_env!(r#"("a", 1 + 2)"#, r#"("a", 3)"#);
}
#[test]
fn function_eval() {
fresh_env!("fn oi(x) { x + 1 }; oi(4)", "5");
fresh_env!("fn oi(x) { x + 1 }; oi(1+2)", "4");
}
#[test]
fn scopes() {
let scope_ok = r#"
let a = 20
fn haha() {
let a = 10
a
}
haha()
"#;
fresh_env!(scope_ok, "10");
let scope_ok = r#"
let a = 20
fn haha() {
let a = 10
a
}
a
"#;
fresh_env!(scope_ok, "20");
}
}

View File

@ -1,75 +0,0 @@
use std::{
fmt::{self, Debug},
hash::Hash,
marker::PhantomData,
};
pub trait IdKind: Debug + Copy + Clone + Hash + PartialEq + Eq + Default {
fn tag() -> &'static str;
}
/// A generalized abstract identifier type of up to 2^32-1 entries.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Default)]
pub struct Id<T>
where T: IdKind
{
idx: u32,
t: PhantomData<T>,
}
impl<T> Id<T>
where T: IdKind
{
fn new(n: u32) -> Self {
Self { idx: n, t: PhantomData }
}
#[allow(dead_code)]
pub fn as_u32(&self) -> u32 {
self.idx
}
}
impl<T> fmt::Display for Id<T>
where T: IdKind
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}:{}", self.idx, T::tag())
}
}
#[derive(Debug)]
pub struct IdStore<T>
where T: IdKind
{
last_idx: u32,
t: PhantomData<T>,
}
impl<T> IdStore<T>
where T: IdKind
{
pub fn new() -> Self {
Self { last_idx: 0, t: PhantomData }
}
pub fn fresh(&mut self) -> Id<T> {
let idx = self.last_idx;
self.last_idx += 1;
Id::new(idx)
}
}
macro_rules! define_id_kind {
($name:ident) => {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Default)]
pub struct $name;
impl crate::identifier::IdKind for $name {
fn tag() -> &'static str {
stringify!($name)
}
}
};
}
pub(crate) use define_id_kind;
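// Usage sketch (hypothetical `DemoItem` kind, not part of this diff): a module declares
// an id kind once with the macro, then mints fresh ids from an IdStore.
define_id_kind!(DemoItem);

fn _identifier_demo() {
    let mut store: IdStore<DemoItem> = IdStore::new();
    let first = store.fresh();
    let second = store.fresh();
    assert!(first != second);
    assert_eq!(second.as_u32(), 1);
    assert_eq!(format!("{}", second), "1:DemoItem");
}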

View File

@ -1,31 +1,147 @@
#![feature(trace_macros)]
//#![feature(unrestricted_attribute_tokens)]
#![feature(box_patterns, iter_intersperse)]

//! `schala-lang` is where the Schala programming language is actually implemented.
//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
//! `ProgrammingLanguageInterface` and the chain of compiler passes for it.

extern crate derivative;
extern crate schala_repl;

#[macro_use]
mod util;
#[macro_use]
mod type_inference;
mod ast;
mod parsing;
#[macro_use]
mod symbol_table;
mod builtin;
mod error;
mod reduced_ir;
mod tree_walk_eval;
#[macro_use]
mod identifier;
mod schala;

#![feature(trace_macros)]
#![feature(slice_patterns, box_patterns, box_syntax)]
#![feature(proc_macro)]
extern crate itertools;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate maplit;
#[macro_use]
extern crate schala_repl;
#[macro_use]
extern crate schala_codegen;

use std::cell::RefCell;
use std::rc::Rc;
use itertools::Itertools;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, UnfinishedComputation, FinishedComputation};

macro_rules! bx {
($e:expr) => { Box::new($e) }
}

mod util;
mod builtin;
mod tokenizing;
mod ast;
mod parsing;
mod symbol_table;
mod typechecking;
mod reduced_ast;
mod eval;

//trace_macros!(true);
#[derive(ProgrammingLanguageInterface)]
#[LanguageName = "Schala"]
#[SourceFileExtension = "schala"]
#[PipelineSteps(tokenizing, parsing(compact,expanded,trace), symbol_table, typechecking, ast_reducing, eval)]
pub struct Schala {
state: eval::State<'static>,
symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
type_context: typechecking::TypeContext<'static>,
}
impl Schala {
fn new_blank_env() -> Schala {
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
Schala {
symbol_table: symbols.clone(),
type_context: typechecking::TypeContext::new(symbols.clone()),
state: eval::State::new(symbols),
}
}
pub fn new() -> Schala {
let prelude = r#"
type Option<T> = Some(T) | None
"#;
let mut s = Schala::new_blank_env();
s.execute_pipeline(prelude, &EvalOptions::default());
s
}
}
fn tokenizing(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
let tokens = tokenizing::tokenize(input);
comp.map(|comp| {
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
comp.add_artifact(TraceArtifact::new("tokens", token_string));
});
let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
if errors.len() == 0 {
Ok(tokens)
} else {
Err(format!("{:?}", errors))
}
}
fn parsing(_handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
let (ast, trace) = parsing::parse(input);
comp.map(|comp| {
//TODO need to control which of these debug stages get added
let opt = comp.cur_debug_options.get(0).map(|s| s.clone());
match opt {
None => comp.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast))),
Some(ref s) if s == "compact" => comp.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast))),
Some(ref s) if s == "expanded" => comp.add_artifact(TraceArtifact::new("ast", format!("{:#?}", ast))),
Some(ref s) if s == "trace" => comp.add_artifact(TraceArtifact::new_parse_trace(trace)),
Some(ref x) => println!("Bad parsing debug option: {}", x),
};
});
ast.map_err(|err| err.msg)
}
fn symbol_table(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
let add = handle.symbol_table.borrow_mut().add_top_level_symbols(&input);
match add {
Ok(()) => {
let artifact = TraceArtifact::new("symbol_table", handle.symbol_table.borrow().debug_symbol_table());
comp.map(|comp| comp.add_artifact(artifact));
Ok(input)
},
Err(msg) => Err(msg)
}
}
fn typechecking(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
match handle.type_context.type_check_ast(&input) {
Ok(ty) => {
comp.map(|c| {
c.add_artifact(TraceArtifact::new("type_table", format!("{}", handle.type_context.debug_types())));
c.add_artifact(TraceArtifact::new("type_check", format!("{:?}", ty)));
});
Ok(input)
},
Err(msg) => {
comp.map(|comp| {
comp.add_artifact(TraceArtifact::new("type_table", format!("{}", handle.type_context.debug_types())));
comp.add_artifact(TraceArtifact::new("type_check", format!("Type error: {:?}", msg)));
});
Ok(input)
}
}
}
fn ast_reducing(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<reduced_ast::ReducedAST, String> {
let ref symbol_table = handle.symbol_table.borrow();
let output = input.reduce(symbol_table);
comp.map(|comp| comp.add_artifact(TraceArtifact::new("ast_reducing", format!("{:?}", output))));
Ok(output)
}
fn eval(handle: &mut Schala, input: reduced_ast::ReducedAST, comp: Option<&mut UnfinishedComputation>) -> Result<String, String> {
comp.map(|comp| comp.add_artifact(TraceArtifact::new("value_state", handle.state.debug_print())));
let evaluation_outputs = handle.state.evaluate(input, true);
let text_output: Result<Vec<String>, String> = evaluation_outputs
.into_iter()
.collect();
let eval_output: Result<String, String> = text_output
.map(|v| { v.into_iter().intersperse(format!("\n")).collect() });
eval_output
}
pub use schala::{Schala, SchalaConfig};

1422
schala-lang/src/parsing.rs Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,126 +0,0 @@
#![allow(clippy::upper_case_acronyms)]
pub mod combinator;
mod peg_parser;
mod test;
use std::{cell::RefCell, fmt, rc::Rc};
use combinator::Span;
#[cfg(test)]
use crate::ast::{Block, Expression};
use crate::{
ast::{ASTItem, AST},
identifier::{Id, IdStore},
};
pub(crate) type StoreRef = Rc<RefCell<IdStore<ASTItem>>>;
pub struct Parser {
id_store: StoreRef,
use_combinator: bool,
}
impl Parser {
pub(crate) fn new() -> Self {
let id_store: IdStore<ASTItem> = IdStore::new();
Self { id_store: Rc::new(RefCell::new(id_store)), use_combinator: true }
}
pub(crate) fn parse(&mut self, input: &str) -> Result<AST, ParseError> {
if self.use_combinator {
self.parse_comb(input)
} else {
self.parse_peg(input)
}
}
pub(crate) fn parse_peg(&mut self, input: &str) -> Result<AST, ParseError> {
peg_parser::schala_parser::program(input, self).map_err(ParseError::from_peg)
}
pub(crate) fn parse_comb(&mut self, input: &str) -> Result<AST, ParseError> {
let span = Span::new_extra(input, self.id_store.clone());
convert(input, combinator::program(span))
}
#[cfg(test)]
fn expression(&mut self, input: &str) -> Result<Expression, ParseError> {
peg_parser::schala_parser::expression(input, self).map_err(ParseError::from_peg)
}
#[cfg(test)]
fn expression_comb(&mut self, input: &str) -> Result<Expression, ParseError> {
let span = Span::new_extra(input, self.id_store.clone());
convert(input, combinator::expression(span))
}
#[cfg(test)]
fn block(&mut self, input: &str) -> Result<Block, ParseError> {
peg_parser::schala_parser::block(input, self).map_err(ParseError::from_peg)
}
#[cfg(test)]
fn block_comb(&mut self, input: &str) -> Result<Block, ParseError> {
let span = Span::new_extra(input, self.id_store.clone());
convert(input, combinator::block(span))
}
fn fresh(&mut self) -> Id<ASTItem> {
self.id_store.borrow_mut().fresh()
}
}
fn convert<'a, O>(input: &'a str, result: combinator::ParseResult<'a, O>) -> Result<O, ParseError> {
use nom::{error::VerboseError, Finish};
match result.finish() {
Ok((rest, output)) => {
if rest.fragment() != &"" {
return Err(ParseError {
location: Default::default(),
msg: format!("Bad parse state, remaining text: `{}`", rest.fragment()),
});
}
Ok(output)
}
Err(err) => {
let err = VerboseError {
errors: err.errors.into_iter().map(|(sp, kind)| (*sp.fragment(), kind)).collect(),
};
let msg = nom::error::convert_error(input, err);
Err(ParseError { msg, location: (0).into() })
}
}
}
/// Represents a parsing error
#[derive(Debug)]
pub struct ParseError {
pub msg: String,
pub location: Location,
}
impl ParseError {
fn from_peg(err: peg::error::ParseError<peg::str::LineCol>) -> Self {
let msg = err.to_string();
Self { msg, location: err.location.offset.into() }
}
}
#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub struct Location {
pub(crate) offset: usize,
}
impl From<usize> for Location {
fn from(offset: usize) -> Self {
Self { offset }
}
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.offset)
}
}
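// Usage sketch of the crate-internal Parser above (the input program is made up):
fn _parse_demo() {
    let mut parser = Parser::new();
    match parser.parse("let x = 10 + 20") {
        Ok(_ast) => println!("parse succeeded"),
        Err(e) => println!("parse error at offset {}: {}", e.location, e.msg),
    }
}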

View File

@ -1,567 +0,0 @@
use std::rc::Rc;
use super::Parser;
use crate::ast::*;
fn rc_string(s: &str) -> Rc<String> {
Rc::new(s.to_string())
}
enum ExtendedPart<'a> {
Index(Vec<Expression>),
Accessor(&'a str),
Call(Vec<InvocationArgument>),
}
peg::parser! {
pub grammar schala_parser() for str {
rule whitespace() = [' ' | '\t' ]
rule whitespace_or_newline() = [' ' | '\t' | '\n' ]
rule _ = quiet!{ (block_comment() / line_comment() / whitespace())* }
rule __ = quiet!{ (block_comment() / line_comment() / whitespace_or_newline())* }
rule block_comment() = "/*" (block_comment() / !"*/" [_])* "*/"
rule line_comment() = "//" (!['\n'] [_])* &"\n"
pub rule program(parser: &mut Parser) -> AST =
__ statements:(statement(parser) ** (delimiter()+) ) __ { AST { id: parser.fresh(), statements: statements.into() } }
rule delimiter() = (";" / "\n")+
//Note - this is a hack, ideally the rule `rule block() -> Block = "{" _ items:(statement() **
//delimiter()) _ "}" { items.into() }` would've worked, but it doesn't.
pub rule block(parser: &mut Parser) -> Block =
"{" __ items:(statement(parser) ** delimiter()) delimiter()? __ "}" { items.into() } /
"{" __ stmt:statement(parser) __ "}" { vec![stmt].into() }
rule block_item(parser: &mut Parser) -> Statement<StatementKind> =
_ stmt:statement(parser) _ delimiter()+ { stmt }
rule statement(parser: &mut Parser) -> Statement<StatementKind> =
_ pos:position!() kind:statement_kind(parser) _ { Statement { id: parser.fresh(), location: pos.into(), kind } }
rule statement_kind(parser: &mut Parser) -> StatementKind =
__ import:import(parser) { StatementKind::Import(import) } /
__ decl:declaration(parser) { StatementKind::Declaration(decl) } /
__ flow:flow(parser) { StatementKind::Flow(flow) } /
__ expr:expression(parser) { StatementKind::Expression(expr) }
rule flow(parser: &mut Parser) -> FlowControl =
"continue" { FlowControl::Continue } /
"break" { FlowControl::Break } /
"return" _ expr:expression(parser)? { FlowControl::Return(expr) }
//TODO add the ability to rename and exclude imports
rule import(parser: &mut Parser) -> ImportSpecifier =
"import" _ path_components:path_components() suffix:import_suffix()? {
ImportSpecifier {
id: parser.fresh(),
path_components,
imported_names: suffix.unwrap_or(ImportedNames::LastOfPath)
}
}
rule path_components() -> Vec<Rc<String>> =
"::"? name:identifier() rest:path_component()* {
let mut items = vec![rc_string(name)];
items.extend(rest.into_iter().map(rc_string));
items
}
rule path_component() -> &'input str = "::" ident:identifier() { ident }
rule import_suffix() -> ImportedNames =
"::*" { ImportedNames::All } /
"::{" __ names:(identifier() ** (_ "," _)) __ "}" {?
if names.is_empty() {
Err("import groups must have at least one item")
} else {
Ok(ImportedNames::List(names.into_iter().map(rc_string).collect()))
}
}
rule declaration(parser: &mut Parser) -> Declaration =
binding(parser) / type_decl(parser) / annotation(parser) / func(parser) / interface(parser) /
implementation(parser) / module(parser)
rule module(parser: &mut Parser) -> Declaration =
"module" _ name:identifier() _ items:block(parser) { Declaration::Module { name: rc_string(name), items } }
rule implementation(parser: &mut Parser) -> Declaration =
"impl" _ interface:type_singleton_name() _ "for" _ type_name:type_identifier() _ block:decl_block(parser) {
Declaration::Impl { type_name, interface_name: Some(interface), block }
} /
"impl" _ type_name:type_identifier() _ block:decl_block(parser) {
Declaration::Impl { type_name, interface_name: None, block }
}
rule decl_block(parser: &mut Parser) -> Vec<Statement<Declaration>> =
"{" __ decls:(func_declaration_stmt(parser) ** (delimiter()+)) delimiter()? __ "}" { decls }
rule func_declaration_stmt(parser: &mut Parser) -> Statement<Declaration> =
pos:position!() decl:func_declaration(parser) { Statement { id: parser.fresh(), location: pos.into(), kind: decl } }
rule interface(parser: &mut Parser) -> Declaration =
"interface" _ name:identifier() _ signatures:signature_block(parser) { Declaration::Interface { name: rc_string(name), signatures } }
rule signature_block(parser: &mut Parser) -> Vec<Signature> =
"{" __ signatures:(func_signature(parser) ** (delimiter()+)) __ "}" { signatures }
rule func(parser: &mut Parser) -> Declaration =
decl:func_declaration(parser) { decl } /
sig:func_signature(parser) { Declaration::FuncSig(sig) }
rule func_declaration(parser: &mut Parser) -> Declaration =
_ sig:func_signature(parser) __ body:block(parser) { Declaration::FuncDecl(sig, body) }
rule func_signature(parser: &mut Parser) -> Signature =
_ "fn" _ name:identifier() "(" _ params:formal_params(parser) _ ")" _ type_anno:type_anno()? { Signature {
name: rc_string(name), operator: false, params, type_anno
} } /
_ "fn" _ "(" op:operator() ")" _ "(" _ params:formal_params(parser) _ ")" _ type_anno:type_anno()? { Signature {
name: rc_string(op), operator: true, params, type_anno
} }
rule formal_params(parser: &mut Parser) -> Vec<FormalParam> =
params:(formal_param(parser) ** (_ "," _)) {? if params.len() < 256 { Ok(params) } else {
Err("function-too-long") }
}
rule formal_param(parser: &mut Parser) -> FormalParam =
name:identifier() _ anno:type_anno()? _ "=" expr:expression(parser) { FormalParam { name: rc_string(name),
default: Some(expr), anno } } /
name:identifier() _ anno:type_anno()? { FormalParam { name: rc_string(name), default: None, anno } }
rule annotation(parser: &mut Parser) -> Declaration =
"@" name:identifier() args:annotation_args(parser)? delimiter()+ _ inner:statement(parser) { Declaration::Annotation {
name: rc_string(name), arguments: if let Some(args) = args { args } else { vec![] }, inner: Box::new(inner) }
}
rule annotation_args(parser: &mut Parser) -> Vec<Expression> =
"(" _ args:(expression(parser) ** (_ "," _)) _ ")" { args }
rule binding(parser: &mut Parser) -> Declaration =
"let" _ mutable:"mut"? _ ident:identifier() _ type_anno:type_anno()? _ "=" _ expr:expression(parser) {
Declaration::Binding { name: Rc::new(ident.to_string()), constant: mutable.is_none(),
type_anno, expr }
}
rule type_decl(parser: &mut Parser) -> Declaration =
"type" _ "alias" _ alias:type_alias() { alias } /
"type" _ mutable:"mut"? _ name:type_singleton_name() _ "=" _ body:type_body(parser) {
Declaration::TypeDecl { name, body, mutable: mutable.is_some() }
}
rule type_singleton_name() -> TypeSingletonName =
name:identifier() params:type_params()? { TypeSingletonName {
name: rc_string(name), params: if let Some(params) = params { params } else { vec![] }
} }
rule type_params() -> Vec<TypeIdentifier> =
"<" _ idents:(type_identifier() ** (_ "," _)) _ ">" { idents }
rule type_identifier() -> TypeIdentifier =
"(" _ items:(type_identifier() ** (_ "," _)) _ ")" { TypeIdentifier::Tuple(items) } /
singleton:type_singleton_name() { TypeIdentifier::Singleton(singleton) }
rule type_body(parser: &mut Parser) -> TypeBody =
"{" _ items:(record_variant_item() ** (__ "," __)) __ "}" { TypeBody::ImmediateRecord { id: parser.fresh(), fields: items } } /
variants:(variant_spec(parser) ** (__ "|" __)) { TypeBody::Variants(variants) }
rule variant_spec(parser: &mut Parser) -> Variant =
name:identifier() __ "{" __ typed_identifier_list:(record_variant_item() ** (__ "," __)) __ ","? __ "}" { Variant {
id: parser.fresh(), name: rc_string(name), kind: VariantKind::Record(typed_identifier_list)
} } /
name:identifier() "(" tuple_members:(type_identifier() ++ (__ "," __)) ")" { Variant {
id: parser.fresh(), name: rc_string(name), kind: VariantKind::TupleStruct(tuple_members) } } /
name:identifier() { Variant { id: parser.fresh(), name: rc_string(name), kind: VariantKind::UnitStruct } }
rule record_variant_item() -> (Rc<String>, TypeIdentifier) =
name:identifier() _ ":" _ ty:type_identifier() { (rc_string(name), ty) }
rule type_alias() -> Declaration =
alias:identifier() _ "=" _ name:identifier() { Declaration::TypeAlias { alias: rc_string(alias), original: rc_string(name), } }
rule type_anno() -> TypeIdentifier =
":" _ identifier:type_identifier() { identifier }
pub rule expression(parser: &mut Parser) -> Expression =
__ kind:expression_kind(true, parser) _ type_anno:type_anno()? { Expression { id: parser.fresh(), type_anno, kind } }
rule expression_no_struct(parser: &mut Parser) -> Expression =
__ kind:expression_kind(false, parser) { Expression { id: parser.fresh(), type_anno: None, kind } }
rule expression_kind(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
precedence_expr(struct_ok, parser)
rule precedence_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
first:prefix_expr(struct_ok, parser) _ next:(precedence_continuation(struct_ok, parser))* {
let next = next.into_iter().map(|(sigil, expr)| (BinOp::from_sigil(sigil), expr)).collect();
BinopSequence { first, next }.do_precedence(parser)
}
rule precedence_continuation(struct_ok: bool, parser: &mut Parser) -> (&'input str, ExpressionKind) =
op:operator() _ expr:prefix_expr(struct_ok, parser) _ { (op, expr) }
rule prefix_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
prefix:prefix()? expr:extended_expr(struct_ok, parser) {
if let Some(p) = prefix {
let expr = Expression::new(parser.fresh(), expr);
let prefix = PrefixOp::from_sigil(p);
ExpressionKind::PrefixExp(prefix, Box::new(expr))
} else {
expr
}
}
rule prefix() -> &'input str =
$(['+' | '-' | '!' ])
//TODO make the definition of operators more complex
rule operator() -> &'input str =
quiet!{!"*/" s:$( ['+' | '-' | '*' | '/' | '%' | '<' | '>' | '=' | '!' | '$' | '&' | '|' | '?' | '^' | '`']+ ) { s } } /
expected!("operator")
rule extended_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
primary:primary(struct_ok, parser) parts:(extended_expr_part(parser)*) {
let mut expression = Expression::new(parser.fresh(), primary);
for part in parts.into_iter() {
let kind = match part {
ExtendedPart::Index(indexers) => {
ExpressionKind::Index { indexee: Box::new(expression), indexers }
},
ExtendedPart::Accessor(name) => {
let name = rc_string(name);
ExpressionKind::Access { name, expr: Box::new(expression) }
},
ExtendedPart::Call(arguments) => {
ExpressionKind::Call { f: Box::new(expression), arguments }
}
};
expression = Expression::new(parser.fresh(), kind);
}
expression.kind
}
rule extended_expr_part(parser: &mut Parser) -> ExtendedPart<'input> =
indexers:index_part(parser) { ExtendedPart::Index(indexers) } /
arguments:call_part(parser) { ExtendedPart::Call(arguments) } /
"." name:identifier() { ExtendedPart::Accessor(name) }
rule index_part(parser: &mut Parser) -> Vec<Expression> =
"[" indexers:(expression(parser) ++ ",") "]" { indexers }
rule call_part(parser: &mut Parser) -> Vec<InvocationArgument> =
"(" arguments:(invocation_argument(parser) ** ",") ")" { arguments }
rule invocation_argument(parser: &mut Parser) -> InvocationArgument =
_ "_" _ { InvocationArgument::Ignored } /
_ ident:identifier() _ "=" _ expr:expression(parser) { InvocationArgument::Keyword {
name: Rc::new(ident.to_string()),
expr
} } /
_ expr:expression(parser) _ { InvocationArgument::Positional(expr) }
rule primary(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
while_expr(parser) / for_expr(parser) / float_literal() / nat_literal() / bool_literal() /
string_literal() / paren_expr(parser) /
list_expr(parser) / if_expr(parser) / lambda_expr(parser) /
item:named_struct(parser) {? if struct_ok { Ok(item) } else { Err("no-struct-allowed") } } /
identifier_expr(parser)
rule lambda_expr(parser: &mut Parser) -> ExpressionKind =
r#"\"# __ "(" _ params:formal_params(parser) _ ")" _ type_anno:(type_anno()?) _ body:block(parser) {
ExpressionKind::Lambda { params, type_anno, body }
} /
r#"\"# param:formal_param(parser) _ type_anno:(type_anno()?) _ body:block(parser) {
ExpressionKind::Lambda { params: vec![param], type_anno, body }
}
rule for_expr(parser: &mut Parser) -> ExpressionKind =
"for" _ enumerators:for_enumerators(parser) _ body:for_body(parser) {
ExpressionKind::ForExpression { enumerators, body }
}
rule for_enumerators(parser: &mut Parser) -> Vec<Enumerator> =
"{" _ enumerators:(enumerator(parser) ++ ",") _ "}" { enumerators } /
enumerator:enumerator(parser) { vec![enumerator] }
//TODO add guards, etc.
rule enumerator(parser: &mut Parser) -> Enumerator =
ident:identifier() _ "<-" _ generator:expression_no_struct(parser) {
Enumerator { identifier: Rc::new(ident.to_string()), generator, assignment: false }
} /
//TODO need to distinguish these two cases in AST
ident:identifier() _ "=" _ generator:expression_no_struct(parser) {
Enumerator { identifier: Rc::new(ident.to_string()), generator, assignment: true }
}
rule for_body(parser: &mut Parser) -> Box<ForBody> =
"return" _ expr:expression(parser) { Box::new(ForBody::MonadicReturn(expr)) } /
body:block(parser) { Box::new(ForBody::StatementBlock(body)) }
rule while_expr(parser: &mut Parser) -> ExpressionKind =
"while" _ cond:expression_kind(false, parser)? _ body:block(parser) {
ExpressionKind::WhileExpression {
condition: cond.map(|kind| Box::new(Expression::new(parser.fresh(), kind))),
body,
}
}
rule identifier_expr(parser: &mut Parser) -> ExpressionKind =
qn:qualified_identifier(parser) { ExpressionKind::Value(qn) }
rule named_struct(parser: &mut Parser) -> ExpressionKind =
name:qualified_identifier(parser) _ fields:record_block(parser) {
ExpressionKind::NamedStruct {
name,
fields: fields.into_iter().map(|(n, exp)| (Rc::new(n.to_string()), exp)).collect(),
}
}
//TODO support anonymous structs and Elm-style update syntax for structs
rule record_block(parser: &mut Parser) -> Vec<(&'input str, Expression)> =
"{" _ entries:(record_entry(parser) ** ",") _ "}" { entries }
rule record_entry(parser: &mut Parser) -> (&'input str, Expression) =
_ name:identifier() _ ":" _ expr:expression(parser) _ { (name, expr) }
rule qualified_identifier(parser: &mut Parser) -> QualifiedName =
names:(identifier() ++ "::") { QualifiedName { id: parser.fresh(), components: names.into_iter().map(|name| Rc::new(name.to_string())).collect() } }
//TODO improve the definition of identifiers
rule identifier() -> &'input str =
!(reserved() !(ident_continuation())) text:$(['a'..='z' | 'A'..='Z' | '_'] ident_continuation()*) { text }
rule ident_continuation() -> &'input str =
text:$(['a'..='z' | 'A'..='Z' | '0'..='9' | '_'])
rule reserved() = "if" / "then" / "else" / "is" / "fn" / "for" / "while" / "let" / "in" / "mut" / "return" /
"break" / "alias" / "type" / "self" / "Self" / "interface" / "impl" / "true" / "false" / "module" / "import"
rule if_expr(parser: &mut Parser) -> ExpressionKind =
"if" _ discriminator:(expression(parser)?) _ body:if_expr_body(parser) {
ExpressionKind::IfExpression {
discriminator: discriminator.map(Box::new),
body: Box::new(body),
}
}
rule if_expr_body(parser: &mut Parser) -> IfExpressionBody =
cond_block(parser) / simple_pattern_match(parser) / simple_conditional(parser)
rule simple_conditional(parser: &mut Parser) -> IfExpressionBody =
"then" _ then_case:expr_or_block(parser) _ else_case:else_case(parser) {
IfExpressionBody::SimpleConditional { then_case, else_case }
}
rule simple_pattern_match(parser: &mut Parser) -> IfExpressionBody =
"is" _ pattern:pattern(parser) _ "then" _ then_case:expr_or_block(parser) _ else_case:else_case(parser) {
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case }
}
rule cond_block(parser: &mut Parser) -> IfExpressionBody =
"{" __ cond_arms:(cond_arm(parser) ++ (delimiter()+)) __ "}" { IfExpressionBody::CondList(cond_arms) }
rule cond_arm(parser: &mut Parser) -> ConditionArm =
_ "else" _ body:expr_or_block(parser) { ConditionArm { condition: Condition::Else, guard: None, body } } /
_ condition:condition(parser) _ guard:condition_guard(parser) _ "then" _ body:expr_or_block(parser)
{ ConditionArm { condition, guard, body } }
rule condition(parser: &mut Parser) -> Condition =
"is" _ pat:pattern(parser) { Condition::Pattern(pat) } /
op:operator() _ expr:expression(parser) { Condition::TruncatedOp(BinOp::from_sigil(op), expr) }
rule condition_guard(parser: &mut Parser) -> Option<Expression> =
("if" _ expr:expression(parser) { expr } )?
rule expr_or_block(parser: &mut Parser) -> Block = block(parser) / pos:position!() ex:expression(parser) {
Statement {
id: parser.fresh() , location: pos.into(),
kind: StatementKind::Expression(ex)
}.into()
}
rule else_case(parser: &mut Parser) -> Option<Block> =
("else" _ eorb:expr_or_block(parser) { eorb })?
rule pattern(parser: &mut Parser) -> Pattern =
"(" _ variants:(pattern(parser) ++ ",") _ ")" { Pattern::TuplePattern(variants) } /
_ pat:simple_pattern(parser) { pat }
rule simple_pattern(parser: &mut Parser) -> Pattern =
pattern_literal() /
qn:qualified_identifier(parser) "(" members:(pattern(parser) ** ",") ")" {
Pattern::TupleStruct(qn, members)
} /
qn:qualified_identifier(parser) _ "{" _ items:(record_pattern_entry(parser) ** ",") "}" _ {
let items = items.into_iter().map(|(name, pat)| (Rc::new(name.to_string()), pat)).collect();
Pattern::Record(qn, items)
} /
qn:qualified_identifier(parser) { Pattern::VarOrName(qn) }
rule record_pattern_entry(parser: &mut Parser) -> (&'input str, Pattern) =
_ name:identifier() _ ":" _ pat:pattern(parser) _ { (name, pat) } /
_ name:identifier() _ {
let qn = QualifiedName {
id: parser.fresh(),
components: vec![Rc::new(name.to_string())],
};
(name, Pattern::VarOrName(qn))
}
rule pattern_literal() -> Pattern =
"true" { Pattern::Literal(PatternLiteral::BoolPattern(true)) } /
"false" { Pattern::Literal(PatternLiteral::BoolPattern(false)) } /
s:bare_string_literal() { Pattern::Literal(PatternLiteral::StringPattern(Rc::new(s))) } /
sign:("-"?) num:(float_literal() / nat_literal()) {
let neg = sign.is_some();
Pattern::Literal(PatternLiteral::NumPattern { neg, num })
} /
"_" { Pattern::Ignored }
rule list_expr(parser: &mut Parser) -> ExpressionKind =
"[" exprs:(expression(parser) ** ",") "]" {
let mut exprs = exprs;
ExpressionKind::ListLiteral(exprs)
}
rule paren_expr(parser: &mut Parser) -> ExpressionKind =
"(" exprs:(expression(parser) ** ",") ")" {
let mut exprs = exprs;
match exprs.len() {
1 => exprs.pop().unwrap().kind,
_ => ExpressionKind::TupleLiteral(exprs),
}
}
rule string_literal() -> ExpressionKind =
prefix:identifier()? s:bare_string_literal(){ ExpressionKind::StringLiteral{ s: Rc::new(s),
prefix: prefix.map(rc_string)
} }
rule bare_string_literal() -> String =
"\"" chars:string_component()* "\"" { chars.into_iter().collect::<String>() }
rule string_component() -> char =
!(r#"""# / r#"\"#) ch:$([_]) { ch.chars().next().unwrap() } /
r#"\u{"# value:$(['0'..='9' | 'a'..='f' | 'A'..='F']+) "}" { char::from_u32(u32::from_str_radix(value, 16).unwrap()).unwrap() } /
r#"\n"# { '\n' } / r#"\t"# { '\t' } / r#"\""# { '"' } / r#"\\"# { '\\' } /
expected!("Valid escape sequence")
rule bool_literal() -> ExpressionKind =
"true" { ExpressionKind::BoolLiteral(true) } / "false" { ExpressionKind::BoolLiteral(false) }
rule nat_literal() -> ExpressionKind =
bin_literal() / hex_literal() / unmarked_literal()
rule unmarked_literal() -> ExpressionKind =
digits:digits() { let n = digits.chars().filter(|ch| *ch != '_').collect::<String>().parse().unwrap(); ExpressionKind::NatLiteral(n) }
rule bin_literal() -> ExpressionKind =
"0b" digits:bin_digits() {? parse_binary(digits).map(ExpressionKind::NatLiteral) }
rule hex_literal() -> ExpressionKind =
"0x" digits:hex_digits() {? parse_hex(digits).map(ExpressionKind::NatLiteral) }
rule float_literal() -> ExpressionKind =
ds:$( digits() "." digits()? / "." digits() ) { ExpressionKind::FloatLiteral(ds.parse().unwrap()) }
rule digits() -> &'input str = $((digit_group() "_"*)+)
rule bin_digits() -> &'input str = $((bin_digit_group() "_"*)+)
rule hex_digits() -> &'input str = $((hex_digit_group() "_"*)+)
rule digit_group() -> &'input str = $(['0'..='9']+)
rule bin_digit_group() -> &'input str = $(['0' | '1']+)
rule hex_digit_group() -> &'input str = $(['0'..='9' | 'a'..='f' | 'A'..='F']+)
}
}
fn parse_binary(digits: &str) -> Result<u64, &'static str> {
let mut result: u64 = 0;
let mut multiplier = 1;
for d in digits.chars().rev() {
match d {
'1' => result += multiplier,
'0' => (),
'_' => continue,
_ => unreachable!(),
}
multiplier = match multiplier.checked_mul(2) {
Some(m) => m,
None => return Err("Binary expression will overflow"),
}
}
Ok(result)
}
fn parse_hex(digits: &str) -> Result<u64, &'static str> {
let mut result: u64 = 0;
let mut multiplier: u64 = 1;
for d in digits.chars().rev() {
if d == '_' {
continue;
}
match d.to_digit(16) {
Some(n) => result += n as u64 * multiplier,
None => return Err("Internal parser error: invalid hex digit"),
}
multiplier = match multiplier.checked_mul(16) {
Some(m) => m,
None => return Err("Hexadecimal expression will overflow"),
}
}
Ok(result)
}
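// Illustrative results for the two helpers above (digits are passed without the
// "0b"/"0x" prefix, and underscores are ignored):
//   parse_binary("1010")      == Ok(10)
//   parse_binary("1111_0000") == Ok(240)
//   parse_hex("ff")           == Ok(255)
//   parse_hex("dead_beef")    == Ok(3735928559)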
#[derive(Debug)]
struct BinopSequence {
first: ExpressionKind,
next: Vec<(BinOp, ExpressionKind)>,
}
impl BinopSequence {
fn do_precedence(self, parser: &mut Parser) -> ExpressionKind {
fn helper(
precedence: i32,
lhs: ExpressionKind,
rest: &mut Vec<(BinOp, ExpressionKind)>,
parser: &mut Parser,
) -> Expression {
let mut lhs = Expression::new(parser.fresh(), lhs);
while let Some((next_op, next_rhs)) = rest.pop() {
let new_precedence = next_op.get_precedence();
if precedence >= new_precedence {
rest.push((next_op, next_rhs));
break;
}
let rhs = helper(new_precedence, next_rhs, rest, parser);
lhs = Expression::new(
parser.fresh(),
ExpressionKind::BinExp(next_op, Box::new(lhs), Box::new(rhs)),
);
}
lhs
}
let mut as_stack = self.next.into_iter().rev().collect();
helper(BinOp::min_precedence(), self.first, &mut as_stack, parser).kind
}
}
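// Worked example: assuming "*" binds tighter than "+" (as in the operator tables
// elsewhere in this diff), the flat sequence { first: 1, next: [("+", 2), ("*", 3)] }
// folds into BinExp("+", 1, BinExp("*", 2, 3)), i.e. 1 + (2 * 3) rather than (1 + 2) * 3.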

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,217 @@
use std::rc::Rc;
use ast::{AST, Statement, Expression, Declaration, Discriminator, IfExpressionBody, Pattern};
use symbol_table::{Symbol, SymbolSpec, SymbolTable};
use builtin::{BinOp, PrefixOp};
#[derive(Debug)]
pub struct ReducedAST(pub Vec<Stmt>);
#[derive(Debug, Clone)]
pub enum Stmt {
PreBinding {
name: Rc<String>,
func: Func,
},
Binding {
name: Rc<String>,
constant: bool,
expr: Expr,
},
Expr(Expr),
Noop,
}
#[derive(Debug, Clone)]
pub enum Expr {
Lit(Lit),
Func(Func),
Tuple(Vec<Expr>),
Constructor {
variant: usize,
expr: Box<Expr>,
},
Val(Rc<String>),
Call {
f: Box<Expr>,
args: Vec<Expr>,
},
Assign {
val: Box<Expr>,
expr: Box<Expr>,
},
Conditional {
cond: Box<Expr>,
then_clause: Vec<Stmt>,
else_clause: Vec<Stmt>,
},
Match {
cond: Box<Expr>,
arms: Vec<(Pattern, Vec<Stmt>)>
},
UnimplementedSigilValue
}
pub enum Pat {
Ignored
}
#[derive(Debug, Clone)]
pub enum Lit {
Nat(u64),
Int(i64),
Float(f64),
Bool(bool),
StringLit(Rc<String>),
Custom(Rc<String>, Vec<Expr>),
}
#[derive(Debug, Clone)]
pub enum Func {
BuiltIn(Rc<String>),
UserDefined {
name: Option<Rc<String>>,
params: Vec<Rc<String>>,
body: Vec<Stmt>,
}
}
impl AST {
pub fn reduce(&self, symbol_table: &SymbolTable) -> ReducedAST {
let mut output = vec![];
for statement in self.0.iter() {
output.push(statement.reduce(symbol_table));
}
ReducedAST(output)
}
}
impl Statement {
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
use ast::Statement::*;
match self {
ExpressionStatement(expr) => Stmt::Expr(expr.reduce(symbol_table)),
Declaration(decl) => decl.reduce(symbol_table),
}
}
}
impl Expression {
fn reduce(&self, symbol_table: &SymbolTable) -> Expr {
use ast::ExpressionType::*;
let ref input = self.0;
match input {
NatLiteral(n) => Expr::Lit(Lit::Nat(*n)),
FloatLiteral(f) => Expr::Lit(Lit::Float(*f)),
StringLiteral(s) => Expr::Lit(Lit::StringLit(s.clone())),
BoolLiteral(b) => Expr::Lit(Lit::Bool(*b)),
BinExp(binop, lhs, rhs) => binop.reduce(symbol_table, lhs, rhs),
PrefixExp(op, arg) => op.reduce(symbol_table, arg),
//remember Some(5) is a CallExpr
// => ast: Ok(AST([ExpressionStatement(Expression(Call { f: Expression(Value("Some"), None), arguments: [Expression(NatLiteral(5), None)] }, None))]))
Value(name) => {
match symbol_table.values.get(name) {
Some(Symbol { spec: SymbolSpec::DataConstructor { type_args, .. }, .. }) => {
Expr::Constructor { type_name: name.clone() }
},
_ => Expr::Val(name.clone()),
}
},
Call { f, arguments } => Expr::Call {
f: Box::new(f.reduce(symbol_table)),
args: arguments.iter().map(|arg| arg.reduce(symbol_table)).collect(),
},
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| e.reduce(symbol_table)).collect()),
IfExpression { discriminator, body } => reduce_if_expression(discriminator, body, symbol_table),
_ => Expr::UnimplementedSigilValue,
}
}
}
fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody, symbol_table: &SymbolTable) -> Expr {
let cond = Box::new(match *discriminator {
Discriminator::Simple(ref expr) => expr.reduce(symbol_table),
_ => panic!(),
});
match *body {
IfExpressionBody::SimpleConditional(ref then_clause, ref else_clause) => {
let then_clause = then_clause.iter().map(|expr| expr.reduce(symbol_table)).collect();
let else_clause = match else_clause {
None => vec![],
Some(stmts) => stmts.iter().map(|expr| expr.reduce(symbol_table)).collect(),
};
Expr::Conditional { cond, then_clause, else_clause }
},
IfExpressionBody::SimplePatternMatch(ref pat, ref then_clause, ref else_clause) => {
let then_clause = then_clause.iter().map(|expr| expr.reduce(symbol_table)).collect();
let else_clause = match else_clause {
None => vec![],
Some(stmts) => stmts.iter().map(|expr| expr.reduce(symbol_table)).collect(),
};
Expr::Match {
cond,
arms: vec![
(pat.clone(), then_clause),
(Pattern::Ignored, else_clause)
],
}
},
IfExpressionBody::GuardList(ref _guard_arms) => panic!(),
}
}
impl Pattern {
fn reduce(&self, symbol_table: &SymbolTable) -> Pat {
match self {
Pattern::Ignored => Pat::Ignored,
Pattern::TuplePattern(_) => panic!(),
Pattern::Literal(_) => panic!(),
Pattern::TupleStruct(_, _) => panic!(),
Pattern::Record(_, _) => panic!(),
}
}
}
impl Declaration {
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
use self::Declaration::*;
use ::ast::Signature;
match self {
Binding {name, constant, expr } => Stmt::Binding { name: name.clone(), constant: *constant, expr: expr.reduce(symbol_table) },
FuncDecl(Signature { name, params, .. }, statements) => Stmt::PreBinding {
name: name.clone(),
func: Func::UserDefined {
name: Some(name.clone()),
params: params.iter().map(|param| param.0.clone()).collect(),
body: statements.iter().map(|stmt| stmt.reduce(symbol_table)).collect(),
}
},
TypeDecl { .. } => Stmt::Noop,
TypeAlias(_, _) => Stmt::Noop,
Interface { .. } => Stmt::Noop,
Impl { .. } => Stmt::Expr(Expr::UnimplementedSigilValue),
_ => Stmt::Expr(Expr::UnimplementedSigilValue)
}
}
}
impl BinOp {
fn reduce(&self, symbol_table: &SymbolTable, lhs: &Box<Expression>, rhs: &Box<Expression>) -> Expr {
if **self.sigil() == "=" {
Expr::Assign {
val: Box::new(lhs.reduce(symbol_table)),
expr: Box::new(rhs.reduce(symbol_table)),
}
} else {
let f = Box::new(Expr::Func(Func::BuiltIn(self.sigil().clone())));
Expr::Call { f, args: vec![lhs.reduce(symbol_table), rhs.reduce(symbol_table)]}
}
}
}
impl PrefixOp {
fn reduce(&self, symbol_table: &SymbolTable, arg: &Box<Expression>) -> Expr {
let f = Box::new(Expr::Func(Func::BuiltIn(self.sigil().clone())));
Expr::Call { f, args: vec![arg.reduce(symbol_table)]}
}
}
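// Illustrative: under these two impls, `x = 5` reduces to
//   Expr::Assign { val: Val("x"), expr: Lit(Nat(5)) },
// while `1 + 2` becomes a call of the built-in function named by its sigil:
//   Expr::Call { f: Func(BuiltIn("+")), args: [Lit(Nat(1)), Lit(Nat(2))] }.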

View File

@ -1,484 +0,0 @@
use std::{collections::HashMap, rc::Rc, str::FromStr};
use crate::{
ast,
builtin::Builtin,
symbol_table::{DefId, SymbolSpec, SymbolTable},
type_inference::{TypeContext, TypeId},
};
mod test;
mod types;
pub use types::*;
pub fn reduce(ast: &ast::AST, symbol_table: &SymbolTable, type_context: &TypeContext) -> ReducedIR {
let reducer = Reducer::new(symbol_table, type_context);
reducer.reduce(ast)
}
struct Reducer<'a, 'b> {
symbol_table: &'a SymbolTable,
functions: HashMap<DefId, FunctionDefinition>,
type_context: &'b TypeContext,
}
impl<'a, 'b> Reducer<'a, 'b> {
fn new(symbol_table: &'a SymbolTable, type_context: &'b TypeContext) -> Self {
Self { symbol_table, functions: HashMap::new(), type_context }
}
fn reduce(mut self, ast: &ast::AST) -> ReducedIR {
// First reduce all functions
// TODO once this works, maybe rewrite it using the Visitor
for statement in ast.statements.statements.iter() {
self.top_level_definition(statement);
}
// Then compute the entrypoint statements (which may reference previously-computed
// functions by ID)
let mut entrypoint = vec![];
for statement in ast.statements.statements.iter() {
let ast::Statement { id: item_id, kind, .. } = statement;
match &kind {
ast::StatementKind::Expression(expr) => {
entrypoint.push(Statement::Expression(self.expression(expr)));
}
ast::StatementKind::Declaration(ast::Declaration::Binding {
name: _,
constant,
expr,
..
}) => {
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
entrypoint.push(Statement::Binding {
id: symbol.def_id(),
constant: *constant,
expr: self.expression(expr),
});
}
_ => (),
}
}
ReducedIR { functions: self.functions, entrypoint }
}
fn top_level_definition(&mut self, statement: &ast::Statement<ast::StatementKind>) {
let ast::Statement { id: item_id, kind, .. } = statement;
match kind {
ast::StatementKind::Expression(_expr) => {
//TODO expressions can in principle contain definitions, but I won't worry
//about it now
}
ast::StatementKind::Declaration(decl) => match decl {
ast::Declaration::FuncDecl(_, statements) => {
self.insert_function_definition(item_id, statements);
}
ast::Declaration::Impl { type_name: _, interface_name: _, block } =>
for item in block {
if let ast::Statement {
id: item_id,
kind: ast::Declaration::FuncDecl(_, statements),
..
} = item
{
self.insert_function_definition(item_id, statements);
}
},
_ => (),
},
// Imports should have already been processed by the symbol table and are irrelevant
// for this representation.
ast::StatementKind::Import(..) => (),
ast::StatementKind::Flow(..) => {
//TODO this should be an error
}
}
}
fn function_internal_statement(
&mut self,
statement: &ast::Statement<ast::StatementKind>,
) -> Option<Statement> {
let ast::Statement { id: item_id, kind, .. } = statement;
match kind {
ast::StatementKind::Expression(expr) => Some(Statement::Expression(self.expression(expr))),
ast::StatementKind::Declaration(decl) => match decl {
ast::Declaration::FuncDecl(_, statements) => {
self.insert_function_definition(item_id, statements);
None
}
ast::Declaration::Binding { constant, expr, .. } => {
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
Some(Statement::Binding {
id: symbol.def_id(),
constant: *constant,
expr: self.expression(expr),
})
}
_ => None,
},
ast::StatementKind::Import(_) => None,
ast::StatementKind::Flow(ast::FlowControl::Return(expr)) =>
if let Some(expr) = expr {
Some(Statement::Return(self.expression(expr)))
} else {
Some(Statement::Return(Expression::unit()))
},
ast::StatementKind::Flow(ast::FlowControl::Break) => Some(Statement::Break),
ast::StatementKind::Flow(ast::FlowControl::Continue) => Some(Statement::Continue),
}
}
fn insert_function_definition(&mut self, item_id: &ast::ItemId, statements: &ast::Block) {
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
let function_def = FunctionDefinition { body: self.function_internal_block(statements) };
self.functions.insert(symbol.def_id(), function_def);
}
//TODO this needs to be type-aware to work correctly
fn lookup_method(&mut self, name: &str) -> Option<DefId> {
for (def_id, function) in self.functions.iter() {
let symbol = self.symbol_table.lookup_symbol_by_def(def_id)?;
println!("Def Id: {} symbol: {:?}", def_id, symbol);
if symbol.local_name() == name {
return Some(*def_id);
}
}
None
}
fn expression(&mut self, expr: &ast::Expression) -> Expression {
use crate::ast::ExpressionKind::*;
match &expr.kind {
SelfValue => Expression::Lookup(Lookup::SelfParam),
NatLiteral(n) => Expression::Literal(Literal::Nat(*n)),
FloatLiteral(f) => Expression::Literal(Literal::Float(*f)),
//TODO implement handling string literal prefixes
StringLiteral { s, prefix: _ } => Expression::Literal(Literal::StringLit(s.clone())),
BoolLiteral(b) => Expression::Literal(Literal::Bool(*b)),
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
PrefixExp(op, arg) => self.prefix(op, arg),
Value(qualified_name) => self.value(qualified_name),
Call { f, arguments } => {
let f = self.expression(f);
let args = arguments.iter().map(|arg| self.invocation_argument(arg)).collect();
//TODO need to have full type availability at this point to do this method lookup
//correctly
if let Expression::Access { name, expr } = f {
let def_id = self.lookup_method(&name).unwrap();
let method = Expression::Lookup(Lookup::Function(def_id));
Expression::CallMethod { f: Box::new(method), args, self_expr: expr }
} else {
Expression::Call { f: Box::new(f), args }
}
}
TupleLiteral(exprs) => Expression::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
IfExpression { discriminator, body } =>
self.reduce_if_expression(discriminator.as_ref().map(|x| x.as_ref()), body),
Lambda { params, body, .. } => Expression::Callable(Callable::Lambda {
arity: params.len() as u8,
body: self.function_internal_block(body),
}),
NamedStruct { name, fields } => {
let symbol = match self.symbol_table.lookup_symbol(&name.id) {
Some(symbol) => symbol,
None => return Expression::ReductionError(format!("No symbol found for {}", name)),
};
let (tag, type_id) = match symbol.spec() {
SymbolSpec::RecordConstructor { tag, type_id } => (tag, type_id),
e => return Expression::ReductionError(format!("Bad symbol for NamedStruct: {:?}", e)),
};
let field_order = compute_field_orderings(self.type_context, &type_id, tag).unwrap();
let mut field_map = HashMap::new();
for (name, expr) in fields.iter() {
field_map.insert(name.as_ref(), expr);
}
let mut ordered_args = vec![];
for field in field_order.iter() {
let expr = match field_map.get(&field) {
Some(expr) => expr,
None =>
return Expression::ReductionError(format!(
"Field {} not specified for record {}",
field, name
)),
};
ordered_args.push(self.expression(expr));
}
let constructor =
Expression::Callable(Callable::RecordConstructor { type_id, tag, field_order });
Expression::Call { f: Box::new(constructor), args: ordered_args }
}
Index { indexee, indexers } => self.reduce_index(indexee.as_ref(), indexers.as_slice()),
WhileExpression { condition, body } => {
let cond = Box::new(if let Some(condition) = condition {
self.expression(condition)
} else {
Expression::Literal(Literal::Bool(true))
});
let statements = self.function_internal_block(body);
Expression::Loop { cond, statements }
}
ForExpression { .. } => Expression::ReductionError("For expr not implemented".to_string()),
ListLiteral(items) => Expression::List(items.iter().map(|item| self.expression(item)).collect()),
Access { name, expr } =>
Expression::Access { name: name.as_ref().to_string(), expr: Box::new(self.expression(expr)) },
}
}
//TODO figure out the semantics of multiple indexers - for now, just ignore them
fn reduce_index(&mut self, indexee: &ast::Expression, indexers: &[ast::Expression]) -> Expression {
if indexers.len() != 1 {
return Expression::ReductionError("Invalid index expression".to_string());
}
let indexee = self.expression(indexee);
let indexer = self.expression(&indexers[0]);
Expression::Index { indexee: Box::new(indexee), indexer: Box::new(indexer) }
}
fn reduce_if_expression(
&mut self,
discriminator: Option<&ast::Expression>,
body: &ast::IfExpressionBody,
) -> Expression {
use ast::IfExpressionBody::*;
let cond = Box::new(match discriminator {
Some(expr) => self.expression(expr),
None => return Expression::ReductionError("blank cond if-expr not supported".to_string()),
});
match body {
SimpleConditional { then_case, else_case } => {
let then_clause = self.function_internal_block(then_case);
let else_clause = match else_case.as_ref() {
None => vec![],
Some(stmts) => self.function_internal_block(stmts),
};
Expression::Conditional { cond, then_clause, else_clause }
}
SimplePatternMatch { pattern, then_case, else_case } => {
let alternatives = vec![
Alternative {
pattern: match pattern.reduce(self.symbol_table) {
Ok(p) => p,
Err(e) => return Expression::ReductionError(format!("Bad pattern: {:?}", e)),
},
item: self.function_internal_block(then_case),
},
Alternative {
pattern: Pattern::Ignored,
item: match else_case.as_ref() {
Some(else_case) => self.function_internal_block(else_case),
None => vec![],
},
},
];
Expression::CaseMatch { cond, alternatives }
}
CondList(ref condition_arms) => {
let mut alternatives = vec![];
for arm in condition_arms {
match arm.condition {
ast::Condition::Pattern(ref pat) => {
let alt = Alternative {
pattern: match pat.reduce(self.symbol_table) {
Ok(p) => p,
Err(e) =>
return Expression::ReductionError(format!("Bad pattern: {:?}", e)),
},
item: self.function_internal_block(&arm.body),
};
alternatives.push(alt);
}
ast::Condition::TruncatedOp(_, _) =>
return Expression::ReductionError("case-expression-trunc-op".to_string()),
ast::Condition::Else =>
return Expression::ReductionError("case-expression-else".to_string()),
}
}
Expression::CaseMatch { cond, alternatives }
}
}
}
fn invocation_argument(&mut self, invoc: &ast::InvocationArgument) -> Expression {
use crate::ast::InvocationArgument::*;
match invoc {
Positional(ex) => self.expression(ex),
Keyword { .. } => Expression::ReductionError("Keyword arguments not supported".to_string()),
Ignored => Expression::ReductionError("Ignored arguments not supported".to_string()),
}
}
fn function_internal_block(&mut self, block: &ast::Block) -> Vec<Statement> {
block.statements.iter().filter_map(|stmt| self.function_internal_statement(stmt)).collect()
}
fn prefix(&mut self, prefix: &ast::PrefixOp, arg: &ast::Expression) -> Expression {
let builtin: Option<Builtin> = TryFrom::try_from(prefix).ok();
match builtin {
Some(op) => Expression::Call {
f: Box::new(Expression::Callable(Callable::Builtin(op))),
args: vec![self.expression(arg)],
},
None => {
//TODO need this for custom prefix ops
Expression::ReductionError("User-defined prefix ops not supported".to_string())
}
}
}
fn binop(&mut self, binop: &ast::BinOp, lhs: &ast::Expression, rhs: &ast::Expression) -> Expression {
use Expression::ReductionError;
let operation = Builtin::from_str(binop.sigil()).ok();
match operation {
Some(Builtin::Assignment) => {
let lval = match &lhs.kind {
ast::ExpressionKind::Value(qualified_name) => {
if let Some(symbol) = self.symbol_table.lookup_symbol(&qualified_name.id) {
symbol.def_id()
} else {
return ReductionError(format!("Couldn't look up name: {:?}", qualified_name));
}
}
_ => return ReductionError("Trying to assign to a non-name".to_string()),
};
Expression::Assign { lval, rval: Box::new(self.expression(rhs)) }
}
Some(op) => Expression::Call {
f: Box::new(Expression::Callable(Callable::Builtin(op))),
args: vec![self.expression(lhs), self.expression(rhs)],
},
//TODO handle a user-defined operation
None => ReductionError("User-defined operations not supported".to_string()),
}
}
fn value(&mut self, qualified_name: &ast::QualifiedName) -> Expression {
use SymbolSpec::*;
let symbol = match self.symbol_table.lookup_symbol(&qualified_name.id) {
Some(s) => s,
None =>
return Expression::ReductionError(format!("No symbol found for name: `{}`", qualified_name)),
};
let def_id = symbol.def_id();
match symbol.spec() {
Builtin(b) => Expression::Callable(Callable::Builtin(b)),
Func { .. } => Expression::Lookup(Lookup::Function(def_id)),
GlobalBinding => Expression::Lookup(Lookup::GlobalVar(def_id)),
LocalVariable => Expression::Lookup(Lookup::LocalVar(def_id)),
FunctionParam(n) => Expression::Lookup(Lookup::Param(n)),
DataConstructor { tag, type_id } =>
Expression::Callable(Callable::DataConstructor { type_id, tag }),
RecordConstructor { .. } => Expression::ReductionError(format!(
"The symbol for value {:?} is unexpectdly a RecordConstructor",
qualified_name
)),
}
}
}
impl ast::Pattern {
fn reduce(&self, symbol_table: &SymbolTable) -> Result<Pattern, PatternError> {
Ok(match self {
ast::Pattern::Ignored => Pattern::Ignored,
ast::Pattern::TuplePattern(subpatterns) => {
let items: Result<Vec<Pattern>, PatternError> =
subpatterns.iter().map(|pat| pat.reduce(symbol_table)).into_iter().collect();
let items = items?;
Pattern::Tuple { tag: None, subpatterns: items }
}
ast::Pattern::Literal(lit) => Pattern::Literal(match lit {
ast::PatternLiteral::NumPattern { neg, num } => match (neg, num) {
(false, ast::ExpressionKind::NatLiteral(n)) => Literal::Nat(*n),
(false, ast::ExpressionKind::FloatLiteral(f)) => Literal::Float(*f),
(true, ast::ExpressionKind::NatLiteral(n)) => Literal::Int(-(*n as i64)),
(true, ast::ExpressionKind::FloatLiteral(f)) => Literal::Float(-f),
(_, e) =>
return Err(format!("Internal error, unexpected pattern literal: {:?}", e).into()),
},
ast::PatternLiteral::StringPattern(s) => Literal::StringLit(s.clone()),
ast::PatternLiteral::BoolPattern(b) => Literal::Bool(*b),
}),
ast::Pattern::TupleStruct(name, subpatterns) => {
let symbol = symbol_table.lookup_symbol(&name.id).unwrap();
if let SymbolSpec::DataConstructor { tag, type_id: _ } = symbol.spec() {
let items: Result<Vec<Pattern>, PatternError> =
subpatterns.iter().map(|pat| pat.reduce(symbol_table)).into_iter().collect();
let items = items?;
Pattern::Tuple { tag: Some(tag), subpatterns: items }
} else {
return Err(
"Internal error, trying to match something that's not a DataConstructor".into()
);
}
}
ast::Pattern::VarOrName(name) => {
let symbol = symbol_table.lookup_symbol(&name.id).unwrap();
match symbol.spec() {
SymbolSpec::DataConstructor { tag, type_id: _ } =>
Pattern::Tuple { tag: Some(tag), subpatterns: vec![] },
SymbolSpec::LocalVariable => {
let def_id = symbol.def_id();
Pattern::Binding(def_id)
}
spec => return Err(format!("Unexpected VarOrName symbol: {:?}", spec).into()),
}
}
ast::Pattern::Record(name, specified_members) => {
let symbol = symbol_table.lookup_symbol(&name.id).unwrap();
if let SymbolSpec::RecordConstructor { tag, type_id: _ } = symbol.spec() {
//TODO do this computation from the type_id
/*
if specified_members.iter().any(|(member, _)| !members.contains_key(member)) {
return Err(format!("Unknown key in record pattern").into());
}
*/
let subpatterns: Result<Vec<(Rc<String>, Pattern)>, PatternError> = specified_members
.iter()
.map(|(name, pat)| {
pat.reduce(symbol_table).map(|reduced_pat| (name.clone(), reduced_pat))
})
.into_iter()
.collect();
let subpatterns = subpatterns?;
Pattern::Record { tag, subpatterns }
} else {
return Err(format!("Unexpected Record pattern symbol: {:?}", symbol.spec()).into());
}
}
})
}
}
/// Given the type context and a variant, compute what order the fields on it were stored.
/// This needs to be public until type-checking is fully implemented because the type information
/// is only available at runtime.
pub fn compute_field_orderings(
type_context: &TypeContext,
type_id: &TypeId,
tag: u32,
) -> Option<Vec<String>> {
// Eventually, the ReducedIR should decide what field ordering is optimal.
// For now, just do it alphabetically.
let record_members = type_context.lookup_record_members(type_id, tag)?;
let mut field_order: Vec<String> =
record_members.iter().map(|(field, _type_id)| field).cloned().collect();
field_order.sort_unstable();
Some(field_order)
}
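// A minimal, self-contained sketch of the alphabetical fallback ordering described above;
// the field names here are hypothetical and exist only to illustrate the sort.
#[test]
fn alphabetical_field_ordering_sketch() {
    let mut field_order: Vec<String> =
        ["y_coord", "label", "x_coord"].iter().map(|s| s.to_string()).collect();
    field_order.sort_unstable();
    assert_eq!(field_order, vec!["label", "x_coord", "y_coord"]);
}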


@ -1,61 +0,0 @@
#![cfg(test)]
use super::*;
use crate::{symbol_table::SymbolTable, type_inference::TypeContext};
fn build_ir(input: &str) -> ReducedIR {
let ast = crate::util::quick_ast(input);
let mut symbol_table = SymbolTable::new();
let mut type_context = TypeContext::new();
symbol_table.process_ast(&ast, &mut type_context).unwrap();
let reduced = reduce(&ast, &symbol_table, &type_context);
reduced.debug(&symbol_table);
reduced
}
#[test]
fn test_ir() {
let src = r#"
let global_one = 10 + 20
let global_two = "the string hello"
fn a_function(i, j, k) {
fn nested(x) {
x + 10
}
i + j * nested(k)
}
fn another_function(e) {
let local_var = 420
e * local_var
}
another_function()
"#;
let reduced = build_ir(src);
assert_eq!(reduced.functions.len(), 3);
}
#[test]
fn test_methods() {
let src = r#"
type Thing = Thing
impl Thing {
fn a_method() {
20
}
}
let a = Thing
4 + a.a_method()
"#;
let reduced = build_ir(src);
assert_eq!(reduced.functions.len(), 1);
}


@ -1,137 +0,0 @@
use std::{collections::HashMap, convert::From, rc::Rc};
use crate::{
builtin::Builtin,
symbol_table::{DefId, SymbolTable},
type_inference::TypeId,
};
//TODO most of these Clone impls only exist to support function application, because the
//tree-walking evaluator moves the reduced IR members.
/// The reduced intermediate representation consists of a list of function definitions, and a block
/// of entrypoint statements. In a repl or script context this can be an arbitrary list of
/// statements; in an executable context it will likely just be a pointer to the main() function.
#[derive(Debug)]
pub struct ReducedIR {
pub functions: HashMap<DefId, FunctionDefinition>,
pub entrypoint: Vec<Statement>,
}
impl ReducedIR {
#[allow(dead_code)]
pub fn debug(&self, symbol_table: &SymbolTable) {
println!("Reduced IR:");
println!("Functions:");
println!("-----------");
for (id, callable) in self.functions.iter() {
let name = &symbol_table.lookup_symbol_by_def(id).unwrap().local_name();
println!("{}({}) -> {:?}", id, name, callable);
}
println!();
println!("Entrypoint:");
println!("-----------");
for stmt in self.entrypoint.iter() {
println!("{:?}", stmt);
}
println!("-----------");
}
}
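// A rough, hand-built example of the shape described above: the ReducedIR for a script
// consisting of the single expression statement `10` (no function definitions).
#[allow(dead_code)]
fn example_reduced_ir() -> ReducedIR {
    ReducedIR {
        functions: HashMap::new(),
        entrypoint: vec![Statement::Expression(Expression::Literal(Literal::Nat(10)))],
    }
}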
#[derive(Debug, Clone)]
pub enum Statement {
Expression(Expression),
Binding { id: DefId, constant: bool, expr: Expression },
Return(Expression),
Continue,
Break,
}
#[derive(Debug, Clone)]
pub enum Expression {
Literal(Literal),
Tuple(Vec<Expression>),
List(Vec<Expression>),
Lookup(Lookup),
Assign { lval: DefId, rval: Box<Expression> },
Access { name: String, expr: Box<Expression> },
Callable(Callable),
Call { f: Box<Expression>, args: Vec<Expression> },
CallMethod { f: Box<Expression>, args: Vec<Expression>, self_expr: Box<Expression> },
Conditional { cond: Box<Expression>, then_clause: Vec<Statement>, else_clause: Vec<Statement> },
CaseMatch { cond: Box<Expression>, alternatives: Vec<Alternative> },
Loop { cond: Box<Expression>, statements: Vec<Statement> },
Index { indexee: Box<Expression>, indexer: Box<Expression> },
ReductionError(String),
}
impl Expression {
pub fn unit() -> Self {
Expression::Tuple(vec![])
}
}
#[derive(Debug)]
pub struct FunctionDefinition {
pub body: Vec<Statement>,
}
#[derive(Debug, Clone)]
pub enum Callable {
Builtin(Builtin),
UserDefined(DefId),
Lambda { arity: u8, body: Vec<Statement> },
DataConstructor { type_id: TypeId, tag: u32 },
RecordConstructor { type_id: TypeId, tag: u32, field_order: Vec<String> },
}
#[derive(Debug, Clone)]
pub enum Lookup {
LocalVar(DefId),
GlobalVar(DefId),
Function(DefId),
Param(u8),
SelfParam,
}
#[derive(Debug, Clone, PartialEq)]
pub enum Literal {
Nat(u64),
Int(i64),
Float(f64),
Bool(bool),
StringLit(Rc<String>),
}
#[derive(Debug, Clone)]
pub struct Alternative {
pub pattern: Pattern,
pub item: Vec<Statement>,
}
#[derive(Debug, Clone)]
pub enum Pattern {
Tuple { subpatterns: Vec<Pattern>, tag: Option<u32> },
Record { tag: u32, subpatterns: Vec<(Rc<String>, Pattern)> },
Literal(Literal),
Ignored,
Binding(DefId),
}
#[allow(dead_code)]
#[derive(Debug)]
pub struct PatternError {
msg: String,
}
impl From<&str> for PatternError {
fn from(s: &str) -> Self {
Self { msg: s.to_string() }
}
}
impl From<String> for PatternError {
fn from(msg: String) -> Self {
Self { msg }
}
}


@ -1,210 +0,0 @@
use schala_repl::{
ComputationRequest, ComputationResponse, GlobalOutputStats, LangMetaRequest, LangMetaResponse,
ProgrammingLanguageInterface,
};
use stopwatch::Stopwatch;
use crate::{error::SchalaError, parsing, reduced_ir, symbol_table, tree_walk_eval, type_inference};
/// All the state necessary to parse and execute a Schala program are stored in this struct.
pub struct Schala<'a> {
/// Holds the original source code, indexed by line and character for error reporting
source_reference: SourceReference,
//state: eval::State<'static>,
/// Keeps track of symbols and scopes
symbol_table: symbol_table::SymbolTable,
/// Contains information for type-checking
type_context: type_inference::TypeContext,
/// Schala Parser
active_parser: parsing::Parser,
/// Execution state for AST-walking interpreter
eval_state: tree_walk_eval::State<'a>,
timings: Vec<(&'static str, std::time::Duration)>,
}
/*
impl Schala {
//TODO implement documentation for language items
/*
fn handle_docs(&self, source: String) -> LangMetaResponse {
LangMetaResponse::Docs {
doc_string: format!("Schala item `{}` : <<Schala-lang documentation not yet implemented>>", source)
}
}
*/
}
*/
impl<'a> Schala<'a> {
/// Creates a new Schala environment *without* any prelude.
fn new_blank_env() -> Schala<'a> {
Schala {
source_reference: SourceReference::new(),
symbol_table: symbol_table::SymbolTable::new(),
type_context: type_inference::TypeContext::new(),
active_parser: parsing::Parser::new(),
eval_state: tree_walk_eval::State::new(),
timings: Vec::new(),
}
}
/// Creates a new Schala environment with the standard prelude, which is defined as ordinary
/// Schala code in the file `prelude.schala`
#[allow(clippy::new_without_default)]
pub fn new() -> Schala<'a> {
let prelude = include_str!("../source-files/prelude.schala");
let mut env = Schala::new_blank_env();
let response = env.run_pipeline(prelude, SchalaConfig::default());
if let Err(err) = response {
panic!("Error in prelude, panicking: {}", err.display());
}
env
}
/// This is where the actual action of interpreting/compilation happens.
/// Note: this should eventually use a query-based system for parallelization, cf.
/// https://rustc-dev-guide.rust-lang.org/overview.html
fn run_pipeline(&mut self, source: &str, config: SchalaConfig) -> Result<String, SchalaError> {
self.timings = vec![];
let sw = Stopwatch::start_new();
self.source_reference.load_new_source(source);
let ast = self
.active_parser
.parse(source)
.map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;
self.timings.push(("parsing", sw.elapsed()));
let sw = Stopwatch::start_new();
//Perform all symbol table work
self.symbol_table
.process_ast(&ast, &mut self.type_context)
.map_err(SchalaError::from_symbol_table)?;
self.timings.push(("symbol_table", sw.elapsed()));
// Typechecking
let _overall_type = self.type_context.typecheck(&ast).map_err(SchalaError::from_type_error);
let sw = Stopwatch::start_new();
let reduced_ir = reduced_ir::reduce(&ast, &self.symbol_table, &self.type_context);
self.timings.push(("reduced_ir", sw.elapsed()));
let sw = Stopwatch::start_new();
let evaluation_outputs = self.eval_state.evaluate(reduced_ir, &self.type_context, config.repl);
self.timings.push(("tree-walking-evaluation", sw.elapsed()));
let text_output: Result<Vec<String>, String> = evaluation_outputs.into_iter().collect();
let text_output: Result<Vec<String>, SchalaError> =
text_output.map_err(|err| SchalaError::from_string(err, Stage::Evaluation));
let eval_output: String =
text_output.map(|v| Iterator::intersperse(v.into_iter(), "\n".to_owned()).collect())?;
Ok(eval_output)
}
}
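// To summarize the stages run_pipeline drives, in order: parse the source into an AST,
// populate the symbol table, run type inference, reduce the AST to the ReducedIR, and
// finally evaluate it with the tree-walking interpreter, recording per-stage timings for
// most stages along the way.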
/// Represents lines of source code
pub(crate) struct SourceReference {
last_source: Option<String>,
/// Byte offsets (not char offsets) of each newline character in the source
newline_offsets: Vec<usize>,
}
impl SourceReference {
pub(crate) fn new() -> SourceReference {
SourceReference { last_source: None, newline_offsets: vec![] }
}
pub(crate) fn load_new_source(&mut self, source: &str) {
self.newline_offsets = vec![];
for (offset, ch) in source.as_bytes().iter().enumerate() {
if *ch == b'\n' {
self.newline_offsets.push(offset);
}
}
self.last_source = Some(source.to_string());
}
// (line_start, line_num, the string itself)
pub fn get_line(&self, line: usize) -> (usize, usize, String) {
if self.newline_offsets.is_empty() {
return (0, 0, self.last_source.as_ref().cloned().unwrap());
}
//TODO make sure this is utf8-safe
let start_idx = match self.newline_offsets.binary_search(&line) {
Ok(index) | Err(index) => index,
};
let last_source = self.last_source.as_ref().unwrap();
let start = self.newline_offsets[start_idx];
let end = self.newline_offsets.get(start_idx + 1).cloned().unwrap_or_else(|| last_source.len());
let slice = &last_source.as_bytes()[start..end];
(start, start_idx, std::str::from_utf8(slice).unwrap().to_string())
}
}
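// A small sketch of the byte-offset indexing performed by load_new_source; the source text
// here is arbitrary. "let a = 1" is nine bytes long, so the two newlines land at byte
// offsets 9 and 19.
#[test]
fn newline_offsets_sketch() {
    let mut source_reference = SourceReference::new();
    source_reference.load_new_source("let a = 1\nlet b = 2\n");
    assert_eq!(source_reference.newline_offsets, vec![9, 19]);
}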
#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
pub(crate) enum Stage {
Parsing,
Symbols,
ScopeResolution,
Typechecking,
AstReduction,
Evaluation,
}
fn stage_names() -> Vec<&'static str> {
vec!["parsing", "symbol-table", "typechecking", "ast-reduction", "ast-walking-evaluation"]
}
#[derive(Default, Clone)]
pub struct SchalaConfig {
pub repl: bool,
}
impl<'a> ProgrammingLanguageInterface for Schala<'a> {
//TODO flesh out Config
type Config = SchalaConfig;
fn language_name() -> String {
"Schala".to_owned()
}
fn source_file_suffix() -> String {
"schala".to_owned()
}
fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse {
let ComputationRequest { source, debug_requests: _, config: _ } = request;
let sw = Stopwatch::start_new();
let main_output =
self.run_pipeline(source, request.config).map_err(|schala_err| schala_err.display());
let total_duration = sw.elapsed();
let stage_durations: Vec<_> = std::mem::take(&mut self.timings)
.into_iter()
.map(|(label, duration)| (label.to_string(), duration))
.collect();
let global_output_stats = GlobalOutputStats { total_duration, stage_durations };
ComputationResponse { main_output, global_output_stats, debug_responses: vec![] }
}
fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
match request {
LangMetaRequest::StageNames =>
LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()),
_ => LangMetaResponse::Custom { kind: "not-implemented".to_string(), value: "".to_string() },
}
}
}


@ -0,0 +1,121 @@
use std::collections::HashMap;
use std::rc::Rc;
use std::fmt;
use std::fmt::Write;
use ast;
use typechecking::TypeName;
//cf. p. 150 or so of Language Implementation Patterns
pub struct SymbolTable {
pub values: HashMap<Rc<String>, Symbol> //TODO this will eventually have real type information
}
impl SymbolTable {
pub fn new() -> SymbolTable {
SymbolTable { values: HashMap::new() }
}
}
#[derive(Debug)]
pub struct Symbol {
pub name: Rc<String>,
pub spec: SymbolSpec,
}
impl fmt::Display for Symbol {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<Name: {}, Spec: {}>", self.name, self.spec)
}
}
#[derive(Debug)]
pub enum SymbolSpec {
Func(Vec<TypeName>),
DataConstructor {
type_name: Rc<String>,
type_args: Vec<Rc<String>>,
},
}
impl fmt::Display for SymbolSpec {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::SymbolSpec::*;
match self {
Func(type_names) => write!(f, "Func({:?})", type_names),
DataConstructor { type_name, type_args } => write!(f, "DataConstructor({:?} -> {})", type_args, type_name),
}
}
}
impl SymbolTable {
/* note: this adds names for *forward reference* but doesn't actually create any types. solve that problem
* later */
pub fn add_top_level_symbols(&mut self, ast: &ast::AST) -> Result<(), String> {
use self::ast::{Statement, TypeName, Variant, TypeSingletonName, TypeBody};
use self::ast::Declaration::*;
for statement in ast.0.iter() {
if let Statement::Declaration(decl) = statement {
match decl {
FuncSig(signature) | FuncDecl(signature, _) => {
let mut ch: char = 'a';
let mut types = vec![];
for param in signature.params.iter() {
match param {
(_, Some(ty)) => {
//TODO eventually handle this case differently
types.push(Rc::new(format!("{}", ch)));
ch = ((ch as u8) + 1) as char;
},
(_, None) => {
types.push(Rc::new(format!("{}", ch)));
ch = ((ch as u8) + 1) as char;
}
}
}
let spec = SymbolSpec::Func(types);
self.values.insert(
signature.name.clone(),
Symbol { name: signature.name.clone(), spec }
);
},
TypeDecl { name: TypeSingletonName { name, params}, body: TypeBody(variants), mutable } => {
for var in variants {
match var {
Variant::UnitStruct(variant_name) => {
let spec = SymbolSpec::DataConstructor {
type_name: name.clone(),
type_args: vec![],
};
self.values.insert(variant_name.clone(), Symbol { name: variant_name.clone(), spec });
},
Variant::TupleStruct(variant_name, tuple_members) => {
let type_args = tuple_members.iter().map(|type_name| match type_name {
TypeName::Singleton(TypeSingletonName { name, ..}) => name.clone(),
TypeName::Tuple(_) => unimplemented!(),
}).collect();
let spec = SymbolSpec::DataConstructor {
type_name: name.clone(),
type_args
};
let symbol = Symbol { name: variant_name.clone(), spec };
self.values.insert(variant_name.clone(), symbol);
},
e => return Err(format!("{:?} not supported in typing yet", e)),
}
}
},
_ => ()
}
}
}
Ok(())
}
pub fn debug_symbol_table(&self) -> String {
let mut output = String::from("Symbol table\n");
for (name, sym) in &self.values {
writeln!(output, "{} -> {}", name, sym).unwrap();
}
output
}
}
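// An illustrative note on the placeholder scheme above: each untyped parameter is assigned
// a fresh single-letter type name, so a declaration like `fn foo(x, y)` is recorded roughly as:
//
//     "foo" -> Symbol { name: "foo", spec: SymbolSpec::Func(["a", "b"]) }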


@ -1,65 +0,0 @@
use std::{fmt, rc::Rc};
/// Fully-qualified symbol name
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct Fqsn {
//TODO Fqsn's need to be cheaply cloneable
pub scopes: Vec<ScopeSegment>,
}
impl Fqsn {
pub fn from_scope_stack(scopes: &[ScopeSegment], new_name: Rc<String>) -> Self {
let mut v = Vec::new();
for s in scopes {
v.push(s.clone());
}
v.push(ScopeSegment::Name(new_name));
Fqsn { scopes: v }
}
pub fn extend(&self, new_item: &str) -> Self {
let mut new = self.clone();
new.scopes.push(ScopeSegment::Name(Rc::new(new_item.to_string())));
new
}
#[allow(dead_code)]
pub fn from_strs(strs: &[&str]) -> Fqsn {
let mut scopes = vec![];
for s in strs {
scopes.push(ScopeSegment::Name(Rc::new(s.to_string())));
}
Fqsn { scopes }
}
pub fn last_elem(&self) -> Rc<String> {
let ScopeSegment::Name(name) = self.scopes.last().unwrap();
name.clone()
}
}
impl fmt::Display for Fqsn {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let delim = "::";
let Fqsn { scopes } = self;
write!(f, "FQSN<{}", scopes[0])?;
for item in scopes[1..].iter() {
write!(f, "{}{}", delim, item)?;
}
write!(f, ">")
}
}
//TODO eventually this should use ItemId's to avoid String-cloning
/// One segment within a scope.
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub enum ScopeSegment {
Name(Rc<String>),
}
impl fmt::Display for ScopeSegment {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let ScopeSegment::Name(name) = self;
write!(f, "{}", name)
}
}


@ -1,244 +0,0 @@
#![allow(clippy::enum_variant_names)]
use std::{
collections::{hash_map::Entry, HashMap},
fmt,
rc::Rc,
};
use crate::{
ast,
ast::ItemId,
builtin::Builtin,
parsing::Location,
type_inference::{TypeContext, TypeId},
};
mod populator;
use populator::SymbolTablePopulator;
mod fqsn;
pub use fqsn::{Fqsn, ScopeSegment};
mod resolver;
mod symbol_trie;
use symbol_trie::SymbolTrie;
mod test;
use crate::identifier::{define_id_kind, Id, IdStore};
define_id_kind!(DefItem);
pub type DefId = Id<DefItem>;
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub enum SymbolError {
DuplicateName { prev_name: Fqsn, location: Location },
DuplicateVariant { type_fqsn: Fqsn, name: String },
DuplicateRecord { type_fqsn: Fqsn, location: Location, record: String, member: String },
UnknownAnnotation { name: String },
BadAnnotation { name: String, msg: String },
BadImplBlockEntry,
}
#[allow(dead_code)]
#[derive(Debug)]
struct NameSpec<K> {
location: Location,
kind: K,
}
#[derive(Debug)]
enum NameKind {
Module,
Function,
Binding,
}
#[derive(Debug)]
enum TypeKind {
Function,
Constructor,
}
/// Keeps track of what names were used in a given namespace.
struct NameTable<K> {
table: HashMap<Fqsn, NameSpec<K>>,
}
impl<K> NameTable<K> {
fn new() -> Self {
Self { table: HashMap::new() }
}
fn register(&mut self, name: Fqsn, spec: NameSpec<K>) -> Result<(), SymbolError> {
match self.table.entry(name.clone()) {
Entry::Occupied(o) =>
Err(SymbolError::DuplicateName { prev_name: name, location: o.get().location }),
Entry::Vacant(v) => {
v.insert(spec);
Ok(())
}
}
}
}
//cf. p. 150 or so of Language Implementation Patterns
pub struct SymbolTable {
def_id_store: IdStore<DefItem>,
/// Used for import resolution.
symbol_trie: SymbolTrie,
/// These tables are responsible for preventing duplicate names.
fq_names: NameTable<NameKind>, //Note: the presence of two tables implies that a type and another binding with the same name can coexist
types: NameTable<TypeKind>,
id_to_def: HashMap<ItemId, DefId>,
def_to_symbol: HashMap<DefId, Rc<Symbol>>,
}
impl SymbolTable {
/// Create a new, empty SymbolTable
pub fn new() -> Self {
Self {
def_id_store: IdStore::new(),
symbol_trie: SymbolTrie::new(),
fq_names: NameTable::new(),
types: NameTable::new(),
id_to_def: HashMap::new(),
def_to_symbol: HashMap::new(),
}
}
/// The main entry point into the symbol table. This will traverse the AST in several
/// different ways and populate subtables with information that will be used further in the
/// compilation process.
pub fn process_ast(
&mut self,
ast: &ast::AST,
type_context: &mut TypeContext,
) -> Result<(), Vec<SymbolError>> {
let mut populator = SymbolTablePopulator { type_context, table: self };
let errs = populator.populate_definition_tables(ast);
if !errs.is_empty() {
return Err(errs);
}
// Walks the AST, matching the ID of an identifier used in some expression to
// the corresponding Symbol.
let mut resolver = resolver::ScopeResolver::new(self);
resolver.resolve(ast);
Ok(())
}
pub fn lookup_symbol(&self, id: &ItemId) -> Option<&Symbol> {
let def = self.id_to_def.get(id)?;
self.def_to_symbol.get(def).map(|s| s.as_ref())
}
pub fn lookup_symbol_by_def(&self, def: &DefId) -> Option<&Symbol> {
self.def_to_symbol.get(def).map(|s| s.as_ref())
}
#[allow(dead_code)]
pub fn debug(&self) {
println!("Symbol table:");
println!("----------------");
for (id, def) in self.id_to_def.iter() {
if let Some(symbol) = self.def_to_symbol.get(def) {
println!("{} => {}: {}", id, def, symbol);
} else {
println!("{} => {} <NO SYMBOL FOUND>", id, def);
}
}
}
/// Register a new mapping of a fully-qualified symbol name (e.g. `Option::Some`)
/// to a Symbol, a descriptor of what that name refers to.
fn add_symbol(&mut self, id: &ItemId, fqsn: Fqsn, spec: SymbolSpec) {
let def_id = self.def_id_store.fresh();
let local_name = fqsn.last_elem();
let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), local_name, spec, def_id });
self.symbol_trie.insert(&fqsn, def_id);
self.id_to_def.insert(*id, def_id);
self.def_to_symbol.insert(def_id, symbol);
}
fn populate_single_builtin(&mut self, fqsn: Fqsn, builtin: Builtin) {
let def_id = self.def_id_store.fresh();
let spec = SymbolSpec::Builtin(builtin);
let local_name = fqsn.last_elem();
let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), local_name, spec, def_id });
self.symbol_trie.insert(&fqsn, def_id);
self.def_to_symbol.insert(def_id, symbol);
}
}
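// A minimal usage sketch, assuming the crate's `quick_ast` test helper used elsewhere in
// this codebase:
//
//     let ast = crate::util::quick_ast("let a = 10");
//     let mut table = SymbolTable::new();
//     let mut type_context = crate::type_inference::TypeContext::new();
//     table.process_ast(&ast, &mut type_context).unwrap();
//     // any ItemId recorded during scope resolution can now be mapped back to its Symbol:
//     // table.lookup_symbol(&item_id)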
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct Symbol {
fully_qualified_name: Fqsn,
local_name: Rc<String>,
spec: SymbolSpec,
def_id: DefId,
}
impl Symbol {
pub fn local_name(&self) -> &str {
self.local_name.as_ref()
}
pub fn def_id(&self) -> DefId {
self.def_id
}
pub fn spec(&self) -> SymbolSpec {
self.spec.clone()
}
}
impl fmt::Display for Symbol {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<Local name: {}, {}, Spec: {}>", self.local_name(), self.fully_qualified_name, self.spec)
}
}
//TODO - I think I eventually want to draw a distinction between true global items
//i.e. global vars, and items whose definitions are scoped. Right now there's a sense
//in which Func, DataConstructor, RecordConstructor, and GlobalBinding are "globals",
//whereas LocalVariable and FunctionParam have local scope. But right now, they all
//get put into a common table, and all get DefId's from a common source.
//
//It would be good if individual functions could in parallel look up their own
//local vars without interfering with other lookups. Also some type definitions
//should be scoped in a similar way.
//
//Also it makes sense that non-globals should not use DefId's, particularly not
//function parameters (even though they are currently assigned).
#[derive(Debug, Clone)]
pub enum SymbolSpec {
Builtin(Builtin),
Func { method: Option<crate::ast::TypeSingletonName> },
DataConstructor { tag: u32, type_id: TypeId },
RecordConstructor { tag: u32, type_id: TypeId },
GlobalBinding, //Only for global variables, not for function-local ones or ones within a `let` scope context
LocalVariable,
FunctionParam(u8),
}
impl fmt::Display for SymbolSpec {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::SymbolSpec::*;
match self {
Builtin(b) => write!(f, "Builtin: {:?}", b),
Func { .. } => write!(f, "Func"),
DataConstructor { tag, type_id } => write!(f, "DataConstructor(tag: {}, type: {})", tag, type_id),
RecordConstructor { type_id, tag, .. } =>
write!(f, "RecordConstructor(tag: {})(<members> -> {})", tag, type_id),
GlobalBinding => write!(f, "GlobalBinding"),
LocalVariable => write!(f, "Local variable"),
FunctionParam(n) => write!(f, "Function param: {}", n),
}
}
}


@ -1,352 +0,0 @@
use std::{
collections::{hash_map::Entry, HashMap, HashSet},
rc::Rc,
str::FromStr,
};
use super::{Fqsn, NameKind, NameSpec, ScopeSegment, SymbolError, SymbolSpec, SymbolTable, TypeKind};
use crate::{
ast::{
Declaration, Expression, ExpressionKind, ItemId, Statement, StatementKind, TypeBody,
TypeSingletonName, Variant, VariantKind, AST,
},
builtin::Builtin,
parsing::Location,
type_inference::{self, PendingType, TypeBuilder, TypeContext, VariantBuilder},
};
pub(super) struct SymbolTablePopulator<'a> {
pub(super) type_context: &'a mut TypeContext,
pub(super) table: &'a mut SymbolTable,
}
impl<'a> SymbolTablePopulator<'a> {
/* note: this adds names for *forward reference* but doesn't actually create any types. solve that problem
* later */
fn add_symbol(&mut self, id: &ItemId, fqsn: Fqsn, spec: SymbolSpec) {
self.table.add_symbol(id, fqsn, spec)
}
/// This function traverses the AST and adds symbol table entries for
/// constants, functions, types, and modules defined within. This simultaneously
/// checks for duplicate definitions (and returns errors if discovered), and sets
/// up name tables that will be used by further parts of the compiler
pub fn populate_definition_tables(&mut self, ast: &AST) -> Vec<SymbolError> {
let mut scope_stack = vec![];
self.add_from_scope(ast.statements.as_ref(), &mut scope_stack, false)
}
fn add_from_scope(
&mut self,
statements: &[Statement<StatementKind>],
scope_stack: &mut Vec<ScopeSegment>,
function_scope: bool,
) -> Vec<SymbolError> {
let mut errors = vec![];
for statement in statements {
let Statement { id, kind, location } = statement;
let location = *location;
if let Err(err) = self.add_single_statement(id, kind, location, scope_stack, function_scope) {
errors.push(err);
} else {
let decl = match kind {
StatementKind::Declaration(decl) => decl,
_ => continue,
};
// If there's an error with a name, don't recurse into subscopes of that name
let recursive_errs = match decl {
Declaration::FuncDecl(signature, body) => {
let new_scope = ScopeSegment::Name(signature.name.clone());
scope_stack.push(new_scope);
let output = self.add_from_scope(body.as_ref(), scope_stack, true);
scope_stack.pop();
output
}
Declaration::Module { name, items } => {
let new_scope = ScopeSegment::Name(name.clone());
scope_stack.push(new_scope);
let output = self.add_from_scope(items.as_ref(), scope_stack, false);
scope_stack.pop();
output
}
Declaration::TypeDecl { name, body, mutable } => {
let type_fqsn = Fqsn::from_scope_stack(scope_stack, name.name.clone());
self.add_type_members(name, body, mutable, location, type_fqsn)
}
Declaration::Impl { type_name, interface_name: _, block } => {
let mut errors = vec![];
let new_scope = ScopeSegment::Name(Rc::new(format!("<impl-block>{}", type_name)));
scope_stack.push(new_scope);
for decl_stmt in block.iter() {
let Statement { id, kind, location } = decl_stmt;
let location = *location;
match kind {
decl @ Declaration::FuncDecl(signature, body) => {
let output =
self.add_single_declaration(id, decl, location, scope_stack, true);
if let Err(e) = output {
errors.push(e);
};
let new_scope = ScopeSegment::Name(signature.name.clone());
scope_stack.push(new_scope);
let output = self.add_from_scope(body.as_ref(), scope_stack, true);
scope_stack.pop();
errors.extend(output.into_iter());
}
_other => errors.push(SymbolError::BadImplBlockEntry),
};
}
scope_stack.pop();
errors
}
_ => vec![],
};
errors.extend(recursive_errs.into_iter());
}
}
errors
}
fn add_single_statement(
&mut self,
id: &ItemId,
kind: &StatementKind,
location: Location,
scope_stack: &[ScopeSegment],
function_scope: bool,
) -> Result<(), SymbolError> {
match kind {
StatementKind::Declaration(decl) =>
self.add_single_declaration(id, decl, location, scope_stack, function_scope),
_ => return Ok(()),
}
}
fn add_single_declaration(
&mut self,
id: &ItemId,
decl: &Declaration,
location: Location,
scope_stack: &[ScopeSegment],
function_scope: bool,
) -> Result<(), SymbolError> {
match decl {
Declaration::FuncSig(signature) => {
let fq_function = Fqsn::from_scope_stack(scope_stack, signature.name.clone());
self.table
.fq_names
.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
self.table
.types
.register(fq_function.clone(), NameSpec { location, kind: TypeKind::Function })?;
self.add_symbol(id, fq_function, SymbolSpec::Func { method: None });
}
Declaration::FuncDecl(signature, ..) => {
let fn_name = &signature.name;
let fq_function = Fqsn::from_scope_stack(scope_stack, fn_name.clone());
self.table
.fq_names
.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
self.table
.types
.register(fq_function.clone(), NameSpec { location, kind: TypeKind::Function })?;
self.add_symbol(id, fq_function, SymbolSpec::Func { method: None });
}
Declaration::TypeDecl { name, .. } => {
let fq_type = Fqsn::from_scope_stack(scope_stack, name.name.clone());
self.table.types.register(fq_type, NameSpec { location, kind: TypeKind::Constructor })?;
}
//TODO handle type aliases
Declaration::TypeAlias { .. } => (),
Declaration::Binding { name, .. } => {
let fq_binding = Fqsn::from_scope_stack(scope_stack, name.clone());
self.table
.fq_names
.register(fq_binding.clone(), NameSpec { location, kind: NameKind::Binding })?;
if !function_scope {
self.add_symbol(id, fq_binding, SymbolSpec::GlobalBinding);
}
}
//TODO implement interfaces
Declaration::Interface { .. } => (),
Declaration::Impl { .. } => (),
Declaration::Module { name, .. } => {
let fq_module = Fqsn::from_scope_stack(scope_stack, name.clone());
self.table.fq_names.register(fq_module, NameSpec { location, kind: NameKind::Module })?;
}
Declaration::Annotation { name, arguments, inner } => {
let inner = inner.as_ref();
self.add_single_statement(
&inner.id,
&inner.kind,
inner.location,
scope_stack,
function_scope,
)?;
self.process_annotation(name.as_ref(), arguments.as_slice(), scope_stack, inner)?;
}
}
Ok(())
}
fn process_annotation(
&mut self,
name: &str,
arguments: &[Expression],
scope_stack: &[ScopeSegment],
inner: &Statement<StatementKind>,
) -> Result<(), SymbolError> {
if name == "register_builtin" {
if let Statement {
id: _,
location: _,
kind: StatementKind::Declaration(Declaration::FuncDecl(sig, _)),
} = inner
{
let fqsn = Fqsn::from_scope_stack(scope_stack, sig.name.clone());
let builtin_name = match arguments {
[Expression { kind: ExpressionKind::Value(qname), .. }]
if qname.components.len() == 1 =>
qname.components[0].clone(),
_ =>
return Err(SymbolError::BadAnnotation {
name: name.to_string(),
msg: "Bad argument for register_builtin".to_string(),
}),
};
let builtin =
Builtin::from_str(builtin_name.as_str()).map_err(|_| SymbolError::BadAnnotation {
name: name.to_string(),
msg: format!("Invalid builtin: {}", builtin_name),
})?;
self.table.populate_single_builtin(fqsn, builtin);
Ok(())
} else {
Err(SymbolError::BadAnnotation {
name: name.to_string(),
msg: "register_builtin not annotating a function".to_string(),
})
}
} else {
Err(SymbolError::UnknownAnnotation { name: name.to_string() })
}
}
fn add_type_members(
&mut self,
type_name: &TypeSingletonName,
type_body: &TypeBody,
_mutable: &bool,
location: Location,
type_fqsn: Fqsn,
) -> Vec<SymbolError> {
let (variants, immediate_variant) = match type_body {
TypeBody::Variants(variants) => (variants.clone(), false),
TypeBody::ImmediateRecord { id, fields } => (
vec![Variant {
id: *id,
name: type_name.name.clone(),
kind: VariantKind::Record(fields.clone()),
}],
true,
),
};
// Check for duplicates before registering any types with the TypeContext
let mut seen_variants = HashSet::new();
let mut errors = vec![];
for variant in variants.iter() {
if seen_variants.contains(&variant.name) {
errors.push(SymbolError::DuplicateVariant {
type_fqsn: type_fqsn.clone(),
name: variant.name.as_ref().to_string(),
})
}
seen_variants.insert(variant.name.clone());
if let VariantKind::Record(ref members) = variant.kind {
let mut seen_members = HashMap::new();
for (member_name, _) in members.iter() {
match seen_members.entry(member_name.as_ref()) {
Entry::Occupied(o) => {
let location = *o.get();
errors.push(SymbolError::DuplicateRecord {
type_fqsn: type_fqsn.clone(),
location,
record: variant.name.as_ref().to_string(),
member: member_name.as_ref().to_string(),
});
}
//TODO eventually this should track meaningful locations
Entry::Vacant(v) => {
v.insert(location);
}
}
}
}
}
if !errors.is_empty() {
return errors;
}
let mut type_builder = TypeBuilder::new(type_name.name.as_ref());
let mut variant_name_map = HashMap::new();
for variant in variants.iter() {
let Variant { name, kind, id } = variant;
variant_name_map.insert(name.clone(), id);
let mut variant_builder = VariantBuilder::new(name.as_ref());
match kind {
VariantKind::UnitStruct => (),
VariantKind::TupleStruct(items) =>
for type_identifier in items {
let pending: PendingType = type_identifier.into();
variant_builder.add_member(pending);
},
VariantKind::Record(members) =>
for (field_name, type_identifier) in members.iter() {
let pending: PendingType = type_identifier.into();
variant_builder.add_record_member(field_name.as_ref(), pending);
},
}
type_builder.add_variant(variant_builder);
}
let type_id = self.type_context.register_type(type_builder);
let type_definition = self.type_context.lookup_type(&type_id).unwrap();
// This index is guaranteed to be the correct tag
for (index, variant) in type_definition.variants.iter().enumerate() {
let id = variant_name_map.get(&variant.name).unwrap();
let tag = index as u32;
let spec = match &variant.members {
type_inference::VariantMembers::Unit => SymbolSpec::DataConstructor { tag, type_id },
type_inference::VariantMembers::Tuple(..) => SymbolSpec::DataConstructor { tag, type_id },
type_inference::VariantMembers::Record(..) => SymbolSpec::RecordConstructor { tag, type_id },
};
self.table.add_symbol(id, type_fqsn.extend(&variant.name), spec);
}
if immediate_variant {
let variant = &type_definition.variants[0];
let id = variant_name_map.get(&variant.name).unwrap();
let spec = SymbolSpec::RecordConstructor { tag: 0, type_id };
self.table.add_symbol(id, type_fqsn, spec);
}
vec![]
}
}


@ -1,253 +0,0 @@
use std::rc::Rc;
use crate::{
ast::*,
symbol_table::{Fqsn, ScopeSegment, SymbolSpec, SymbolTable},
util::ScopeStack,
};
#[derive(Debug)]
enum NameType {
//TODO eventually this needs to support closures
Param(u8),
LocalVariable(ItemId),
LocalFunction(ItemId),
Import(Fqsn),
}
type LexScope<'a> = ScopeStack<'a, Rc<String>, NameType, ScopeType>;
#[derive(Debug)]
enum ScopeType {
Function { name: Rc<String> },
Lambda,
PatternMatch,
ImplBlock,
//TODO add some notion of a let-like scope?
}
pub struct ScopeResolver<'a> {
symbol_table: &'a mut super::SymbolTable,
lexical_scopes: LexScope<'a>,
}
impl<'a> ScopeResolver<'a> {
pub fn new(symbol_table: &'a mut SymbolTable) -> Self {
let lexical_scopes = ScopeStack::new(None);
Self { symbol_table, lexical_scopes }
}
pub fn resolve(&mut self, ast: &AST) {
walk_ast(self, ast);
}
/// This method correctly modifies the id_to_def table (ItemId) to have the appropriate
/// mappings.
fn lookup_name_in_scope(&mut self, name: &QualifiedName) {
//TODO this method badly needs attention
let QualifiedName { id, components } = name;
let local_name = components.first().unwrap().clone();
let name_type = self.lexical_scopes.lookup(&local_name);
let fqsn = Fqsn { scopes: components.iter().map(|name| ScopeSegment::Name(name.clone())).collect() };
let def_id = self.symbol_table.symbol_trie.lookup(&fqsn);
//TODO handle a "partial" qualified name, and also handle it down in the pattern-matching
//section
if components.len() == 1 {
match name_type {
Some(NameType::Import(fqsn)) => {
let def_id = self.symbol_table.symbol_trie.lookup(fqsn);
if let Some(def_id) = def_id {
self.symbol_table.id_to_def.insert(*id, def_id);
}
}
Some(NameType::Param(n)) => {
let spec = SymbolSpec::FunctionParam(*n);
//TODO need to come up with a better solution for local variable FQSNs
let lscope = ScopeSegment::Name(Rc::new("<local-param>".to_string()));
let fqsn = Fqsn { scopes: vec![lscope, ScopeSegment::Name(local_name.clone())] };
self.symbol_table.add_symbol(id, fqsn, spec);
}
Some(NameType::LocalFunction(item_id)) => {
let def_id = self.symbol_table.id_to_def.get(item_id);
if let Some(def_id) = def_id {
let def_id = *def_id;
self.symbol_table.id_to_def.insert(*id, def_id);
}
}
Some(NameType::LocalVariable(item_id)) => {
let def_id = self.symbol_table.id_to_def.get(item_id);
if let Some(def_id) = def_id {
let def_id = *def_id;
self.symbol_table.id_to_def.insert(*id, def_id);
}
}
None =>
if let Some(def_id) = def_id {
self.symbol_table.id_to_def.insert(*id, def_id);
},
}
} else if let Some(def_id) = def_id {
self.symbol_table.id_to_def.insert(*id, def_id);
}
}
}
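// In short: a single-segment name is resolved against the lexical scope stack first
// (whichever of import, function parameter, local function, or local variable the scope
// lookup returns), falling back to the fully-qualified-name trie when nothing local
// matches; a multi-segment name such as `a::b::c` goes straight to the trie.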
impl<'a> ASTVisitor for ScopeResolver<'a> {
// Import statements bring in a bunch of local names that all map to a specific FQSN.
// FQSNs map to a Symbol (or this is an error), Symbols have a DefId. So for every
// name we import, we map a local name (a string) to a NameType::ImportedDefinition(DefId).
fn import(&mut self, import_spec: &ImportSpecifier) -> Recursion {
let ImportSpecifier { ref path_components, ref imported_names, .. } = &import_spec;
match imported_names {
ImportedNames::All => {
let prefix =
Fqsn { scopes: path_components.iter().map(|c| ScopeSegment::Name(c.clone())).collect() };
let members = self.symbol_table.symbol_trie.get_children(&prefix);
for fqsn in members.into_iter() {
self.lexical_scopes.insert(fqsn.last_elem(), NameType::Import(fqsn));
}
}
ImportedNames::LastOfPath => {
let fqsn =
Fqsn { scopes: path_components.iter().map(|c| ScopeSegment::Name(c.clone())).collect() };
self.lexical_scopes.insert(fqsn.last_elem(), NameType::Import(fqsn));
}
ImportedNames::List(ref names) => {
let fqsn_prefix: Vec<ScopeSegment> =
path_components.iter().map(|c| ScopeSegment::Name(c.clone())).collect();
for name in names.iter() {
let mut scopes = fqsn_prefix.clone();
scopes.push(ScopeSegment::Name(name.clone()));
let fqsn = Fqsn { scopes };
self.lexical_scopes.insert(fqsn.last_elem(), NameType::Import(fqsn));
}
}
};
Recursion::Continue
}
fn declaration(&mut self, declaration: &Declaration, id: &ItemId) -> Recursion {
let cur_function_name = match self.lexical_scopes.get_name() {
//TODO this needs to be a fqsn
Some(ScopeType::Function { name }) => Some(name.clone()),
_ => None,
};
match declaration {
Declaration::FuncDecl(signature, block) => {
let param_names = signature.params.iter().map(|param| param.name.clone());
//TODO I'm 90% sure this is right, until I get to closures
//let mut new_scope = self.lexical_scopes.new_scope(Some(ScopeType::Function { name: signature.name.clone() }));
//TODO this will recurse unwantedly into scopes; need to pop an outer function
//scope off first before going into a non-closure scope
let mut new_scope =
ScopeStack::new(Some(ScopeType::Function { name: signature.name.clone() }));
for (n, param) in param_names.enumerate() {
new_scope.insert(param, NameType::Param(n as u8));
}
self.lexical_scopes.insert(signature.name.clone(), NameType::LocalFunction(*id));
let mut new_resolver =
ScopeResolver { symbol_table: self.symbol_table, lexical_scopes: new_scope };
walk_block(&mut new_resolver, block);
Recursion::Stop
}
Declaration::Binding { name, .. } => {
if let Some(fn_name) = cur_function_name {
// We are within a function scope
let fqsn =
Fqsn { scopes: vec![ScopeSegment::Name(fn_name), ScopeSegment::Name(name.clone())] };
self.symbol_table.add_symbol(id, fqsn, SymbolSpec::LocalVariable);
self.lexical_scopes.insert(name.clone(), NameType::LocalVariable(*id));
}
Recursion::Continue
}
Declaration::Impl { block, .. } => {
let new_scope = ScopeStack::new(Some(ScopeType::ImplBlock));
let mut new_resolver =
ScopeResolver { symbol_table: self.symbol_table, lexical_scopes: new_scope };
for stmt in block.iter() {
walk_declaration(&mut new_resolver, &stmt.kind, &stmt.id);
}
Recursion::Stop
}
_ => Recursion::Continue,
}
}
fn expression(&mut self, expression: &Expression) -> Recursion {
use ExpressionKind::*;
match &expression.kind {
Value(name) => {
self.lookup_name_in_scope(name);
}
NamedStruct { name, fields: _ } => {
self.lookup_name_in_scope(name);
}
Lambda { params, body, .. } => {
let param_names = params.iter().map(|param| param.name.clone());
//TODO need to properly handle closure scope, this is currently broken
//let mut new_scope = self.lexical_scopes.new_scope(Some(ScopeType::Function { name: signature.name.clone() }));
let mut new_scope = ScopeStack::new(Some(ScopeType::Lambda));
for (n, param) in param_names.enumerate() {
new_scope.insert(param, NameType::Param(n as u8));
}
let mut new_resolver =
ScopeResolver { symbol_table: self.symbol_table, lexical_scopes: new_scope };
walk_block(&mut new_resolver, body);
return Recursion::Stop;
}
IfExpression { discriminator, body } => {
if let Some(d) = discriminator.as_ref() {
walk_expression(self, d);
}
let mut resolver = ScopeResolver {
lexical_scopes: self.lexical_scopes.new_scope(Some(ScopeType::PatternMatch)),
symbol_table: self.symbol_table,
};
walk_if_expr_body(&mut resolver, body);
return Recursion::Stop;
}
_ => (),
}
Recursion::Continue
}
fn pattern(&mut self, pat: &Pattern) -> Recursion {
use Pattern::*;
match pat {
Literal(..) | Ignored | TuplePattern(..) => (),
TupleStruct(name, _) | Record(name, _) => {
self.lookup_name_in_scope(name);
}
//TODO this isn't really the right syntax for a VarOrName
VarOrName(QualifiedName { id, components }) => {
if components.len() == 1 {
//TODO need a better way to construct a FQSN from a QualifiedName
let local_name: Rc<String> = components[0].clone();
let lscope = ScopeSegment::Name(Rc::new("<local-case-match>".to_string()));
let fqsn = Fqsn { scopes: vec![lscope, ScopeSegment::Name(local_name.clone())] };
self.symbol_table.add_symbol(id, fqsn, SymbolSpec::LocalVariable);
self.lexical_scopes.insert(local_name, NameType::LocalVariable(*id));
} else {
let fqsn = Fqsn {
scopes: components.iter().map(|name| ScopeSegment::Name(name.clone())).collect(),
};
let def_id = self.symbol_table.symbol_trie.lookup(&fqsn);
if let Some(def_id) = def_id {
self.symbol_table.id_to_def.insert(*id, def_id);
}
}
}
};
Recursion::Continue
}
}


@ -1,70 +0,0 @@
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use radix_trie::{Trie, TrieCommon, TrieKey};
use super::{DefId, Fqsn, ScopeSegment};
#[derive(Debug)]
pub struct SymbolTrie(Trie<Fqsn, DefId>);
impl TrieKey for Fqsn {
fn encode_bytes(&self) -> Vec<u8> {
let mut hasher = DefaultHasher::new();
let mut output = vec![];
for segment in self.scopes.iter() {
let ScopeSegment::Name(s) = segment;
s.as_bytes().hash(&mut hasher);
output.extend_from_slice(&hasher.finish().to_be_bytes());
}
output
}
}
impl SymbolTrie {
pub fn new() -> SymbolTrie {
SymbolTrie(Trie::new())
}
pub fn insert(&mut self, fqsn: &Fqsn, def_id: DefId) {
self.0.insert(fqsn.clone(), def_id);
}
pub fn lookup(&self, fqsn: &Fqsn) -> Option<DefId> {
self.0.get(fqsn).cloned()
}
pub fn get_children(&self, fqsn: &Fqsn) -> Vec<Fqsn> {
let subtrie = match self.0.subtrie(fqsn) {
Some(s) => s,
None => return vec![],
};
let output: Vec<Fqsn> = subtrie.keys().filter(|cur_key| **cur_key != *fqsn).cloned().collect();
output
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::symbol_table::Fqsn;
fn make_fqsn(strs: &[&str]) -> Fqsn {
Fqsn::from_strs(strs)
}
#[test]
fn test_trie_insertion() {
let id = DefId::default();
let mut trie = SymbolTrie::new();
trie.insert(&make_fqsn(&["unrelated", "thing"]), id);
trie.insert(&make_fqsn(&["outer", "inner"]), id);
trie.insert(&make_fqsn(&["outer", "inner", "still_inner"]), id);
let children = trie.get_children(&make_fqsn(&["outer", "inner"]));
assert_eq!(children.len(), 1);
}
}


@ -1,314 +0,0 @@
#![cfg(test)]
use assert_matches::assert_matches;
use super::*;
use crate::util::quick_ast;
fn add_symbols(src: &str) -> (SymbolTable, Result<(), Vec<SymbolError>>) {
let ast = quick_ast(src);
let mut symbol_table = SymbolTable::new();
let mut type_context = crate::type_inference::TypeContext::new();
let result = symbol_table.process_ast(&ast, &mut type_context);
(symbol_table, result)
}
fn make_fqsn(strs: &[&str]) -> Fqsn {
Fqsn::from_strs(strs)
}
#[test]
fn basic_symbol_table() {
let src = "let a = 10; fn b() { 20 }";
let (symbols, _) = add_symbols(src);
fn make_fqsn(strs: &[&str]) -> Fqsn {
Fqsn::from_strs(strs)
}
symbols.fq_names.table.get(&make_fqsn(&["b"])).unwrap();
let src = "type Option<T> = Some(T) | None";
let (symbols, _) = add_symbols(src);
symbols.types.table.get(&make_fqsn(&["Option"])).unwrap();
}
#[test]
fn no_function_definition_duplicates() {
let source = r#"
fn a() { 1 }
fn b() { 2 }
fn a() { 3 }
"#;
let (_, output) = add_symbols(source);
let errs = output.unwrap_err();
assert_matches!(&errs[..], [
SymbolError::DuplicateName { prev_name, ..}
] if prev_name == &Fqsn::from_strs(&["a"])
);
}
#[test]
fn no_variable_definition_duplicates() {
let source = r#"
let x = 9
let a = 20
let q = 39
let a = 30
let x = 34
"#;
let (_, output) = add_symbols(source);
let errs = output.unwrap_err();
assert_matches!(&errs[..], [
SymbolError::DuplicateName { prev_name: pn1, ..},
SymbolError::DuplicateName { prev_name: pn2, ..}
] if pn1 == &Fqsn::from_strs(&["a"]) && pn2 == &Fqsn::from_strs(&["x"])
);
}
#[test]
fn no_type_definition_duplicates() {
let source = r#"
let x = 9
type Food = Japchae | Burrito | Other
type Food = GoodJapchae | Breadfruit
"#;
let (_, output) = add_symbols(source);
let errs = output.unwrap_err();
let err = &errs[0];
match err {
SymbolError::DuplicateName { location: _, prev_name } => {
assert_eq!(prev_name, &Fqsn::from_strs(&["Food"]));
//TODO restore this Location test
//assert_eq!(location, &Location { line_num: 2, char_num: 2 });
}
_ => panic!(),
}
}
#[test]
fn no_variant_duplicates() {
let source = r#"
type Panda = FoolsGold | Kappa(i32) | Remix | Kappa | Thursday | Remix
"#;
let (_, output) = add_symbols(source);
let errs = output.unwrap_err();
assert_eq!(errs.len(), 2);
assert_matches!(&errs[0], SymbolError::DuplicateVariant {
type_fqsn, name } if *type_fqsn == Fqsn::from_strs(&["Panda"]) &&
name == "Kappa");
assert_matches!(&errs[1], SymbolError::DuplicateVariant {
type_fqsn, name } if *type_fqsn == Fqsn::from_strs(&["Panda"]) &&
name == "Remix");
}
#[test]
fn no_variable_definition_duplicates_in_function() {
let source = r#"
fn a() {
let a = 20
let b = 40
a + b
}
fn q() {
let a = 29
let x = 30
let x = 33
}
"#;
let (_, output) = add_symbols(source);
let errs = output.unwrap_err();
assert_matches!(&errs[..], [
SymbolError::DuplicateName { prev_name: pn1, ..},
] if pn1 == &Fqsn::from_strs(&["q", "x"])
);
}
#[test]
fn dont_falsely_detect_duplicates() {
let source = r#"
let a = 20;
fn some_func() {
let a = 40;
77
}
let q = 39
"#;
let (symbols, _) = add_symbols(source);
assert!(symbols.fq_names.table.get(&make_fqsn(&["a"])).is_some());
assert!(symbols.fq_names.table.get(&make_fqsn(&["some_func", "a"])).is_some());
}
#[test]
fn enclosing_scopes() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
x + inner_func(x)
}"#;
let (symbols, _) = add_symbols(source);
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func"])).is_some());
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "inner_func"])).is_some());
}
#[test]
fn enclosing_scopes_2() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
fn second_inner_func() {
fn another_inner_func() {
}
}
inner_func(x)
}
"#;
let (symbols, _) = add_symbols(source);
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func"])).is_some());
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "inner_func"])).is_some());
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "second_inner_func"])).is_some());
assert!(symbols
.fq_names
.table
.get(&make_fqsn(&["outer_func", "second_inner_func", "another_inner_func"]))
.is_some());
}
#[test]
fn enclosing_scopes_3() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
fn second_inner_func() {
fn another_inner_func() {
}
fn another_inner_func() {
}
}
inner_func(x)
}"#;
let (_, output) = add_symbols(source);
let _err = output.unwrap_err();
}
#[test]
fn modules() {
let source = r#"
module stuff {
fn item() {
}
}
fn item()
"#;
let (symbols, _) = add_symbols(source);
symbols.fq_names.table.get(&make_fqsn(&["stuff"])).unwrap();
symbols.fq_names.table.get(&make_fqsn(&["item"])).unwrap();
symbols.fq_names.table.get(&make_fqsn(&["stuff", "item"])).unwrap();
}
#[test]
fn duplicate_modules() {
let source = r#"
module q {
fn foo() { 4 }
}
module a {
fn foo() { 334 }
}
module a {
fn sarat() { 39 }
fn foo() { 256.1 }
}
"#;
let (_, output) = add_symbols(source);
let errs = output.unwrap_err();
assert_matches!(&errs[..], [
SymbolError::DuplicateName { prev_name: pn1, ..},
] if pn1 == &Fqsn::from_strs(&["a"])
);
}
#[test]
fn duplicate_struct_members() {
let source = r#"
type Tarak = Tarak {
loujet: i32
,
mets: i32,
mets: i32
,
}
"#;
let (_, output) = add_symbols(source);
let errs = dbg!(output.unwrap_err());
assert_matches!(&errs[..], [
SymbolError::DuplicateRecord {
type_fqsn, member, record, ..},
] if type_fqsn == &Fqsn::from_strs(&["Tarak"]) && member == "mets" && record == "Tarak"
);
}
#[test]
fn method_definition_added_to_symbol_table() {
let source = r#"
type Foo = { x: Int, y: Int }
impl Foo {
fn hella() {
let a = 50
self.x + a
}
}
"#;
let (symbols, _) = add_symbols(source);
symbols.debug();
assert!(symbols.fq_names.table.get(&make_fqsn(&["<impl-block>Foo", "hella"])).is_some());
assert!(symbols.fq_names.table.get(&make_fqsn(&["<impl-block>Foo", "hella", "a"])).is_some());
}
#[test]
fn duplicate_method_definitions_detected() {
let source = r#"
type Foo = { x: Int, y: Int }
impl Foo {
fn hella() {
self.x + 50
}
fn hella() {
self.x + 40
}
}
"#;
let (_symbols, output) = add_symbols(source);
let errs = output.unwrap_err();
assert_matches!(&errs[..], [
SymbolError::DuplicateName { prev_name: pn1, ..},
] if pn1 == &Fqsn::from_strs(&["<impl-block>Foo", "hella"]));
}
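// A hedged extra check (not in the original tests): the same function name defined in
// two different modules should not be reported as a duplicate.
#[test]
fn sibling_modules_dont_collide() {
let source = r#"
module first {
fn foo() { 1 }
}
module second {
fn foo() { 2 }
}
"#;
let (symbols, output) = add_symbols(source);
assert!(output.is_ok());
assert!(symbols.fq_names.table.get(&make_fqsn(&["first", "foo"])).is_some());
assert!(symbols.fq_names.table.get(&make_fqsn(&["second", "foo"])).is_some());
}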


@ -0,0 +1,316 @@
use itertools::Itertools;
use std::collections::HashMap;
use std::rc::Rc;
use std::iter::{Iterator, Peekable};
use std::fmt;
#[derive(Debug, PartialEq, Clone)]
pub enum TokenType {
Newline, Semicolon,
LParen, RParen,
LSquareBracket, RSquareBracket,
LAngleBracket, RAngleBracket,
LCurlyBrace, RCurlyBrace,
Pipe,
Comma, Period, Colon, Underscore,
Slash,
Operator(Rc<String>),
DigitGroup(Rc<String>), HexLiteral(Rc<String>), BinNumberSigil,
StrLiteral(Rc<String>),
Identifier(Rc<String>),
Keyword(Kw),
EOF,
Error(String),
}
use self::TokenType::*;
impl fmt::Display for TokenType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Operator(ref s) => write!(f, "Operator({})", **s),
&DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
&HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
&StrLiteral(ref s) => write!(f, "StrLiteral({})", s),
&Identifier(ref s) => write!(f, "Identifier({})", s),
&Error(ref s) => write!(f, "Error({})", s),
other => write!(f, "{:?}", other),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Kw {
If, Then, Else,
Is,
Func,
For, While,
Match,
Const, Let, In,
Mut,
Return,
Alias, Type, SelfType, SelfIdent,
Interface, Impl,
True, False,
Module
}
lazy_static! {
static ref KEYWORDS: HashMap<&'static str, Kw> =
hashmap! {
"if" => Kw::If,
"then" => Kw::Then,
"else" => Kw::Else,
"is" => Kw::Is,
"fn" => Kw::Func,
"for" => Kw::For,
"while" => Kw::While,
"const" => Kw::Const,
"let" => Kw::Let,
"in" => Kw::In,
"mut" => Kw::Mut,
"return" => Kw::Return,
"alias" => Kw::Alias,
"type" => Kw::Type,
"Self" => Kw::SelfType,
"self" => Kw::SelfIdent,
"interface" => Kw::Interface,
"impl" => Kw::Impl,
"true" => Kw::True,
"false" => Kw::False,
"module" => Kw::Module,
};
}
#[derive(Debug, Clone)]
pub struct Token {
pub token_type: TokenType,
pub offset: (usize, usize),
}
impl Token {
pub fn get_error(&self) -> Option<String> {
match self.token_type {
TokenType::Error(ref s) => Some(s.clone()),
_ => None,
}
}
pub fn to_string_with_metadata(&self) -> String {
format!("{}(L:{},c:{})", self.token_type, self.offset.0, self.offset.1)
}
}
const OPERATOR_CHARS: [char; 18] = ['!', '$', '%', '&', '*', '+', '-', '.', ':', '<', '>', '=', '?', '@', '^', '|', '~', '`'];
fn is_operator(c: &char) -> bool {
OPERATOR_CHARS.iter().any(|x| x == c)
}
type CharData = (usize, usize, char);
pub fn tokenize(input: &str) -> Vec<Token> {
let mut tokens: Vec<Token> = Vec::new();
let mut input = input.lines().enumerate()
.intersperse((0, "\n"))
.flat_map(|(line_idx, ref line)| {
line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
})
.peekable();
while let Some((line_idx, ch_idx, c)) = input.next() {
let cur_tok_type = match c {
'/' => match input.peek().map(|t| t.2) {
Some('/') => {
while let Some((_, _, c)) = input.next() {
if c == '\n' {
break;
}
}
continue;
},
Some('*') => {
input.next();
let mut comment_level = 1;
while let Some((_, _, c)) = input.next() {
if c == '*' && input.peek().map(|t| t.2) == Some('/') {
input.next();
comment_level -= 1;
} else if c == '/' && input.peek().map(|t| t.2) == Some('*') {
input.next();
comment_level += 1;
}
if comment_level == 0 {
break;
}
}
continue;
},
_ => Slash
},
c if c.is_whitespace() && c != '\n' => continue,
'\n' => Newline, ';' => Semicolon,
':' => Colon, ',' => Comma,
'(' => LParen, ')' => RParen,
'{' => LCurlyBrace, '}' => RCurlyBrace,
'[' => LSquareBracket, ']' => RSquareBracket,
'"' => handle_quote(&mut input),
c if c.is_digit(10) => handle_digit(c, &mut input),
c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input), //TODO I'll probably have to rewrite this if I care about types being uppercase, also type parameterization
c if is_operator(&c) => handle_operator(c, &mut input),
unknown => Error(format!("Unexpected character: {}", unknown)),
};
tokens.push(Token { token_type: cur_tok_type, offset: (line_idx, ch_idx) });
}
tokens
}
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
input.next();
let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
HexLiteral(Rc::new(rest))
} else if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'b' }) {
input.next();
BinNumberSigil
} else {
let mut buf = c.to_string();
buf.extend(input.peeking_take_while(|&(_, _, ref c)| c.is_digit(10)).map(|(_, _, c)| { c }));
DigitGroup(Rc::new(buf))
}
}
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
let mut buf = String::new();
loop {
match input.next().map(|(_, _, c)| { c }) {
Some('"') => break,
Some('\\') => {
let next = input.peek().map(|&(_, _, c)| { c });
if next == Some('n') {
input.next();
buf.push('\n')
} else if next == Some('"') {
input.next();
buf.push('"');
} else if next == Some('t') {
input.next();
buf.push('\t');
}
},
Some(c) => buf.push(c),
None => return TokenType::Error(format!("Unclosed string")),
}
}
TokenType::StrLiteral(Rc::new(buf))
}
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
let mut buf = String::new();
buf.push(c);
if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
return TokenType::Underscore
}
loop {
match input.peek().map(|&(_, _, c)| { c }) {
Some(c) if c.is_alphanumeric() => {
input.next();
buf.push(c);
},
_ => break,
}
}
match KEYWORDS.get(buf.as_str()) {
Some(kw) => TokenType::Keyword(*kw),
None => TokenType::Identifier(Rc::new(buf)),
}
}
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
match c {
'<' | '>' | '|' | '.' => {
let ref next = input.peek().map(|&(_, _, c)| { c });
if !next.map(|n| { is_operator(&n) }).unwrap_or(false) {
return match c {
'<' => LAngleBracket,
'>' => RAngleBracket,
'|' => Pipe,
'.' => Period,
_ => unreachable!(),
}
}
},
_ => (),
};
let mut buf = String::new();
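// A backtick quotes an identifier into an infix operator token, e.g. 1 `plus` 2
// lexes to Operator("plus") between two digit groups.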
if c == '`' {
loop {
match input.peek().map(|&(_, _, c)| { c }) {
Some(c) if c.is_alphabetic() || c == '_' => {
input.next();
buf.push(c);
},
Some('`') => {
input.next();
break;
},
_ => break
}
}
} else {
buf.push(c);
loop {
match input.peek().map(|&(_, _, c)| { c }) {
Some(c) if is_operator(&c) => {
input.next();
buf.push(c);
},
_ => break
}
}
}
TokenType::Operator(Rc::new(buf))
}
#[cfg(test)]
mod schala_tokenizer_tests {
use super::*;
use super::Kw::*;
macro_rules! digit { ($ident:expr) => { DigitGroup(Rc::new($ident.to_string())) } }
macro_rules! ident { ($ident:expr) => { Identifier(Rc::new($ident.to_string())) } }
macro_rules! op { ($ident:expr) => { Operator(Rc::new($ident.to_string())) } }
#[test]
fn tokens() {
let a = tokenize("let a: A<B> = c ++ d");
let token_types: Vec<TokenType> = a.into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
LAngleBracket, ident!("B"), RAngleBracket, op!("="), ident!("c"), op!("++"), ident!("d")]);
}
#[test]
fn underscores() {
let token_types: Vec<TokenType> = tokenize("4_8").into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![digit!("4"), Underscore, digit!("8")]);
}
#[test]
fn comments() {
let token_types: Vec<TokenType> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![digit!("1"), op!("+"), digit!("2")]);
}
#[test]
fn backtick_operators() {
let token_types: Vec<TokenType> = tokenize("1 `plus` 2").into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![digit!("1"), op!("plus"), digit!("2")]);
}
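// A hedged extra check (not in the original tests): `handle_digit` drops the `0x`
// prefix and keeps the remaining hex digits (and underscores) as a single HexLiteral.
#[test]
fn hex_literals() {
let token_types: Vec<TokenType> = tokenize("0xf_a + 1").into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![HexLiteral(Rc::new("f_a".to_string())), op!("+"), digit!("1")]);
}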
}


@ -1,513 +0,0 @@
use std::rc::Rc;
use super::{EvalResult, Memory, MemoryValue, Primitive, State};
use crate::{
builtin::Builtin,
reduced_ir::{
Alternative, Callable, Expression, FunctionDefinition, Literal, Lookup, Pattern, ReducedIR, Statement,
},
type_inference::TypeContext,
util::ScopeStack,
};
#[derive(Debug)]
enum StatementOutput {
Primitive(Primitive),
Nothing,
}
#[derive(Debug, Clone, Copy)]
enum LoopControlFlow {
Break,
Continue,
}
pub struct Evaluator<'a, 'b> {
type_context: &'b TypeContext,
state: &'b mut State<'a>,
early_returning: bool,
loop_control: Option<LoopControlFlow>,
}
impl<'a, 'b> Evaluator<'a, 'b> {
pub(crate) fn new(state: &'b mut State<'a>, type_context: &'b TypeContext) -> Self {
Self { state, type_context, early_returning: false, loop_control: None }
}
pub fn evaluate(&mut self, reduced: ReducedIR, repl: bool) -> Vec<Result<String, String>> {
let mut acc = vec![];
for (def_id, function) in reduced.functions.into_iter() {
let mem = (&def_id).into();
self.state.memory.insert(mem, MemoryValue::Function(function));
}
for statement in reduced.entrypoint.into_iter() {
match self.statement(statement) {
Ok(StatementOutput::Primitive(output)) if repl =>
acc.push(Ok(output.to_repl(self.type_context))),
Ok(_) => (),
Err(error) => {
acc.push(Err(error.msg));
return acc;
}
}
}
acc
}
fn block(&mut self, statements: Vec<Statement>) -> EvalResult<Primitive> {
let mut retval = None;
for stmt in statements.into_iter() {
match self.statement(stmt)? {
StatementOutput::Nothing => (),
StatementOutput::Primitive(prim) => {
retval = Some(prim);
}
};
if self.early_returning {
break;
}
if self.loop_control.is_some() {
break;
}
}
Ok(if let Some(ret) = retval { ret } else { self.expression(Expression::unit())? })
}
fn statement(&mut self, stmt: Statement) -> EvalResult<StatementOutput> {
match stmt {
Statement::Binding { ref id, expr, constant: _ } => {
let evaluated = self.expression(expr)?;
self.state.memory.insert(id.into(), evaluated.into());
Ok(StatementOutput::Nothing)
}
Statement::Expression(expr) => {
let evaluated = self.expression(expr)?;
Ok(StatementOutput::Primitive(evaluated))
}
Statement::Return(expr) => {
let evaluated = self.expression(expr)?;
self.early_returning = true;
Ok(StatementOutput::Primitive(evaluated))
}
Statement::Break => {
self.loop_control = Some(LoopControlFlow::Break);
Ok(StatementOutput::Nothing)
}
Statement::Continue => {
self.loop_control = Some(LoopControlFlow::Continue);
Ok(StatementOutput::Nothing)
}
}
}
fn expression(&mut self, expression: Expression) -> EvalResult<Primitive> {
Ok(match expression {
Expression::Literal(lit) => Primitive::Literal(lit),
Expression::Tuple(items) => Primitive::Tuple(
items
.into_iter()
.map(|expr| self.expression(expr))
.collect::<EvalResult<Vec<Primitive>>>()?,
),
Expression::List(items) => Primitive::List(
items
.into_iter()
.map(|expr| self.expression(expr))
.collect::<EvalResult<Vec<Primitive>>>()?,
),
Expression::Lookup(kind) => match kind {
Lookup::Function(ref id) => {
let mem = id.into();
match self.state.memory.lookup(&mem) {
// This just checks that the function exists in "memory" by ID, we don't
// actually retrieve it until `apply_function()`
Some(MemoryValue::Function(_)) => Primitive::Callable(Callable::UserDefined(*id)),
x => return Err(format!("Function not found for id: {} : {:?}", id, x).into()),
}
}
Lookup::Param(n) => {
let mem = n.into();
match self.state.memory.lookup(&mem) {
Some(MemoryValue::Primitive(prim)) => prim.clone(),
e => return Err(format!("Param lookup error, got {:?}", e).into()),
}
}
Lookup::SelfParam => {
let mem = Memory::self_param();
match self.state.memory.lookup(&mem) {
Some(MemoryValue::Primitive(prim)) => prim.clone(),
e => return Err(format!("SelfParam lookup error, got {:?}", e).into()),
}
}
Lookup::LocalVar(ref id) | Lookup::GlobalVar(ref id) => {
let mem = id.into();
match self.state.memory.lookup(&mem) {
Some(MemoryValue::Primitive(expr)) => expr.clone(),
_ =>
return Err(
format!("Nothing found for local/gloval variable lookup {}", id).into()
),
}
}
},
Expression::Assign { ref lval, box rval } => {
let mem = lval.into();
let evaluated = self.expression(rval)?;
println!("Inserting {:?} into {:?}", evaluated, mem);
self.state.memory.insert(mem, MemoryValue::Primitive(evaluated));
Primitive::unit()
}
Expression::Call { box f, args } => self.call_expression(f, args, None)?,
Expression::CallMethod { box f, args, box self_expr } =>
self.call_expression(f, args, Some(self_expr))?,
Expression::Callable(Callable::DataConstructor { type_id, tag }) => {
let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
if arity == 0 {
Primitive::Object { type_id, tag, items: vec![], ordered_fields: None }
} else {
Primitive::Callable(Callable::DataConstructor { type_id, tag })
}
}
Expression::Callable(func) => Primitive::Callable(func),
Expression::Conditional { box cond, then_clause, else_clause } => {
let cond = self.expression(cond)?;
match cond {
Primitive::Literal(Literal::Bool(true)) => self.block(then_clause)?,
Primitive::Literal(Literal::Bool(false)) => self.block(else_clause)?,
v => return Err(format!("Non-boolean value {:?} in if-statement", v).into()),
}
}
Expression::CaseMatch { box cond, alternatives } =>
self.case_match_expression(cond, alternatives)?,
Expression::Index { box indexee, box indexer } => {
let indexee = self.expression(indexee)?;
let indexer = self.expression(indexer)?;
match (indexee, indexer) {
(Primitive::List(items), Primitive::Literal(Literal::Nat(n))) =>
match items.get(n as usize) {
Some(item) => item.clone(),
None => return Err(format!("Invalid index {} for this value", n).into()),
},
_ => return Err("Invalid index type".to_string().into()),
}
}
Expression::Loop { box cond, statements } => self.loop_expression(cond, statements)?,
Expression::ReductionError(e) => return Err(e.into()),
Expression::Access { name, box expr } => {
let expr = self.expression(expr)?;
match expr {
Primitive::Object { items, ordered_fields: Some(ordered_fields), .. } => {
let idx = match ordered_fields.iter().position(|s| s == &name) {
Some(idx) => idx,
None => return Err(format!("Field `{}` not found", name).into()),
};
let item = match items.get(idx) {
Some(item) => item,
None => return Err(format!("Field lookup `{}` failed", name).into()),
};
item.clone()
}
e =>
return Err(
format!("Trying to do a field lookup on a non-object value: {:?}", e).into()
),
}
}
})
}
fn loop_expression(&mut self, cond: Expression, statements: Vec<Statement>) -> EvalResult<Primitive> {
let existing = self.loop_control;
let output = self.loop_expression_inner(cond, statements);
self.loop_control = existing;
output
}
fn loop_expression_inner(
&mut self,
cond: Expression,
statements: Vec<Statement>,
) -> EvalResult<Primitive> {
loop {
let cond = self.expression(cond.clone())?;
println!("COND: {:?}", cond);
match cond {
Primitive::Literal(Literal::Bool(true)) => (),
Primitive::Literal(Literal::Bool(false)) => break,
e => return Err(format!("Loop condition evaluates to non-boolean: {:?}", e).into()),
};
//TODO eventually loops should be able to return something
let _output = self.block(statements.clone())?;
match self.loop_control {
None => (),
Some(LoopControlFlow::Continue) => {
self.loop_control = None;
}
Some(LoopControlFlow::Break) => {
break;
}
}
}
Ok(Primitive::unit())
}
fn case_match_expression(
&mut self,
cond: Expression,
alternatives: Vec<Alternative>,
) -> EvalResult<Primitive> {
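// Checks one pattern against the already-evaluated scrutinee; `Binding` patterns
// write the bound value into `scope` so the matched arm's body can read it.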
fn matches(scrut: &Primitive, pat: &Pattern, scope: &mut ScopeStack<Memory, MemoryValue>) -> bool {
match pat {
Pattern::Ignored => true,
Pattern::Binding(ref def_id) => {
let mem = def_id.into();
scope.insert(mem, MemoryValue::Primitive(scrut.clone())); //TODO make sure this doesn't cause problems with nesting
true
}
Pattern::Literal(pat_literal) =>
if let Primitive::Literal(scrut_literal) = scrut {
pat_literal == scrut_literal
} else {
false
},
Pattern::Tuple { subpatterns, tag } => match tag {
None => match scrut {
Primitive::Tuple(items) if items.len() == subpatterns.len() => items
.iter()
.zip(subpatterns.iter())
.all(|(item, subpat)| matches(item, subpat, scope)),
_ => false, //TODO should be a type error
},
Some(pattern_tag) => match scrut {
//TODO should test type_ids for runtime type checking, once those work
Primitive::Object { tag, items, .. }
if tag == pattern_tag && items.len() == subpatterns.len() =>
items
.iter()
.zip(subpatterns.iter())
.all(|(item, subpat)| matches(item, subpat, scope)),
_ => false,
},
},
Pattern::Record { tag: pattern_tag, subpatterns } => match scrut {
//TODO several types of possible error here
Primitive::Object { tag, items, ordered_fields: Some(ordered_fields), .. }
if tag == pattern_tag =>
subpatterns.iter().all(|(field_name, subpat)| {
let idx = ordered_fields
.iter()
.position(|field| field.as_str() == field_name.as_ref())
.unwrap();
let item = &items[idx];
matches(item, subpat, scope)
}),
_ => false,
},
}
}
let cond = self.expression(cond)?;
for alt in alternatives.into_iter() {
let mut new_scope = self.state.memory.new_scope(None);
if matches(&cond, &alt.pattern, &mut new_scope) {
let mut new_state = State { memory: new_scope };
let mut evaluator = Evaluator::new(&mut new_state, self.type_context);
let output = evaluator.block(alt.item);
self.early_returning = evaluator.early_returning;
return output;
}
}
Err("No valid match in match expression".into())
}
//TODO need to do something with self_expr to make method invocations actually work
fn call_expression(
&mut self,
f: Expression,
args: Vec<Expression>,
self_expr: Option<Expression>,
) -> EvalResult<Primitive> {
let func = match self.expression(f)? {
Primitive::Callable(func) => func,
other => return Err(format!("Trying to call non-function value: {:?}", other).into()),
};
match func {
Callable::Builtin(builtin) => self.apply_builtin(builtin, args),
Callable::UserDefined(def_id) => {
let mem = (&def_id).into();
match self.state.memory.lookup(&mem) {
Some(MemoryValue::Function(FunctionDefinition { body })) => {
let body = body.clone(); //TODO ideally this clone would not happen
self.apply_function(body, args, self_expr)
}
e => Err(format!("Error looking up function with id {}: {:?}", def_id, e).into()),
}
}
Callable::Lambda { arity, body } => {
if arity as usize != args.len() {
return Err(format!(
"Lambda expression requries {} arguments, only {} provided",
arity,
args.len()
)
.into());
}
self.apply_function(body, args, None)
}
Callable::DataConstructor { type_id, tag } => {
let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
if arity as usize != args.len() {
return Err(format!(
"Constructor expression requries {} arguments, only {} provided",
arity,
args.len()
)
.into());
}
let mut items: Vec<Primitive> = vec![];
for arg in args.into_iter() {
items.push(self.expression(arg)?);
}
Ok(Primitive::Object { type_id, tag, items, ordered_fields: None })
}
Callable::RecordConstructor { type_id, tag, field_order } => {
//TODO maybe I'll want to do a runtime check of the evaluated fields
/*
let record_members = self.type_context.lookup_record_members(type_id, tag)
.ok_or(format!("Runtime record lookup for: {} {} not found", type_id, tag).into())?;
*/
let mut items: Vec<Primitive> = vec![];
for arg in args.into_iter() {
items.push(self.expression(arg)?);
}
Ok(Primitive::Object { type_id, tag, items, ordered_fields: Some(field_order) })
}
}
}
fn apply_builtin(&mut self, builtin: Builtin, args: Vec<Expression>) -> EvalResult<Primitive> {
use Builtin::*;
use Literal::*;
use Primitive::Literal as Lit;
let evaled_args: EvalResult<Vec<Primitive>> =
args.into_iter().map(|arg| self.expression(arg)).collect();
let evaled_args = evaled_args?;
Ok(match (builtin, evaled_args.as_slice()) {
/* builtin functions */
(IOPrint, &[ref anything]) => {
print!("{}", anything.to_repl(self.type_context));
Primitive::Tuple(vec![])
}
(IOPrintLn, &[ref anything]) => {
println!("{}", anything.to_repl(self.type_context));
Primitive::Tuple(vec![])
}
(IOGetLine, &[]) => {
let mut buf = String::new();
std::io::stdin().read_line(&mut buf).expect("Error reading line in 'getline'");
StringLit(Rc::new(buf.trim().to_string())).into()
}
/* Binops */
(binop, &[ref lhs, ref rhs]) => match (binop, lhs, rhs) {
// TODO need a better way of handling these literals
(Add, Lit(Nat(l)), Lit(Nat(r))) => Nat(l + r).into(),
(Add, Lit(Int(l)), Lit(Int(r))) => Int(l + r).into(),
(Add, Lit(Nat(l)), Lit(Int(r))) => Int((*l as i64) + (*r as i64)).into(),
(Add, Lit(Int(l)), Lit(Nat(r))) => Int((*l as i64) + (*r as i64)).into(),
(Concatenate, Lit(StringLit(ref s1)), Lit(StringLit(ref s2))) =>
StringLit(Rc::new(format!("{}{}", s1, s2))).into(),
(Subtract, Lit(Nat(l)), Lit(Nat(r))) => Nat(l - r).into(),
(Multiply, Lit(Nat(l)), Lit(Nat(r))) => Nat(l * r).into(),
(Divide, Lit(Nat(l)), Lit(Nat(r))) => Float((*l as f64) / (*r as f64)).into(),
(Quotient, Lit(Nat(l)), Lit(Nat(r))) =>
if *r == 0 {
return Err("Divide-by-zero error".into());
} else {
Nat(l / r).into()
},
(Modulo, Lit(Nat(l)), Lit(Nat(r))) => Nat(l % r).into(),
(Exponentiation, Lit(Nat(l)), Lit(Nat(r))) => Nat(l.pow(*r as u32)).into(), // note: `^` would be bitwise XOR, not exponentiation
(BitwiseAnd, Lit(Nat(l)), Lit(Nat(r))) => Nat(l & r).into(),
(BitwiseOr, Lit(Nat(l)), Lit(Nat(r))) => Nat(l | r).into(),
/* comparisons */
(Equality, Lit(Nat(l)), Lit(Nat(r))) => Bool(l == r).into(),
(Equality, Lit(Int(l)), Lit(Int(r))) => Bool(l == r).into(),
(Equality, Lit(Float(l)), Lit(Float(r))) => Bool(l == r).into(),
(Equality, Lit(Bool(l)), Lit(Bool(r))) => Bool(l == r).into(),
(Equality, Lit(StringLit(ref l)), Lit(StringLit(ref r))) => Bool(l == r).into(),
(NotEqual, Lit(Nat(l)), Lit(Nat(r))) => Bool(l != r).into(),
(NotEqual, Lit(Int(l)), Lit(Int(r))) => Bool(l != r).into(),
(NotEqual, Lit(Float(l)), Lit(Float(r))) => Bool(l != r).into(),
(NotEqual, Lit(Bool(l)), Lit(Bool(r))) => Bool(l != r).into(),
(NotEqual, Lit(StringLit(ref l)), Lit(StringLit(ref r))) => Bool(l != r).into(),
(LessThan, Lit(Nat(l)), Lit(Nat(r))) => Bool(l < r).into(),
(LessThan, Lit(Int(l)), Lit(Int(r))) => Bool(l < r).into(),
(LessThan, Lit(Float(l)), Lit(Float(r))) => Bool(l < r).into(),
(LessThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Bool(l <= r).into(),
(LessThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Bool(l <= r).into(),
(LessThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Bool(l <= r).into(),
(GreaterThan, Lit(Nat(l)), Lit(Nat(r))) => Bool(l > r).into(),
(GreaterThan, Lit(Int(l)), Lit(Int(r))) => Bool(l > r).into(),
(GreaterThan, Lit(Float(l)), Lit(Float(r))) => Bool(l > r).into(),
(GreaterThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Bool(l >= r).into(),
(GreaterThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Bool(l >= r).into(),
(GreaterThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Bool(l >= r).into(),
(binop, lhs, rhs) =>
return Err(format!("Invalid binop expression {:?} {:?} {:?}", lhs, binop, rhs).into()),
},
(prefix, &[ref arg]) => match (prefix, arg) {
(BooleanNot, Lit(Bool(true))) => Bool(false),
(BooleanNot, Lit(Bool(false))) => Bool(true),
(Negate, Lit(Nat(n))) => Int(-(*n as i64)),
(Negate, Lit(Int(n))) => Int(-(*n as i64)),
(Negate, Lit(Float(f))) => Float(-(*f as f64)),
(Increment, Lit(Int(n))) => Int(*n),
(Increment, Lit(Nat(n))) => Nat(*n),
_ => return Err("No valid prefix op".into()),
}
.into(),
(x, args) => return Err(format!("bad or unimplemented builtin {:?} | {:?}", x, args).into()),
})
}
fn apply_function(
&mut self,
body: Vec<Statement>,
args: Vec<Expression>,
self_expr: Option<Expression>,
) -> EvalResult<Primitive> {
let self_expr = if let Some(expr) = self_expr { Some(self.expression(expr)?) } else { None };
let mut evaluated_args: Vec<Primitive> = vec![];
for arg in args.into_iter() {
evaluated_args.push(self.expression(arg)?);
}
let mut frame_state = State { memory: self.state.memory.new_scope(None) };
let mut evaluator = Evaluator::new(&mut frame_state, self.type_context);
if let Some(evaled) = self_expr {
let mem = Memory::self_param();
evaluator.state.memory.insert(mem, MemoryValue::Primitive(evaled));
}
for (n, evaled) in evaluated_args.into_iter().enumerate() {
let n = n as u8;
let mem = n.into();
evaluator.state.memory.insert(mem, MemoryValue::Primitive(evaled));
}
evaluator.block(body)
}
}


@ -1,173 +0,0 @@
use std::{convert::From, fmt::Write};
use crate::{
reduced_ir::{Callable, Expression, FunctionDefinition, Literal, ReducedIR},
symbol_table::DefId,
type_inference::{TypeContext, TypeId},
util::{delim_wrapped, ScopeStack},
};
mod evaluator;
mod test;
type EvalResult<T> = Result<T, RuntimeError>;
#[derive(Debug)]
pub struct State<'a> {
memory: ScopeStack<'a, Memory, MemoryValue>,
}
//TODO - eh, I dunno, maybe it doesn't matter exactly how memory works in the tree-walking
//evaluator
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
enum Memory {
Index(u32),
}
impl Memory {
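// `self` is stored at a fixed sentinel index just below the 4_000_000+ range that
// the `From<u8>` impl below reserves for positional function parameters.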
fn self_param() -> Self {
Memory::Index(3_999_999)
}
}
// This is for function param lookups, and is a hack
impl From<u8> for Memory {
fn from(n: u8) -> Self {
Memory::Index(4_000_000 + (n as u32))
}
}
impl From<&DefId> for Memory {
fn from(id: &DefId) -> Self {
Self::Index(id.as_u32())
}
}
#[derive(Debug)]
struct RuntimeError {
msg: String,
}
impl From<String> for RuntimeError {
fn from(msg: String) -> Self {
Self { msg }
}
}
impl From<&str> for RuntimeError {
fn from(msg: &str) -> Self {
Self { msg: msg.to_string() }
}
}
impl RuntimeError {
#[allow(dead_code)]
fn get_msg(&self) -> String {
format!("Runtime error: {}", self.msg)
}
}
/// Anything that can be stored in memory; that is, a function definition, or a fully-evaluated
/// program value.
#[derive(Debug)]
enum MemoryValue {
Function(FunctionDefinition),
Primitive(Primitive),
}
impl From<Primitive> for MemoryValue {
fn from(prim: Primitive) -> Self {
Self::Primitive(prim)
}
}
#[derive(Debug)]
enum RuntimeValue {
Expression(Expression),
Evaluated(Primitive),
}
impl From<Expression> for RuntimeValue {
fn from(expr: Expression) -> Self {
Self::Expression(expr)
}
}
impl From<Primitive> for RuntimeValue {
fn from(prim: Primitive) -> Self {
Self::Evaluated(prim)
}
}
/// A fully-reduced value
#[derive(Debug, Clone)]
enum Primitive {
Tuple(Vec<Primitive>),
List(Vec<Primitive>),
Literal(Literal),
Callable(Callable),
Object { type_id: TypeId, tag: u32, ordered_fields: Option<Vec<String>>, items: Vec<Primitive> },
}
impl Primitive {
fn to_repl(&self, type_context: &TypeContext) -> String {
match self {
Primitive::Object { type_id, items, tag, ordered_fields: _ } if items.is_empty() =>
type_context.variant_local_name(type_id, *tag).unwrap().to_string(),
Primitive::Object { type_id, items, tag, ordered_fields: None } => {
format!(
"{}{}",
type_context.variant_local_name(type_id, *tag).unwrap(),
delim_wrapped('(', ')', items.iter().map(|item| item.to_repl(type_context)))
)
}
Primitive::Object { type_id, items, tag, ordered_fields: Some(fields) } => {
let mut buf = format!("{} {{ ", type_context.variant_local_name(type_id, *tag).unwrap());
for item in fields.iter().zip(items.iter()).map(Some).intersperse(None) {
match item {
Some((name, val)) => write!(buf, "{}: {}", name, val.to_repl(type_context)).unwrap(),
None => write!(buf, ", ").unwrap(),
}
}
write!(buf, " }}").unwrap();
buf
}
Primitive::Literal(lit) => match lit {
Literal::Nat(n) => format!("{}", n),
Literal::Int(i) => format!("{}", i),
Literal::Float(f) => format!("{}", f),
Literal::Bool(b) => format!("{}", b),
Literal::StringLit(s) => format!("\"{}\"", s),
},
Primitive::Tuple(terms) => delim_wrapped('(', ')', terms.iter().map(|x| x.to_repl(type_context))),
Primitive::List(terms) => delim_wrapped('[', ']', terms.iter().map(|x| x.to_repl(type_context))),
Primitive::Callable(..) => "<some-callable>".to_string(),
}
}
fn unit() -> Self {
Primitive::Tuple(vec![])
}
}
impl From<Literal> for Primitive {
fn from(lit: Literal) -> Self {
Primitive::Literal(lit)
}
}
impl<'a> State<'a> {
pub fn new() -> Self {
Self { memory: ScopeStack::new(Some("global".to_string())) }
}
pub fn evaluate(
&mut self,
reduced: ReducedIR,
type_context: &TypeContext,
repl: bool,
) -> Vec<Result<String, String>> {
let mut evaluator = evaluator::Evaluator::new(self, type_context);
evaluator.evaluate(reduced, repl)
}
}


@ -1,564 +0,0 @@
#![cfg(test)]
use pretty_assertions::assert_eq;
use test_case::test_case;
use crate::{
symbol_table::SymbolTable,
tree_walk_eval::{evaluator::Evaluator, State},
type_inference::TypeContext,
};
fn evaluate_input(input: &str) -> Result<String, String> {
let ast = crate::util::quick_ast(input);
let mut symbol_table = SymbolTable::new();
let mut type_context = TypeContext::new();
symbol_table.process_ast(&ast, &mut type_context).unwrap();
let reduced_ir = crate::reduced_ir::reduce(&ast, &symbol_table, &type_context);
reduced_ir.debug(&symbol_table);
println!("========");
symbol_table.debug();
let mut state = State::new();
let mut evaluator = Evaluator::new(&mut state, &type_context);
let mut outputs = evaluator.evaluate(reduced_ir, true);
outputs.pop().unwrap()
}
fn eval_assert(input: &str, expected: &str) {
assert_eq!(evaluate_input(input), Ok(expected.to_string()));
}
fn eval_assert_failure(input: &str, expected: &str) {
assert_eq!(evaluate_input(input), Err(expected.to_string()));
}
#[test]
fn test_basic_eval() {
eval_assert("1 + 2", "3");
eval_assert("let mut a = 1; a = 2", "()");
eval_assert("let mut a = 1; a = a + 2; a", "3");
}
#[test]
fn op_eval() {
eval_assert("-13", "-13");
eval_assert("10 - 2", "8");
}
#[test]
fn function_eval() {
eval_assert("fn oi(x) { x + 1 }; oi(4)", "5");
eval_assert("fn oi(x) { x + 1 }; oi(1+2)", "4");
}
#[test]
fn scopes() {
let scope_ok = r#"
let a = 20
fn haha() {
let something = 38
let a = 10
a
}
haha()
"#;
eval_assert(scope_ok, "10");
let scope_ok = r#"
let a = 20
fn queque() {
let a = 10
a
}
a
"#;
eval_assert(scope_ok, "20");
}
#[test]
fn eval_scopes_2() {
eval_assert(
r#"
fn trad() {
let a = 10
fn jinner() {
let b = 20
b
}
a + jinner()
}
trad()"#,
"30",
);
let err = "No symbol found for name: `a`";
eval_assert_failure(
r#"
fn trad() {
let a = 10
fn inner() {
let b = 20
a + b
}
inner()
}
trad()
"#,
err,
);
}
#[test]
fn adt_output_1() {
let source = r#"
type Option<T> = Some(T) | None
let a = Option::None
let b = Option::Some(10)
(b, a)
"#;
eval_assert(source, "(Some(10), None)");
}
#[test]
fn adt_output_2() {
let source = r#"
type Gobble = Unknown | Rufus { a: Int, torrid: Nat }
let b = Gobble::Rufus { a: 3, torrid: 99 }
b
"#;
eval_assert(source, "Rufus { a: 3, torrid: 99 }");
let source = r#"
type Gobble = Unknown | Rufus { a: Int, torrid: Nat }
let b = Gobble::Rufus { torrid: 3, a: 84 }
b
"#;
eval_assert(source, "Rufus { a: 84, torrid: 3 }");
let source = r#"
type Gobble = Unknown | Rufus { a: Int, torrid: Nat }
let b = Gobble::Rufus { a: 84 }
b
"#;
eval_assert_failure(source, "Field torrid not specified for record Gobble::Rufus");
}
#[test]
fn basic_if_statement() {
let source = r#"
let a = 10
let b = 10
if a == b then { 69 } else { 420 }
"#;
eval_assert(source, "69");
}
#[test]
fn basic_patterns_1() {
let source = r#"
let x = 10
let a = if x is 10 then { 255 } else { 256 }
let b = if 23 is 99 then { 255 } else { 256 }
let c = if true is false then { 9 } else { 10 }
let d = if "xxx" is "yyy" then { 20 } else { 30 }
(a, b, c, d)
"#;
eval_assert(source, "(255, 256, 10, 30)");
}
#[test_case("sanchez", "1")]
#[test_case("mouri", "2")]
#[test_case("hella", "3")]
#[test_case("cyrus", "4")]
fn basic_patterns_2(input: &str, expected: &str) {
let mut source = format!(r#"let x = "{}""#, input);
source.push_str(
r#"
if x {
is "sanchez" then 1
is "mouri" then 2
is "hella" then 3
is _ then 4
}
"#,
);
eval_assert(&source, expected);
}
#[test_case(r#"(45, "panda", false, 2.2)"#, r#""yes""#)]
#[test_case(r#"(99, "panda", false, -2.45)"#, r#""maybe""#)]
fn tuple_patterns(input: &str, expected: &str) {
let mut source = format!("let x = {}", input);
source.push_str(
r#"
if x {
is (45, "pablo", _, 28.4) then "no"
is (_, "panda", _, 2.2) then "yes"
is _ then "maybe"
}"#,
);
eval_assert(&source, expected);
}
#[test]
fn record_patterns_1() {
let source = r#"
type Ara = Kueh { a: Int, b: String } | Morbuk
let alpha = Ara::Kueh { a: 10, b: "sanchez" }
if alpha {
is Ara::Kueh { a, b } then (b, a)
is _ then ("nooo", 8888)
}"#;
eval_assert(source, r#"("sanchez", 10)"#);
}
#[test]
fn record_patterns_2() {
let source = r#"
type Ara = Kueh { a: Int, b: String } | Morbuk
let alpha = Ara::Kueh { a: 10, b: "sanchez" }
if alpha {
is Ara::Kueh { a, b: le_value } then (le_value, (a*2))
is _ then ("nooo", 8888)
}"#;
eval_assert(source, r#"("sanchez", 20)"#);
}
#[test]
fn record_patterns_3() {
let source = r#"
type Vstsavlobs = { tkveni: Int, b: Ia }
type Ia = { sitqva: Int, ghmerts: String }
let b = Vstsavlobs { tkveni: 3, b: Ia::Ia { sitqva: 5, ghmerts: "ooo" } }
if b {
is Vstsavlobs::Vstsavlobs { tkveni: _, b: Ia::Ia { sitqva, ghmerts } } then sitqva
is _ then 5000
}"#;
eval_assert(source, "5");
}
#[test]
fn if_is_patterns() {
let source = r#"
type Option<T> = Some(T) | None
let q = "a string"
let x = Option::Some(9); if x is Option::Some(q) then { q } else { 0 }"#;
eval_assert(source, "9");
let source = r#"
type Option<T> = Some(T) | None
let q = "a string"
let outer = 2
let x = Option::None; if x is Option::Some(q) then { q } else { -2 + outer }"#;
eval_assert(source, "0");
}
#[test]
fn full_if_matching() {
let source = r#"
type Option<T> = Some(T) | None
let a = Option::None
if a { is Option::None then 4; is Option::Some(x) then x }
"#;
eval_assert(source, "4");
let source = r#"
type Option<T> = Some(T) | None
let sara = Option::Some(99)
if sara { is Option::None then 1 + 3; is Option::Some(x) then x }
"#;
eval_assert(source, "99");
let source = r#"
let a = 10
if a { is 10 then "x"; is 4 then "y" }
"#;
eval_assert(source, "\"x\"");
let source = r#"
let a = 10
if a { is 15 then "x"; is 10 then "y" }
"#;
eval_assert(source, "\"y\"");
}
//TODO - I can probably cut down some of these
#[test]
fn string_pattern() {
let source = r#"
let a = "foo"
if a { is "foo" then "x"; is _ then "y" }
"#;
eval_assert(source, "\"x\"");
}
#[test]
fn boolean_pattern() {
let source = r#"
let a = true
if a {
is true then "x"
is false then "y"
}
"#;
eval_assert(source, "\"x\"");
}
#[test]
fn boolean_pattern_2() {
let source = r#"
let a = false
if a { is true then "x"; is false then "y" }
"#;
eval_assert(source, "\"y\"");
}
#[test]
fn ignore_pattern() {
let source = r#"
type Option<T> = Some(T) | None
if Option::Some(10) {
is _ then "hella"
}
"#;
eval_assert(source, "\"hella\"");
}
#[test]
fn tuple_pattern() {
let source = r#"
if (1, 2) {
is (1, x) then x;
is _ then 99
}
"#;
eval_assert(source, "2");
}
#[test]
fn tuple_pattern_2() {
let source = r#"
if (1, 2) {
is (10, x) then x
is (y, x) then x + y
}
"#;
eval_assert(source, "3");
}
#[test]
fn tuple_pattern_3() {
let source = r#"
if (1, 5) {
is (10, x) then x
is (1, x) then x
}
"#;
eval_assert(source, "5");
}
#[test]
fn tuple_pattern_4() {
let source = r#"
if (1, 5) {
is (10, x) then x
is (1, x) then x
}
"#;
eval_assert(source, "5");
}
#[test]
fn prim_obj_pattern() {
let source = r#"
type Stuff = Mulch(Nat) | Jugs(Nat, String) | Mardok
let a = Stuff::Mulch(20)
let b = Stuff::Jugs(1, "haha")
let c = Stuff::Mardok
let x = if a {
is Stuff::Mulch(20) then "x"
is _ then "ERR"
}
let y = if b {
is Stuff::Mulch(n) then "ERR"
is Stuff::Jugs(2, _) then "ERR"
is Stuff::Jugs(1, s) then s
is _ then "ERR"
}
let z = if c {
is Stuff::Jugs(_, _) then "ERR"
is Stuff::Mardok then "NIGH"
is _ then "ERR"
}
(x, y, z)
"#;
eval_assert(source, r#"("x", "haha", "NIGH")"#);
}
#[test]
fn basic_lambda_evaluation_1() {
let source = r#"
let q = \(x, y) { x * y }
let x = q(5, 2)
let y = \(m, n, o) { m + n + o }(1,2,3)
(x, y)
"#;
eval_assert(source, r"(10, 6)");
}
#[test]
fn basic_lambda_evaluation_2() {
let source = r#"
fn milta() {
\(x) { x + 33 }
}
milta()(10)
"#;
eval_assert(source, "43");
}
#[test]
fn import_all() {
let source = r#"
type Option<T> = Some(T) | None
import Option::*
let x = Some(9); if x is Some(q) then { q } else { 0 }"#;
eval_assert(source, "9");
}
#[test]
fn accessors() {
let source = r#"
type Klewos = { a: Int, b: String }
let value = Klewos::Klewos { a: 50, b: "nah" }
(value.a, value.b)
"#;
eval_assert(source, r#"(50, "nah")"#);
}
#[test]
fn early_return() {
let source = r#"
fn chnurmek(a: Int): Int {
if a == 5 then {
return 9999;
}
return (a + 2);
}
(chnurmek(5), chnurmek(0))
"#;
eval_assert(source, r#"(9999, 2)"#);
let source = r#"
fn marbuk(a: Int, b: Int): (Int, Int) {
if a == 5 then {
if b == 6 then {
return (50, 50);
}
return (a, b + 1)
}
(a * 100, b * 100)
}
let x = marbuk(1, 1)
let y = marbuk(5, 1)
let z = marbuk(5, 6)
(x, y, z)
"#;
eval_assert(source, "((100, 100), (5, 2), (50, 50))");
}
#[test]
fn loops() {
let source = r#"
let mut a = 0
let mut count = 0
while a != 5 {
a = a + 1
count = count + 100
}
count
"#;
eval_assert(source, "500");
}
#[test]
fn loops_2() {
let source = r#"
let mut a = 0
let mut acc = 0
while a < 10 {
acc = acc + 1
a = a + 1
// Without this continue, the output would be 20
if a == 5 then {
continue
}
acc = acc + 1
}
acc"#;
eval_assert(source, "19");
}
#[test]
fn list_literals() {
eval_assert(
r#"
let a = [7, 8, 9]
a
"#,
"[7, 8, 9]",
);
eval_assert(
r#"
let a = [7, 8, 9]
fn foo() { return 2 }
(a[0], a[foo()])
"#,
"(7, 9)",
);
}
#[test]
fn eval_method() {
let src = r#"
type Thing = Thing
impl Thing {
fn a_method() {
20
}
}
let a = Thing::Thing
4 + a.a_method()
"#;
eval_assert(src, "24");
}
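// A hedged extra check (not part of the original suite): a tuple of arithmetic
// expressions evaluates elementwise and prints with the same formatting as above.
#[test]
fn tuple_of_expressions() {
eval_assert("(1 + 1, 10 - 2)", "(2, 8)");
}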


@ -1,227 +0,0 @@
use std::{collections::HashMap, convert::From};
use crate::{
ast::{TypeIdentifier, AST},
identifier::{define_id_kind, Id, IdStore},
};
define_id_kind!(TypeItem);
pub type TypeId = Id<TypeItem>;
pub struct TypeContext {
defined_types: HashMap<TypeId, DefinedType>,
type_id_store: IdStore<TypeItem>,
}
impl TypeContext {
pub fn new() -> Self {
Self { defined_types: HashMap::new(), type_id_store: IdStore::new() }
}
pub fn register_type(&mut self, builder: TypeBuilder) -> TypeId {
let type_id = self.type_id_store.fresh();
let mut pending_variants = vec![];
for variant_builder in builder.variants.into_iter() {
let members = variant_builder.members;
if members.is_empty() {
pending_variants.push(Variant { name: variant_builder.name, members: VariantMembers::Unit });
continue;
}
let record_variant = matches!(members.get(0).unwrap(), VariantMemberBuilder::KeyVal(..));
if record_variant {
let pending_members = members.into_iter().map(|var| match var {
VariantMemberBuilder::KeyVal(name, ty) => (name, ty),
_ => panic!("Compiler internal error: variant mismatch"),
});
//TODO make this mapping meaningful
let type_ids = pending_members
.into_iter()
.map(|(name, _ty_id)| (name, self.type_id_store.fresh()))
.collect();
pending_variants
.push(Variant { name: variant_builder.name, members: VariantMembers::Record(type_ids) });
} else {
let pending_members = members.into_iter().map(|var| match var {
VariantMemberBuilder::Pending(pending_type) => pending_type,
_ => panic!("Compiler internal error: variant mismatch"),
});
//TODO make this mapping meaningful
let type_ids = pending_members.into_iter().map(|_ty_id| self.type_id_store.fresh()).collect();
pending_variants
.push(Variant { name: variant_builder.name, members: VariantMembers::Tuple(type_ids) });
}
}
// Eventually, I will want to have a better way of determining which numeric tag goes with
// which variant. For now, just sort them alphabetically.
pending_variants.sort_unstable_by(|a, b| a.name.cmp(&b.name));
let defined = DefinedType { name: builder.name, variants: pending_variants };
self.defined_types.insert(type_id, defined);
type_id
}
pub fn variant_local_name(&self, type_id: &TypeId, tag: u32) -> Option<&str> {
self.defined_types
.get(type_id)
.and_then(|defined| defined.variants.get(tag as usize))
.map(|variant| variant.name.as_ref())
}
pub fn lookup_variant_arity(&self, type_id: &TypeId, tag: u32) -> Option<u32> {
self.defined_types.get(type_id).and_then(|defined| defined.variants.get(tag as usize)).map(
|variant| match &variant.members {
VariantMembers::Unit => 0,
VariantMembers::Tuple(items) => items.len() as u32,
VariantMembers::Record(items) => items.len() as u32,
},
)
}
pub fn lookup_record_members(&self, type_id: &TypeId, tag: u32) -> Option<&[(String, TypeId)]> {
self.defined_types.get(type_id).and_then(|defined| defined.variants.get(tag as usize)).and_then(
|variant| match &variant.members {
VariantMembers::Record(items) => Some(items.as_ref()),
_ => None,
},
)
}
pub fn lookup_type(&self, type_id: &TypeId) -> Option<&DefinedType> {
self.defined_types.get(type_id)
}
//TODO return some kind of overall type later?
pub fn typecheck(&mut self, ast: &AST) -> Result<(), TypeError> {
Ok(())
}
}
/// A type defined in program source code, as opposed to a builtin.
#[allow(dead_code)]
#[derive(Debug)]
pub struct DefinedType {
pub name: String,
// the variants are in this list according to tag order
pub variants: Vec<Variant>,
}
#[derive(Debug)]
pub struct Variant {
pub name: String,
pub members: VariantMembers,
}
#[derive(Debug)]
pub enum VariantMembers {
Unit,
// Should be non-empty
Tuple(Vec<TypeId>),
Record(Vec<(String, TypeId)>),
}
/// Represents a type mentioned as a member of another type during the type registration process.
/// It may not have been registered itself in the relevant context.
#[allow(dead_code)]
#[derive(Debug)]
pub struct PendingType {
inner: TypeIdentifier,
}
impl From<&TypeIdentifier> for PendingType {
fn from(type_identifier: &TypeIdentifier) -> Self {
Self { inner: type_identifier.clone() }
}
}
#[derive(Debug)]
pub struct TypeBuilder {
name: String,
variants: Vec<VariantBuilder>,
}
impl TypeBuilder {
pub fn new(name: &str) -> Self {
Self { name: name.to_string(), variants: vec![] }
}
pub fn add_variant(&mut self, vb: VariantBuilder) {
self.variants.push(vb);
}
}
#[derive(Debug)]
pub struct VariantBuilder {
name: String,
members: Vec<VariantMemberBuilder>,
}
impl VariantBuilder {
pub fn new(name: &str) -> Self {
Self { name: name.to_string(), members: vec![] }
}
pub fn add_member(&mut self, member_ty: PendingType) {
self.members.push(VariantMemberBuilder::Pending(member_ty));
}
// You can't call this and `add_member` on the same fn, there should be a runtime error when
// that's detected.
pub fn add_record_member(&mut self, name: &str, ty: PendingType) {
self.members.push(VariantMemberBuilder::KeyVal(name.to_string(), ty));
}
}
#[derive(Debug)]
enum VariantMemberBuilder {
Pending(PendingType),
KeyVal(String, PendingType),
}
#[derive(Debug, Clone)]
pub struct TypeError {
pub msg: String,
}
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TypeConst {
Unit,
Nat,
Int,
Float,
StringT,
Bool,
Ordering,
}
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum Type {
Const(TypeConst),
//Var(TypeVar),
Arrow { params: Vec<Type>, ret: Box<Type> },
Compound { ty_name: String, args: Vec<Type> },
}
macro_rules! ty {
($type_name:ident) => {
Type::Const(crate::type_inference::TypeConst::$type_name)
};
($t1:ident -> $t2:ident) => {
Type::Arrow { params: vec![ty!($t1)], ret: Box::new(ty!($t2)) }
};
($t1:ident -> $t2:ident -> $t3:ident) => {
Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: Box::new(ty!($t3)) }
};
($type_list:ident, $ret_type:ident) => {
Type::Arrow { params: $type_list, ret: Box::new($ret_type) }
};
}
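// A hedged usage sketch (not in the original source): registering a simple sum type
// of unit variants, using only the builder API defined above.
#[allow(dead_code)]
fn example_register_direction(ctx: &mut TypeContext) -> TypeId {
let mut builder = TypeBuilder::new("Direction");
builder.add_variant(VariantBuilder::new("North"));
builder.add_variant(VariantBuilder::new("South"));
ctx.register_type(builder)
}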


@ -1,522 +1,493 @@
[added version of this file:]

use std::cell::RefCell;
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use std::fmt::Write;
use ast;
use util::StateStack;
use symbol_table::{SymbolSpec, SymbolTable};
pub type TypeName = Rc<String>;
type TypeResult<T> = Result<T, String>;
#[derive(Debug, PartialEq, Clone)]
enum Type {
Const(TConst),
Var(TypeName),
Func(Vec<Type>),
}
#[derive(Debug, PartialEq, Clone)]
enum TConst {
Unit,
Nat,
StringT,
Custom(String)
}
#[derive(Debug, PartialEq, Clone)]
struct Scheme {
names: Vec<TypeName>,
ty: Type,
}
impl fmt::Display for Scheme {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "∀{:?} . {:?}", self.names, self.ty)
}
}
#[derive(Debug, PartialEq, Clone)]
struct Substitution(HashMap<TypeName, Type>);
impl Substitution {
fn empty() -> Substitution {
Substitution(HashMap::new())
}
}
#[derive(Debug, PartialEq, Clone)]
struct TypeEnv(HashMap<TypeName, Scheme>);
impl TypeEnv {
fn default() -> TypeEnv {
TypeEnv(HashMap::new())
}
fn populate_from_symbols(&mut self, symbol_table: &SymbolTable) {
for (name, symbol) in symbol_table.values.iter() {
if let SymbolSpec::Func(ref type_names) = symbol.spec {
let mut ch: char = 'a';
let mut names = vec![];
for _ in type_names.iter() {
names.push(Rc::new(format!("{}", ch)));
ch = ((ch as u8) + 1) as char;
}
let sigma = Scheme {
names: names.clone(),
ty: Type::Func(names.into_iter().map(|n| Type::Var(n)).collect())
};
self.0.insert(name.clone(), sigma);
}
}
}
}
pub struct TypeContext<'a> {
values: StateStack<'a, TypeName, Type>,
symbol_table_handle: Rc<RefCell<SymbolTable>>,
global_env: TypeEnv
}
impl<'a> TypeContext<'a> {
pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> TypeContext<'static> {
TypeContext { values: StateStack::new(None), global_env: TypeEnv::default(), symbol_table_handle }
}
pub fn debug_types(&self) -> String {
let mut output = format!("Type environment\n");
for (name, scheme) in &self.global_env.0 {
write!(output, "{} -> {}\n", name, scheme).unwrap();
}
output
}
pub fn type_check_ast(&mut self, input: &ast::AST) -> Result<String, String> {
let ref symbol_table = self.symbol_table_handle.borrow();
self.global_env.populate_from_symbols(symbol_table);
let output = self.global_env.infer_block(&input.0)?;
Ok(format!("{:?}", output))
}
}
impl TypeEnv {
fn instantiate(&mut self, sigma: Scheme) -> Type {
match sigma {
Scheme { ty, .. } => ty,
}
}
fn generate(&mut self, ty: Type) -> Scheme {
Scheme {
names: vec![], //TODO incomplete
ty
}
}
fn infer_block(&mut self, block: &Vec<ast::Statement>) -> TypeResult<Type> {
let mut output = Type::Const(TConst::Unit);
for statement in block {
output = self.infer_statement(statement)?;
}
Ok(output)
}
fn infer_statement(&mut self, statement: &ast::Statement) -> TypeResult<Type> {
match statement {
ast::Statement::ExpressionStatement(expr) => self.infer_expr(expr),
ast::Statement::Declaration(decl) => self.infer_decl(decl)
}

[removed version of this file:]

use std::rc::Rc;
use std::convert::TryFrom;
use std::fmt;
/*
use std::collections::hash_set::Union;
use std::iter::Iterator;
use itertools::Itertools;
*/
use ena::unify::{UnifyKey, InPlaceUnificationTable, UnificationTable, EqUnifyValue};
use crate::builtin::Builtin;
use crate::ast::*;
use crate::util::ScopeStack;
use crate::util::deref_optional_box;
#[derive(Debug, Clone, PartialEq)]
pub struct TypeData {
ty: Option<Type>
}
impl TypeData {
#[allow(dead_code)]
pub fn new() -> TypeData {
TypeData { ty: None }
}
}
//TODO need to hook this into the actual typechecking system somehow
#[derive(Debug, Clone)]
pub struct TypeId {
local_name: Rc<String>
}
impl TypeId {
//TODO this is definitely incomplete
pub fn lookup_name(name: &str) -> TypeId {
TypeId {
local_name: Rc::new(name.to_string())
}
}
pub fn local_name(&self) -> &str {
self.local_name.as_ref()
}
}
impl fmt::Display for TypeId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TypeId:{}", self.local_name)
}
}
pub struct TypeContext<'a> {
variable_map: ScopeStack<'a, Rc<String>, Type>,
unification_table: InPlaceUnificationTable<TypeVar>,
}
/// `InferResult` is the monad in which type inference takes place.
type InferResult<T> = Result<T, TypeError>;
#[derive(Debug, Clone)]
pub struct TypeError { pub msg: String }
impl TypeError {
fn new<A, T>(msg: T) -> InferResult<A> where T: Into<String> {
Err(TypeError { msg: msg.into() })
}
}
#[allow(dead_code)] // avoids warning from Compound
#[derive(Debug, Clone, PartialEq)]
pub enum Type {
Const(TypeConst),
Var(TypeVar),
Arrow {
params: Vec<Type>,
ret: Box<Type>
},
Compound {
ty_name: String,
args:Vec<Type>
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeVar(usize);
impl UnifyKey for TypeVar {
type Value = Option<TypeConst>;
fn index(&self) -> u32 { self.0 as u32 }
fn from_index(u: u32) -> TypeVar { TypeVar(u as usize) }
fn tag() -> &'static str { "TypeVar" }
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TypeConst {
Unit,
Nat,
Int,
Float,
StringT,
Bool,
Ordering,
//UserDefined
}
impl TypeConst {
/*
#[allow(dead_code)]
pub fn to_string(&self) -> String {
use self::TypeConst::*;
match self {
Unit => "()".to_string(),
Nat => "Nat".to_string(),
Int => "Int".to_string(),
Float => "Float".to_string(),
StringT => "String".to_string(),
Bool => "Bool".to_string(),
Ordering => "Ordering".to_string(),
}
}
*/
}
impl EqUnifyValue for TypeConst { }
macro_rules! ty {
($type_name:ident) => { Type::Const(TypeConst::$type_name) };
($t1:ident -> $t2:ident) => { Type::Arrow { params: vec![ty!($t1)], ret: box ty!($t2) } };
($t1:ident -> $t2:ident -> $t3:ident) => { Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: box ty!($t3) } };
($type_list:ident, $ret_type:ident) => {
Type::Arrow {
params: $type_list,
ret: box $ret_type,
}
}
}
//TODO find a better way to capture the to/from string logic
impl Type {
/*
#[allow(dead_code)]
pub fn to_string(&self) -> String {
use self::Type::*;
match self {
Const(c) => c.to_string(),
Var(v) => format!("t_{}", v.0),
Arrow { params, box ref ret } => {
if params.is_empty() {
format!("-> {}", ret.to_string())
} else {
let mut buf = String::new();
for p in params.iter() {
write!(buf, "{} -> ", p.to_string()).unwrap();
}
write!(buf, "{}", ret.to_string()).unwrap();
buf
}
},
Compound { .. } => "<some compound type>".to_string()
}
}
*/
fn from_string(string: &str) -> Option<Type> {
Some(match string {
"()" | "Unit" => ty!(Unit),
"Nat" => ty!(Nat),
"Int" => ty!(Int),
"Float" => ty!(Float),
"String" => ty!(StringT),
"Bool" => ty!(Bool),
"Ordering" => ty!(Ordering),
_ => return None
})
}
}
/*
/// `Type` is parameterized by whether the type variables can be just universal, or universal or
/// existential.
#[derive(Debug, Clone)]
enum Type<A> {
Var(A),
Const(TConst),
Arrow(Box<Type<A>>, Box<Type<A>>),
}
#[derive(Debug, Clone)]
enum TVar {
Univ(UVar),
Exist(ExistentialVar)
}
#[derive(Debug, Clone)]
struct UVar(Rc<String>);
#[derive(Debug, Clone)]
struct ExistentialVar(u32);
impl Type<UVar> {
fn to_tvar(&self) -> Type<TVar> {
match self {
Type::Var(UVar(name)) => Type::Var(TVar::Univ(UVar(name.clone()))),
Type::Const(ref c) => Type::Const(c.clone()),
Type::Arrow(a, b) => Type::Arrow(
Box::new(a.to_tvar()),
Box::new(b.to_tvar())
)
}
}
}
impl Type<TVar> {
fn skolemize(&self) -> Type<UVar> {
match self {
Type::Var(TVar::Univ(uvar)) => Type::Var(uvar.clone()),
Type::Var(TVar::Exist(_)) => Type::Var(UVar(Rc::new(format!("sk")))),
Type::Const(ref c) => Type::Const(c.clone()),
Type::Arrow(a, b) => Type::Arrow(
Box::new(a.skolemize()),
Box::new(b.skolemize())
)
}
}
}
impl TypeIdentifier {
fn to_monotype(&self) -> Type<UVar> {
match self {
TypeIdentifier::Tuple(_) => Type::Const(TConst::Nat),
TypeIdentifier::Singleton(TypeSingletonName { name, .. }) => {
match &name[..] {
"Nat" => Type::Const(TConst::Nat),
"Int" => Type::Const(TConst::Int),
"Float" => Type::Const(TConst::Float),
"Bool" => Type::Const(TConst::Bool),
"String" => Type::Const(TConst::StringT),
_ => Type::Const(TConst::Nat),
}
}
}
}
}
#[derive(Debug, Clone)]
enum TConst {
User(Rc<String>),
Unit,
Nat,
Int,
Float,
StringT,
Bool,
}
impl TConst {
fn user(name: &str) -> TConst {
TConst::User(Rc::new(name.to_string()))
}
}
*/
impl<'a> TypeContext<'a> {
pub fn new() -> TypeContext<'a> {
TypeContext {
variable_map: ScopeStack::new(None),
unification_table: UnificationTable::new(),
}
}
/*
fn new_env(&'a self, new_var: Rc<String>, ty: Type) -> TypeContext<'a> {
let mut new_context = TypeContext {
variable_map: self.variable_map.new_scope(None),
unification_table: UnificationTable::new(), //???? not sure if i want this
};
new_context.variable_map.insert(new_var, ty);
new_context
}
*/
fn get_type_from_name(&self, name: &TypeIdentifier) -> InferResult<Type> {
use self::TypeIdentifier::*;
Ok(match name {
Singleton(TypeSingletonName { name,.. }) => {
match Type::from_string(name) {
Some(ty) => ty,
None => return TypeError::new(format!("Unknown type name: {}", name))
}
},
Tuple(_) => return TypeError::new("tuples aren't ready yet"),
})
}
/// `typecheck` is the entry into the type-inference system, accepting an AST as an argument
/// Following the example of GHC, the compiler deliberately does typechecking before de-sugaring
/// the AST to ReducedAST
pub fn typecheck(&mut self, ast: &AST) -> Result<Type, TypeError> {
let mut returned_type = Type::Const(TypeConst::Unit);
for statement in ast.statements.statements.iter() {
returned_type = self.statement(statement)?;
}
Ok(returned_type)
}
fn statement(&mut self, statement: &Statement) -> InferResult<Type> {
match &statement.kind {
StatementKind::Expression(e) => self.expr(e),
StatementKind::Declaration(decl) => self.decl(decl),
StatementKind::Import(_) => Ok(ty!(Unit)),
StatementKind::Module(_) => Ok(ty!(Unit)),
}
}
fn decl(&mut self, decl: &Declaration) -> InferResult<Type> {
use self::Declaration::*;
if let Binding { name, expr, .. } = decl {
let ty = self.expr(expr)?;
self.variable_map.insert(name.clone(), ty);
}
Ok(ty!(Unit))
}
fn invoc(&mut self, invoc: &InvocationArgument) -> InferResult<Type> {
use InvocationArgument::*;
match invoc {
Positional(expr) => self.expr(expr),
_ => Ok(ty!(Nat)) //TODO this is wrong
}
}
fn expr(&mut self, expr: &Expression) -> InferResult<Type> {
match expr {
Expression { kind, type_anno: Some(anno), .. } => {
let t1 = self.expr_type(kind)?;
let t2 = self.get_type_from_name(anno)?;
self.unify(t2, t1)
},
Expression { kind, type_anno: None, .. } => self.expr_type(kind)
}
}
fn expr_type(&mut self, expr: &ExpressionKind) -> InferResult<Type> {
use self::ExpressionKind::*;
Ok(match expr {
NatLiteral(_) => ty!(Nat),
BoolLiteral(_) => ty!(Bool),
FloatLiteral(_) => ty!(Float),
StringLiteral(_) => ty!(StringT),
PrefixExp(op, expr) => self.prefix(op, expr)?,
BinExp(op, lhs, rhs) => self.binexp(op, lhs, rhs)?,
IfExpression { discriminator, body } => self.if_expr(deref_optional_box(discriminator), &**body)?,
Value(val) => self.handle_value(val)?,
Call { box ref f, arguments } => self.call(f, arguments)?,
Lambda { params, type_anno, body } => self.lambda(params, type_anno, body)?,
_ => ty!(Unit),
})
}
fn prefix(&mut self, op: &PrefixOp, expr: &Expression) -> InferResult<Type> {
let builtin: Option<Builtin> = TryFrom::try_from(op).ok();
let tf = match builtin.map(|b| b.get_type()) {
Some(ty) => ty,
None => return TypeError::new("no type found")
};
let tx = self.expr(expr)?;
self.handle_apply(tf, vec![tx])
}
fn binexp(&mut self, op: &BinOp, lhs: &Expression, rhs: &Expression) -> InferResult<Type> {
let builtin: Option<Builtin> = TryFrom::try_from(op).ok();
let tf = match builtin.map(|b| b.get_type()) {
Some(ty) => ty,
None => return TypeError::new("no type found"),
};
let t_lhs = self.expr(lhs)?;
let t_rhs = self.expr(rhs)?; //TODO is this order a problem? not sure
self.handle_apply(tf, vec![t_lhs, t_rhs])
}
fn if_expr(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> InferResult<Type> {
use self::IfExpressionBody::*;
match (discriminator, body) {
(Some(expr), SimpleConditional{ then_case, else_case }) => self.handle_simple_if(expr, then_case, else_case),
_ => TypeError::new("Complex conditionals not supported".to_string())
}
}
#[allow(clippy::ptr_arg)]
fn handle_simple_if(&mut self, expr: &Expression, then_clause: &Block, else_clause: &Option<Block>) -> InferResult<Type> {
let t1 = self.expr(expr)?;
let t2 = self.block(then_clause)?;
let t3 = match else_clause {
Some(block) => self.block(block)?,
None => ty!(Unit)
};
let _ = self.unify(ty!(Bool), t1)?;
self.unify(t2, t3)
}
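// Worked example of the rule above, assuming a conditional like `if b then 1 else 2` with b: Bool
// (surface syntax illustrative):
//   t1 = type of the condition  -> unified with ty!(Bool)
//   t2 = type of the then-block -> Nat
//   t3 = type of the else-block -> Nat (a missing else-block makes t3 = ty!(Unit) instead)
//   result = unify(t2, t3) = Nat; so `if b then 1` with no else only checks if the then-block is Unit.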
#[allow(clippy::ptr_arg)]
fn lambda(&mut self, params: &Vec<FormalParam>, type_anno: &Option<TypeIdentifier>, _body: &Block) -> InferResult<Type> {
let argument_types: InferResult<Vec<Type>> = params.iter().map(|param: &FormalParam| {
if let FormalParam { anno: Some(type_identifier), .. } = param {
self.get_type_from_name(type_identifier)
} else {
Ok(Type::Var(self.fresh_type_variable()))
}
}).collect();
let argument_types = argument_types?;
let ret_type = match type_anno.as_ref() {
Some(anno) => self.get_type_from_name(anno)?,
None => Type::Var(self.fresh_type_variable())
};
Ok(ty!(argument_types, ret_type))
}
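// Sketch of what this produces for a lambda with one annotated and one unannotated parameter
// (parameter syntax illustrative): params (x: Int, y) with no return annotation give
//   argument_types = [Int, Var(t0)]   -- t0 comes from fresh_type_variable()
//   ret_type       = Var(t1)
//   result         = ty!([Int, Var(t0)], Var(t1)), an arrow type from the parameters to the return.
// Note that the body (`_body`) is not inferred here yet, so t0 and t1 stay unconstrained.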
fn call(&mut self, f: &Expression, args: &[ InvocationArgument ]) -> InferResult<Type> {
let tf = self.expr(f)?;
let arg_types: InferResult<Vec<Type>> = args.iter().map(|ex| self.invoc(ex)).collect();
let arg_types = arg_types?;
self.handle_apply(tf, arg_types)
}
fn handle_apply(&mut self, tf: Type, args: Vec<Type>) -> InferResult<Type> {
Ok(match tf {
Type::Arrow { ref params, ret: box ref t_ret } if params.len() == args.len() => {
for (t_param, t_arg) in params.iter().zip(args.iter()) {
let _ = self.unify(t_param.clone(), t_arg.clone())?; //TODO I think this needs to reference a sub-scope
}
t_ret.clone()
},
Type::Arrow { .. } => return TypeError::new("Wrong length"),
_ => return TypeError::new("Not a function".to_string())
})
}
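// Sketch: applying a callee of type (Nat, Nat) -> Bool to argument types [Nat, Var(t0)]:
//   params.len() == args.len() == 2, so each (param, arg) pair is unified in order:
//     unify(Nat, Nat)     -> Nat
//     unify(Nat, Var(t0)) -> binds t0 to Nat in the unification table
//   and the whole call takes the return type Bool.
// A length mismatch reports "Wrong length"; a non-arrow callee reports "Not a function".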
#[allow(clippy::ptr_arg)]
fn block(&mut self, block: &Block) -> InferResult<Type> {
let mut output = ty!(Unit);
for statement in block.statements.iter() {
output = self.statement(statement)?;
} }
Ok(output)
} }
fn infer_statement(&mut self, statement: &ast::Statement) -> TypeResult<Type> {
fn handle_value(&mut self, val: &QualifiedName) -> InferResult<Type> { match statement {
let QualifiedName { components: vec, .. } = val; ast::Statement::ExpressionStatement(expr) => self.infer_expr(expr),
let var = &vec[0]; ast::Statement::Declaration(decl) => self.infer_decl(decl)
match self.variable_map.lookup(var) { }
Some(ty) => Ok(ty.clone()), }
None => TypeError::new(format!("Couldn't find variable: {}", &var)), fn infer_decl(&mut self, decl: &ast::Declaration) -> TypeResult<Type> {
use ast::Declaration::*;
match decl {
Binding { name, expr, .. } => {
let ty = self.infer_expr(expr)?;
let sigma = self.generate(ty);
self.0.insert(name.clone(), sigma);
},
_ => (),
}
Ok(Type::Const(TConst::Unit))
}
fn infer_expr(&mut self, expr: &ast::Expression) -> TypeResult<Type> {
match expr {
ast::Expression(expr, Some(anno)) => {
self.infer_exprtype(expr)
},
ast::Expression(expr, None) => {
self.infer_exprtype(expr)
}
} }
} }
fn unify(&mut self, t1: Type, t2: Type) -> InferResult<Type> { fn infer_exprtype(&mut self, expr: &ast::ExpressionType) -> TypeResult<Type> {
use self::TConst::*;
use ast::ExpressionType::*;
Ok(match expr {
NatLiteral(_) => Type::Const(Nat),
StringLiteral(_) => Type::Const(StringT),
BinExp(op, lhs, rhs) => {
return Err(format!("NOTDONE"))
},
Call { f, arguments } => {
return Err(format!("NOTDONE"))
},
Value(name) => {
let s = match self.0.get(name) {
Some(sigma) => sigma.clone(),
None => return Err(format!("Unknown variable: {}", name))
};
self.instantiate(s)
},
_ => Type::Const(Unit)
})
}
}
/* GIANT TODO - use the rust im crate, unless I make this code way less haskell-ish after it's done
*/
/*
pub type TypeResult<T> = Result<T, String>;
*/
/* TODO this should just check the name against a map, and that map should be pre-populated with
* types */
/*
impl parsing::TypeName {
fn to_type(&self) -> TypeResult<Type> {
use self::parsing::TypeSingletonName;
use self::parsing::TypeName::*;
use self::Type::*; use self::TConstOld::*;
Ok(match self {
Tuple(_) => return Err(format!("Tuples not yet implemented")),
Singleton(name) => match name {
TypeSingletonName { name, .. } => match &name[..] {
/*
"Nat" => Const(Nat),
"Int" => Const(Int),
"Float" => Const(Float),
"Bool" => Const(Bool),
"String" => Const(StringT),
*/
n => Const(Custom(n.to_string()))
}
}
})
}
}
*/
/*
impl TypeContext {
pub fn type_check_ast(&mut self, ast: &parsing::AST) -> TypeResult<String> {
let ref block = ast.0;
let mut infer = Infer::default();
let env = TypeEnvironment::default();
let output = infer.infer_block(block, &env);
match output {
Ok(s) => Ok(format!("{:?}", s)),
Err(s) => Err(format!("Error: {:?}", s))
}
}
}
// this is the equivalent of the Haskell Infer monad
#[derive(Debug, Default)]
struct Infer {
_idents: u32,
}
#[derive(Debug)]
enum InferError {
CannotUnify(MonoType, MonoType),
OccursCheckFailed(Rc<String>, MonoType),
UnknownIdentifier(Rc<String>),
Custom(String),
}
type InferResult<T> = Result<T, InferError>;
impl Infer {
fn fresh(&mut self) -> MonoType {
let i = self._idents;
self._idents += 1;
let name = Rc::new(format!("{}", ('a' as u8 + 1) as char));
MonoType::Var(name)
}
fn unify(&mut self, a: MonoType, b: MonoType) -> InferResult<Substitution> {
use self::InferError::*; use self::MonoType::*;
Ok(match (a, b) {
(Const(ref a), Const(ref b)) if a == b => Substitution::new(),
(Var(ref name), ref var) => Substitution::bind_variable(name, var),
(ref var, Var(ref name)) => Substitution::bind_variable(name, var),
(Function(box a1, box b1), Function(box a2, box b2)) => {
let s1 = self.unify(a1, a2)?;
let s2 = self.unify(b1.apply_substitution(&s1), b2.apply_substitution(&s1))?;
s1.merge(s2)
},
(a, b) => return Err(CannotUnify(a, b))
})
}
fn infer_block(&mut self, block: &Vec<parsing::Statement>, env: &TypeEnvironment) -> InferResult<MonoType> {
use self::parsing::Statement;
let mut ret = MonoType::Const(TypeConst::Unit);
for statement in block.iter() {
ret = match statement {
Statement::ExpressionStatement(expr) => {
let (sub, ty) = self.infer_expr(expr, env)?;
//TODO handle substitution monadically
ty
}
Statement::Declaration(decl) => MonoType::Const(TypeConst::Unit),
}
}
Ok(ret)
}
fn infer_expr(&mut self, expr: &parsing::Expression, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
use self::parsing::Expression;
match expr {
Expression(e, Some(anno)) => self.infer_annotated_expr(e, anno, env),
/*
let anno_ty = anno.to_type()?;
let ty = self.infer_exprtype(&e)?;
self.unify(ty, anno_ty)
},
*/
Expression(e, None) => self.infer_exprtype(e, env)
}
}
fn infer_annotated_expr(&mut self, expr: &parsing::ExpressionType, anno: &parsing::TypeName, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
Err(InferError::Custom(format!("exprtype not done: {:?}", expr)))
}
fn infer_exprtype(&mut self, expr: &parsing::ExpressionType, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
use self::parsing::ExpressionType::*;
use self::TypeConst::*;
Ok(match expr {
NatLiteral(_) => (Substitution::new(), MonoType::Const(Nat)),
FloatLiteral(_) => (Substitution::new(), MonoType::Const(Float)),
StringLiteral(_) => (Substitution::new(), MonoType::Const(StringT)),
BoolLiteral(_) => (Substitution::new(), MonoType::Const(Bool)),
Value(name) => match env.lookup(name) {
Some(sigma) => {
let tau = self.instantiate(&sigma);
(Substitution::new(), tau)
},
None => return Err(InferError::UnknownIdentifier(name.clone())),
},
e => return Err(InferError::Custom(format!("Type inference for {:?} not done", e)))
})
}
fn instantiate(&mut self, sigma: &PolyType) -> MonoType {
let ref ty: MonoType = sigma.1;
let mut subst = Substitution::new();
for name in sigma.0.iter() {
let fresh_mvar = self.fresh();
let new = Substitution::bind_variable(name, &fresh_mvar);
subst = subst.merge(new);
}
ty.apply_substitution(&subst)
}
}
*/
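// Illustration of how the commented-out Infer design above was meant to work (names as in that sketch):
//   unify(Var("a"), Const(Nat))  =>  a Substitution binding "a" -> Const(Nat)
//   unify(Function(Var("a"), Var("b")), Function(Const(Nat), Const(Bool)))
//       => unify the argument sides first (s1 binds "a" -> Nat), apply s1 to both result sides,
//          then unify those (s2 binds "b" -> Bool), and merge s1 with s2.
//   instantiate(sigma) for a PolyType quantifying "a" over Function(Var("a"), Var("a"))
//       => replaces each quantified name with a fresh variable, e.g. Function(Var("t0"), Var("t0")).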
/* OLD STUFF DOWN HERE */
/*
impl TypeContext {
fn infer_block(&mut self, statements: &Vec<parsing::Statement>) -> TypeResult<Type> {
let mut ret_type = Type::Const(TConst::Unit);
for statement in statements {
ret_type = self.infer_statement(statement)?;
}
Ok(ret_type)
}
fn infer_statement(&mut self, statement: &parsing::Statement) -> TypeResult<Type> {
use self::parsing::Statement::*;
match statement {
ExpressionStatement(expr) => self.infer(expr),
Declaration(decl) => self.add_declaration(decl),
}
}
fn add_declaration(&mut self, decl: &parsing::Declaration) -> TypeResult<Type> {
use self::parsing::Declaration::*;
use self::Type::*;
match decl {
Binding { name, expr, .. } => {
let ty = self.infer(expr)?;
self.bindings.insert(name.clone(), ty);
},
_ => return Err(format!("other formats not done"))
}
Ok(Void)
}
fn infer(&mut self, expr: &parsing::Expression) -> TypeResult<Type> {
use self::parsing::Expression;
match expr {
Expression(e, Some(anno)) => {
let anno_ty = anno.to_type()?;
let ty = self.infer_exprtype(&e)?;
self.unify(ty, anno_ty)
},
Expression(e, None) => self.infer_exprtype(e)
}
}
fn infer_exprtype(&mut self, expr: &parsing::ExpressionType) -> TypeResult<Type> {
use self::parsing::ExpressionType::*;
use self::Type::*; use self::TConst::*;
match expr {
NatLiteral(_) => Ok(Const(Nat)),
FloatLiteral(_) => Ok(Const(Float)),
StringLiteral(_) => Ok(Const(StringT)),
BoolLiteral(_) => Ok(Const(Bool)),
BinExp(op, lhs, rhs) => { /* remember there are both the haskell convention talk and the write you a haskell ways to do this! */
match op.get_type()? {
Func(box t1, box Func(box t2, box t3)) => {
let lhs_ty = self.infer(lhs)?;
let rhs_ty = self.infer(rhs)?;
self.unify(t1, lhs_ty)?;
self.unify(t2, rhs_ty)?;
Ok(t3)
},
other => Err(format!("{:?} is not a binary function type", other))
}
},
PrefixExp(op, expr) => match op.get_type()? {
Func(box t1, box t2) => {
let expr_ty = self.infer(expr)?;
self.unify(t1, expr_ty)?;
Ok(t2)
},
other => Err(format!("{:?} is not a prefix op function type", other))
},
Value(name) => {
match self.bindings.get(name) {
Some(ty) => Ok(ty.clone()),
None => Err(format!("No binding found for variable: {}", name)),
}
},
Call { f, arguments } => {
let mut tf = self.infer(f)?;
for arg in arguments.iter() {
match tf {
Func(box t, box rest) => {
let t_arg = self.infer(arg)?;
self.unify(t, t_arg)?;
tf = rest;
},
other => return Err(format!("Function call failed to unify; last type: {:?}", other)),
}
}
Ok(tf)
},
TupleLiteral(expressions) => {
let mut types = vec![];
for expr in expressions {
types.push(self.infer(expr)?);
}
Ok(Sum(types))
},
_ => Err(format!("Type not yet implemented"))
}
}
fn unify(&mut self, t1: Type, t2: Type) -> TypeResult<Type> {
use self::Type::*;// use self::TConst::*;
match (t1, t2) { match (t1, t2) {
(Const(ref c1), Const(ref c2)) if c1 == c2 => Ok(Const(c1.clone())), //choice of c1 is arbitrary I *think* (Const(ref a), Const(ref b)) if a == b => Ok(Const(a.clone())),
(a @ Var(_), b @ Const(_)) => self.unify(b, a), (a, b) => Err(format!("Types {:?} and {:?} don't unify", a, b))
(Const(ref c1), Var(ref v2)) => {
self.unification_table.unify_var_value(*v2, Some(c1.clone()))
.or_else(|_| TypeError::new(format!("Couldn't unify {:?} and {:?}", Const(c1.clone()), Var(*v2))))?;
Ok(Const(c1.clone()))
},
(Var(v1), Var(v2)) => {
//TODO add occurs check
self.unification_table.unify_var_var(v1, v2)
.or_else(|e| {
println!("Unify error: {:?}", e);
TypeError::new(format!("Two type variables {:?} and {:?} couldn't unify", v1, v2))
})?;
Ok(Var(v1)) //arbitrary decision I think
},
(a, b) => TypeError::new(format!("{:?} and {:?} do not unify", a, b)),
} }
} }
}
*/
fn fresh_type_variable(&mut self) -> TypeVar {
self.unification_table.new_key(None)
}
}
#[cfg(test)]
mod typechecking_tests {
    use super::*;

    macro_rules! assert_type_in_fresh_context {
        ($string:expr, $type:expr) => {
            let mut tc = TypeContext::new();
            let ast = &crate::util::quick_ast($string);
            let ty = tc.typecheck(ast).unwrap();
            assert_eq!(ty, $type)
        }
    }

    #[test]
    fn basic_test() {
        assert_type_in_fresh_context!("1", ty!(Nat));
        assert_type_in_fresh_context!(r#""drugs""#, ty!(StringT));
        assert_type_in_fresh_context!("true", ty!(Bool));
    }

    #[test]
    fn operators() {
        //TODO fix these with new operator regime
        /*
        assert_type_in_fresh_context!("-1", ty!(Int));
        assert_type_in_fresh_context!("1 + 2", ty!(Nat));
        assert_type_in_fresh_context!("-2", ty!(Int));
        assert_type_in_fresh_context!("!true", ty!(Bool));
        */
    }
}

#[cfg(test)]
mod tests {
    /*
    use super::{Type, TConst, TypeContext};
    use super::Type::*;
    use super::TConst::*;
    use std::rc::Rc;
    use std::cell::RefCell;

    macro_rules! type_test {
        ($input:expr, $correct:expr) => {
            {
                let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
                let mut tc = TypeContext::new(symbol_table);
                let ast = ::ast::parse(::tokenizing::tokenize($input)).0.unwrap();
                //tc.add_symbols(&ast);
                assert_eq!($correct, tc.infer_block(&ast.0).unwrap())
            }
        }
    }

    #[test]
    fn basic_inference() {
        type_test!("30", Const(Nat));
        //type_test!("fn x(a: Int): Bool {}; x(1)", TConst(Boolean));
    }
    */
}

View File

@ -1,85 +1,43 @@
use std::{cmp::Eq, collections::HashMap, fmt::Write, hash::Hash};

/// Utility function for printing a comma-delimited list of things
pub(crate) fn delim_wrapped(lhs: char, rhs: char, terms: impl Iterator<Item = String>) -> String {
    let mut buf = String::new();
    write!(buf, "{}", lhs).unwrap();
    for term in terms.map(Some).intersperse(None) {
        match term {
            Some(e) => write!(buf, "{}", e).unwrap(),
            None => write!(buf, ", ").unwrap(),
        };
    }
    write!(buf, "{}", rhs).unwrap();
    buf
}

//TODO rename this ScopeStack
#[derive(Default, Debug)]
pub struct ScopeStack<'a, T: 'a, V: 'a, N = String>
where T: Hash + Eq
{
    parent: Option<&'a ScopeStack<'a, T, V, N>>,
    values: HashMap<T, V>,
    scope_name: Option<N>,
}

impl<'a, T, V, N> ScopeStack<'a, T, V, N>
where T: Hash + Eq
{
    pub fn new(scope_name: Option<N>) -> Self
    where T: Hash + Eq {
        ScopeStack { parent: None, values: HashMap::new(), scope_name }
    }
    pub fn insert(&mut self, key: T, value: V)
    where T: Hash + Eq {
        self.values.insert(key, value);
    }
    pub fn lookup(&self, key: &T) -> Option<&V>
    where T: Hash + Eq {
        match (self.values.get(key), self.parent) {
            (None, None) => None,
            (None, Some(parent)) => parent.lookup(key),
            (Some(value), _) => Some(value),
        }
    }
    //TODO rename new_scope
    pub fn new_scope(&'a self, scope_name: Option<N>) -> Self
    where T: Hash + Eq {
        ScopeStack { parent: Some(self), values: HashMap::default(), scope_name }
    }

    #[allow(dead_code)]
    pub fn lookup_with_scope(&self, key: &T) -> Option<(&V, Option<&N>)>
    where T: Hash + Eq {
        match (self.values.get(key), self.parent) {
            (None, None) => None,
            (None, Some(parent)) => parent.lookup_with_scope(key),
            (Some(value), _) => Some((value, self.scope_name.as_ref())),
        }
    }

    pub fn get_name(&self) -> Option<&N> {
        self.scope_name.as_ref()
    }
}

use std::collections::HashMap;
use std::hash::Hash;
use std::cmp::Eq;

#[derive(Default, Debug)]
pub struct StateStack<'a, T: 'a, V: 'a> where T: Hash + Eq {
    parent: Option<&'a StateStack<'a, T, V>>,
    values: HashMap<T, V>,
    scope_name: Option<String>
}

impl<'a, T, V> StateStack<'a, T, V> where T: Hash + Eq {
    pub fn new(name: Option<String>) -> StateStack<'a, T, V> where T: Hash + Eq {
        StateStack {
            parent: None,
            values: HashMap::new(),
            scope_name: name
        }
    }
    pub fn insert(&mut self, key: T, value: V) where T: Hash + Eq {
        self.values.insert(key, value);
    }
    pub fn lookup(&self, key: &T) -> Option<&V> where T: Hash + Eq {
        match (self.values.get(key), self.parent) {
            (None, None) => None,
            (None, Some(parent)) => parent.lookup(key),
            (Some(value), _) => Some(value),
        }
    }
    pub fn new_frame(&'a self, name: Option<String>) -> StateStack<'a, T, V> where T: Hash + Eq {
        StateStack {
            parent: Some(self),
            values: HashMap::default(),
            scope_name: name,
        }
    }
    pub fn get_name(&self) -> Option<&String> {
        self.scope_name.as_ref()
    }
}
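// A small usage sketch for the ScopeStack variant above (key and value types illustrative):
//
//     let mut global: ScopeStack<String, u32> = ScopeStack::new(Some("global".to_string()));
//     global.insert("x".to_string(), 1);
//     let mut inner = global.new_scope(Some("inner".to_string()));
//     inner.insert("x".to_string(), 2);                        // shadows the outer binding
//     assert_eq!(inner.lookup(&"x".to_string()), Some(&2));    // innermost scope wins
//     assert_eq!(inner.lookup(&"y".to_string()), None);        // misses walk up through parents
//     assert_eq!(global.lookup(&"x".to_string()), Some(&1));   // the outer scope is untouched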
/// Quickly create an AST from a string, with no error checking. For test use only
#[cfg(test)]
pub fn quick_ast(input: &str) -> crate::ast::AST {
let mut parser = crate::parsing::Parser::new();
let output = parser.parse(input);
match output {
Ok(output) => output,
Err(err) => {
println!("Parse error: {}", err.msg);
panic!();
}
}
}
#[allow(unused_macros)]
macro_rules! rc {
($string:tt) => {
Rc::new(stringify!($string).to_string())
};
}

View File

@ -2,21 +2,24 @@
name = "schala-repl" name = "schala-repl"
version = "0.1.0" version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"] authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2021"
[dependencies] [dependencies]
llvm-sys = "70.0.2" llvm-sys = "*"
take_mut = "0.2.2" take_mut = "0.1.3"
itertools = "0.10" itertools = "0.5.8"
getopts = "*"
lazy_static = "0.2.8" lazy_static = "0.2.8"
maplit = "*" maplit = "*"
colored = "1.8" colored = "1.5"
serde = "1.0" serde = "1.0.15"
serde_derive = "1.0" serde_derive = "1.0.15"
serde_json = "1.0" serde_json = "1.0.3"
rocket = "0.3.13"
rocket_codegen = "0.3.13"
rocket_contrib = "0.3.13"
phf = "0.7.12" phf = "0.7.12"
includedir = "0.2.0" includedir = "0.2.0"
linefeed = "0.6.0" linefeed = "0.5.0"
regex = "0.2" regex = "0.2"
[build-dependencies] [build-dependencies]

View File

@ -3,5 +3,8 @@ extern crate includedir_codegen;
use includedir_codegen::Compression;

fn main() {
    includedir_codegen::start("WEBFILES").dir("../static", Compression::Gzip).build("static.rs").unwrap();
}

use includedir_codegen::Compression;

fn main() {
    includedir_codegen::start("WEBFILES")
        .dir("../static", Compression::Gzip)
        .build("static.rs")
        .unwrap();
}

View File

@ -1,116 +0,0 @@
use colored::*;
use crate::{
directive_actions::DirectiveAction, language::ProgrammingLanguageInterface, InterpreterDirectiveOutput,
Repl,
};
/// A CommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first
/// Terminal, it will use the `DirectiveAction` found there to find an appropriate function to execute,
/// and then execute it with any remaining arguments
#[derive(Clone)]
pub enum CommandTree {
Terminal {
name: String,
children: Vec<CommandTree>,
help_msg: Option<String>,
action: DirectiveAction,
},
NonTerminal {
name: String,
children: Vec<CommandTree>,
help_msg: Option<String>,
action: DirectiveAction,
},
Top(Vec<CommandTree>),
}
impl CommandTree {
pub fn nonterm_no_further_tab_completions(s: &str, help: Option<&str>) -> CommandTree {
CommandTree::NonTerminal {
name: s.to_string(),
help_msg: help.map(|x| x.to_string()),
children: vec![],
action: DirectiveAction::Null,
}
}
pub fn terminal(
s: &str,
help: Option<&str>,
children: Vec<CommandTree>,
action: DirectiveAction,
) -> CommandTree {
CommandTree::Terminal { name: s.to_string(), help_msg: help.map(|x| x.to_string()), children, action }
}
pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree {
CommandTree::NonTerminal {
name: s.to_string(),
help_msg: help.map(|x| x.to_string()),
children,
action: DirectiveAction::Null,
}
}
pub fn get_cmd(&self) -> &str {
match self {
CommandTree::Terminal { name, .. } => name.as_str(),
CommandTree::NonTerminal { name, .. } => name.as_str(),
CommandTree::Top(_) => "",
}
}
pub fn get_help(&self) -> &str {
match self {
CommandTree::Terminal { help_msg, .. } =>
help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
CommandTree::NonTerminal { help_msg, .. } =>
help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
CommandTree::Top(_) => "",
}
}
pub fn get_children(&self) -> &Vec<CommandTree> {
use CommandTree::*;
match self {
Terminal { children, .. } | NonTerminal { children, .. } | Top(children) => children,
}
}
pub fn get_subcommands(&self) -> Vec<&str> {
self.get_children().iter().map(|x| x.get_cmd()).collect()
}
pub fn perform<L: ProgrammingLanguageInterface>(
&self,
repl: &mut Repl<L>,
arguments: &[&str],
) -> InterpreterDirectiveOutput {
let mut dir_pointer: &CommandTree = self;
let mut idx = 0;
let res: Result<(DirectiveAction, usize), String> = loop {
match dir_pointer {
CommandTree::Top(subcommands) | CommandTree::NonTerminal { children: subcommands, .. } => {
let next_command = match arguments.get(idx) {
Some(cmd) => cmd,
None => break Err("Command requires arguments".to_owned()),
};
idx += 1;
match subcommands.iter().find(|sc| sc.get_cmd() == *next_command) {
Some(command_tree) => {
dir_pointer = command_tree;
}
None => break Err(format!("Command {} not found", next_command)),
};
}
CommandTree::Terminal { action, .. } => {
break Ok((action.clone(), idx));
}
}
};
match res {
Ok((action, idx)) => action.perform(repl, &arguments[idx..]),
Err(err) => Some(err.red().to_string()),
}
}
}
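// Sketch of how `perform` resolves a directive such as `:debug total-time on`
// (the REPL strips the sigil first, so arguments = ["debug", "total-time", "on"]):
//   Top           -> child "debug"       (NonTerminal, keep walking, idx = 1)
//   "debug"       -> child "total-time"  (NonTerminal, keep walking, idx = 2)
//   "total-time"  -> child "on"          (Terminal with action TotalTime(true), idx = 3)
// so the loop breaks with that action, which is then run as action.perform(repl, &arguments[3..]),
// i.e. with no remaining arguments. Unknown names along the way produce "Command {name} not found".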

View File

@ -1,77 +0,0 @@
use std::fmt::Write as FmtWrite;
use crate::{
help::help,
language::{LangMetaRequest, LangMetaResponse, ProgrammingLanguageInterface},
InterpreterDirectiveOutput, Repl,
};
#[derive(Debug, Clone)]
pub enum DirectiveAction {
Null,
Help,
QuitProgram,
ListPasses,
TotalTime(bool),
StageTime(bool),
Doc,
}
impl DirectiveAction {
pub fn perform<L: ProgrammingLanguageInterface>(
&self,
repl: &mut Repl<L>,
arguments: &[&str],
) -> InterpreterDirectiveOutput {
use DirectiveAction::*;
match self {
Null => None,
Help => help(repl, arguments),
QuitProgram => {
repl.save_before_exit();
::std::process::exit(0)
}
ListPasses => {
let pass_names = match repl.language_state.request_meta(LangMetaRequest::StageNames) {
LangMetaResponse::StageNames(names) => names,
_ => vec![],
};
let mut buf = String::new();
for pass in pass_names.iter().map(Some).intersperse(None) {
match pass {
Some(pass) => write!(buf, "{}", pass).unwrap(),
None => write!(buf, " -> ").unwrap(),
}
}
Some(buf)
}
TotalTime(value) => {
repl.options.show_total_time = *value;
None
}
StageTime(value) => {
repl.options.show_stage_times = *value;
None
}
Doc => doc(repl, arguments),
}
}
}
fn doc<L: ProgrammingLanguageInterface>(
repl: &mut Repl<L>,
arguments: &[&str],
) -> InterpreterDirectiveOutput {
arguments
.get(0)
.map(|cmd| {
let source = cmd.to_string();
let meta = LangMetaRequest::Docs { source };
match repl.language_state.request_meta(meta) {
LangMetaResponse::Docs { doc_string } => Some(doc_string),
_ => Some("Invalid doc response".to_owned()),
}
})
.unwrap_or_else(|| Some(":docs needs an argument".to_owned()))
}

View File

@ -1,68 +0,0 @@
use crate::{command_tree::CommandTree, directive_actions::DirectiveAction};
pub fn directives_from_pass_names(pass_names: &[String]) -> CommandTree {
let passes_directives: Vec<CommandTree> = pass_names
.iter()
.map(|pass_name| {
if pass_name == "parsing" {
CommandTree::nonterm(
pass_name,
None,
vec![
CommandTree::nonterm_no_further_tab_completions("compact", None),
CommandTree::nonterm_no_further_tab_completions("expanded", None),
CommandTree::nonterm_no_further_tab_completions("trace", None),
],
)
} else {
CommandTree::nonterm_no_further_tab_completions(pass_name, None)
}
})
.collect();
CommandTree::Top(get_list(&passes_directives, true))
}
fn get_list(passes_directives: &[CommandTree], include_help: bool) -> Vec<CommandTree> {
use DirectiveAction::*;
vec![
CommandTree::terminal("exit", Some("exit the REPL"), vec![], QuitProgram),
//TODO there should be an alias for this
CommandTree::terminal("quit", Some("exit the REPL"), vec![], QuitProgram),
CommandTree::terminal(
"help",
Some("Print this help message"),
if include_help { get_list(passes_directives, false) } else { vec![] },
Help,
),
CommandTree::nonterm(
"debug",
Some("Configure debug information"),
vec![
CommandTree::terminal(
"list-passes",
Some("List all registered compiler passes"),
vec![],
ListPasses,
),
CommandTree::nonterm(
"total-time",
None,
vec![
CommandTree::terminal("on", None, vec![], TotalTime(true)),
CommandTree::terminal("off", None, vec![], TotalTime(false)),
],
),
CommandTree::nonterm(
"stage-times",
Some("Computation time per-stage"),
vec![
CommandTree::terminal("on", None, vec![], StageTime(true)),
CommandTree::terminal("off", None, vec![], StageTime(false)),
],
),
],
),
CommandTree::terminal("doc", Some("Get language-specific help for an item"), vec![], Doc),
]
}

View File

@ -1,63 +0,0 @@
use std::fmt::Write as FmtWrite;
use colored::*;
use crate::{
command_tree::CommandTree, language::ProgrammingLanguageInterface, InterpreterDirectiveOutput, Repl,
};
pub fn help<L: ProgrammingLanguageInterface>(
repl: &mut Repl<L>,
arguments: &[&str],
) -> InterpreterDirectiveOutput {
match arguments {
[] => global_help(repl),
commands => {
let dirs = repl.get_directives();
Some(match get_directive_from_commands(commands, &dirs) {
None => format!("Directive `{}` not found", commands.last().unwrap()),
Some(dir) => {
let mut buf = String::new();
let cmd = dir.get_cmd();
let children = dir.get_children();
writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
for sub in children.iter() {
writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help()).unwrap();
}
buf
}
})
}
}
}
fn get_directive_from_commands<'a>(commands: &[&str], dirs: &'a CommandTree) -> Option<&'a CommandTree> {
let mut directive_list = dirs.get_children();
let mut matched_directive = None;
for cmd in commands {
let found = directive_list.iter().find(|directive| directive.get_cmd() == *cmd);
if let Some(dir) = found {
directive_list = dir.get_children();
}
matched_directive = found;
}
matched_directive
}
fn global_help<L: ProgrammingLanguageInterface>(repl: &mut Repl<L>) -> InterpreterDirectiveOutput {
let mut buf = String::new();
writeln!(buf, "{} version {}", "Schala REPL".bright_red().bold(), crate::VERSION_STRING).unwrap();
writeln!(buf, "-----------------------").unwrap();
for directive in repl.get_directives().get_children() {
writeln!(buf, "{}{} - {}", repl.sigil, directive.get_cmd(), directive.get_help()).unwrap();
}
writeln!(buf).unwrap();
writeln!(buf, "Language-specific help for {}", <L as ProgrammingLanguageInterface>::language_name())
.unwrap();
writeln!(buf, "-----------------------").unwrap();
Some(buf)
}

View File

@ -1,56 +1,196 @@
use std::{collections::HashSet, time};
use std::collections::HashMap;
use colored::*;
use std::fmt::Write;
pub struct LLVMCodeString(pub String);
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct EvalOptions {
pub execution_method: ExecutionMethod,
pub debug_passes: HashMap<String, PassDebugOptionsDescriptor>,
}
#[derive(Debug, Hash, PartialEq)]
pub struct PassDescriptor {
pub name: String,
pub debug_options: Vec<String>
}
#[derive(Debug, Serialize, Deserialize)]
pub struct PassDebugOptionsDescriptor {
pub opts: Vec<String>,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum ExecutionMethod {
Compile,
Interpret,
}
impl Default for ExecutionMethod {
fn default() -> ExecutionMethod {
ExecutionMethod::Interpret
}
}
#[derive(Debug, Default)]
pub struct UnfinishedComputation {
artifacts: Vec<(String, TraceArtifact)>,
pub cur_debug_options: Vec<String>,
}
#[derive(Debug)]
pub struct FinishedComputation {
artifacts: Vec<(String, TraceArtifact)>,
text_output: Result<String, String>,
}
impl UnfinishedComputation {
pub fn add_artifact(&mut self, artifact: TraceArtifact) {
self.artifacts.push((artifact.stage_name.clone(), artifact));
}
pub fn finish(self, text_output: Result<String, String>) -> FinishedComputation {
FinishedComputation {
artifacts: self.artifacts,
text_output
}
}
pub fn output(self, output: Result<String, String>) -> FinishedComputation {
FinishedComputation {
artifacts: self.artifacts,
text_output: output
}
}
}
impl FinishedComputation {
pub fn to_repl(&self) -> String {
let mut buf = String::new();
for (stage, artifact) in self.artifacts.iter() {
let color = artifact.text_color;
let stage = stage.color(color).bold();
let output = artifact.debug_output.color(color);
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
}
match self.text_output {
Ok(ref output) => write!(&mut buf, "{}", output).unwrap(),
Err(ref err) => write!(&mut buf, "{} {}", "Error: ".red().bold(), err).unwrap(),
}
buf
}
pub fn to_noninteractive(&self) -> Option<String> {
match self.text_output {
Ok(_) => {
let mut buf = String::new();
for (stage, artifact) in self.artifacts.iter() {
let color = artifact.text_color;
let stage = stage.color(color).bold();
let output = artifact.debug_output.color(color);
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
}
if buf == "" { None } else { Some(buf) }
},
Err(ref s) => Some(format!("{} {}", "Error: ".red().bold(), s))
}
}
}
#[derive(Debug)]
pub struct TraceArtifact {
stage_name: String,
debug_output: String,
text_color: &'static str,
}
impl TraceArtifact {
pub fn new(stage: &str, debug: String) -> TraceArtifact {
let color = match stage {
"parse_trace" | "ast" => "red",
"ast_reducing" => "red",
"tokens" => "green",
"type_check" => "magenta",
_ => "blue",
};
TraceArtifact { stage_name: stage.to_string(), debug_output: debug, text_color: color}
}
pub fn new_parse_trace(trace: Vec<String>) -> TraceArtifact {
let mut output = String::new();
for t in trace {
output.push_str(&t);
output.push_str("\n");
}
TraceArtifact { stage_name: "parse_trace".to_string(), debug_output: output, text_color: "red"}
}
}
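// Sketch of the intended flow for these types inside a pipeline stage (variable and stage names
// illustrative):
//
//     let mut comp = UnfinishedComputation::default();
//     comp.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));  // rendered green
//     comp.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));        // rendered red
//     let finished: FinishedComputation = comp.finish(Ok("3".to_string()));
//     println!("{}", finished.to_repl());  // prints each artifact, then the program output
//                                          // (or "Error:" in red if finish() got an Err)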
pub trait ProgrammingLanguageInterface { pub trait ProgrammingLanguageInterface {
type Config: Default + Clone; fn execute_pipeline(&mut self, _input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
fn language_name() -> String; FinishedComputation { artifacts: vec![], text_output: Err(format!("Execution pipeline not done")) }
fn source_file_suffix() -> String; }
fn run_computation(&mut self, _request: ComputationRequest<Self::Config>) -> ComputationResponse; fn get_language_name(&self) -> String;
fn get_source_file_suffix(&self) -> String;
fn request_meta(&mut self, _request: LangMetaRequest) -> LangMetaResponse { fn get_passes(&self) -> Vec<PassDescriptor> {
LangMetaResponse::Custom { kind: "not-implemented".to_owned(), value: format!("") } vec![]
}
fn handle_custom_interpreter_directives(&mut self, _commands: &Vec<&str>) -> Option<String> {
None
}
fn custom_interpreter_directives_help(&self) -> String {
format!(">> No custom interpreter directives specified <<")
} }
} }
pub struct ComputationRequest<'a, T> { /* a pass_chain function signature looks like:
pub source: &'a str, * fn(&mut ProgrammingLanguageInterface, A, Option<&mut DebugHandler>) -> Result<B, String>
pub config: T, *
pub debug_requests: HashSet<DebugAsk>, * TODO use some kind of failure-handling library to make this better
*/
#[macro_export]
macro_rules! pass_chain {
($state:expr, $options:expr; $($pass:path), *) => {
|text_input| {
let mut comp = UnfinishedComputation::default();
pass_chain_helper! { ($state, comp, $options); text_input $(, $pass)* }
}
};
} }
pub struct ComputationResponse { #[macro_export]
pub main_output: Result<String, String>, macro_rules! pass_chain_helper {
pub global_output_stats: GlobalOutputStats, (($state:expr, $comp:expr, $options:expr); $input:expr, $pass:path $(, $rest:path)*) => {
pub debug_responses: Vec<DebugResponse>, {
use schala_repl::PassDebugOptionsDescriptor;
let pass_name = stringify!($pass);
let output = {
let ref debug_map = $options.debug_passes;
let debug_handle = match debug_map.get(pass_name) {
Some(PassDebugOptionsDescriptor { opts }) => {
let ptr = &mut $comp;
ptr.cur_debug_options = opts.clone();
Some(ptr)
} }
_ => None
#[derive(Default, Debug)] };
pub struct GlobalOutputStats { $pass($state, $input, debug_handle)
pub total_duration: time::Duration, };
pub stage_durations: Vec<(String, time::Duration)>, match output {
Ok(result) => pass_chain_helper! { ($state, $comp, $options); result $(, $rest)* },
Err(err) => {
$comp.output(Err(format!("Pass {} failed with {:?}", pass_name, err)))
} }
#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize, Serialize)]
pub enum DebugAsk {
Timing,
ByStage { stage_name: String, token: Option<String> },
} }
pub struct DebugResponse {
pub ask: DebugAsk,
pub value: String,
} }
};
pub enum LangMetaRequest { // Done
StageNames, (($state:expr, $comp:expr, $options:expr); $final_output:expr) => {
Docs { source: String }, {
Custom { kind: String, value: String }, let final_output: FinishedComputation = $comp.finish(Ok($final_output));
ImmediateDebug(DebugAsk), final_output
} }
};
pub enum LangMetaResponse {
StageNames(Vec<String>),
Docs { doc_string: String },
Custom { kind: String, value: String },
ImmediateDebug(DebugResponse),
} }
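// Hypothetical use of the pass_chain! macro above inside an execute_pipeline implementation;
// the language type and the pass functions here are illustrative, not part of this commit:
//
//     fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
//         let chain = pass_chain!(self, options; tokenize_pass, parse_pass, eval_pass);
//         chain(input)
//     }
//
// where each pass has the shape described in the comment above, e.g.
//     fn parse_pass(lang: &mut MyLang, input: Vec<Token>, handle: Option<&mut UnfinishedComputation>) -> Result<AST, String>
// An Err from any pass short-circuits the chain into a FinishedComputation whose output reports
// which pass failed.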

View File

@ -1,185 +1,168 @@
#![feature(box_patterns, proc_macro_hygiene, decl_macro, iter_intersperse)] #![feature(link_args)]
#![feature(slice_patterns, box_patterns, box_syntax)]
#![feature(plugin)]
#![plugin(rocket_codegen)]
extern crate getopts;
extern crate linefeed;
extern crate itertools;
extern crate colored;
#[macro_use] #[macro_use]
extern crate serde_derive; extern crate serde_derive;
extern crate serde_json;
extern crate rocket;
extern crate rocket_contrib;
extern crate includedir; extern crate includedir;
extern crate phf; extern crate phf;
extern crate serde_json;
mod command_tree; use std::path::Path;
mod language; use std::fs::File;
use self::command_tree::CommandTree; use std::io::{Read, Write};
mod repl_options; use std::process::exit;
use repl_options::ReplOptions; use std::default::Default;
mod directive_actions; use std::fmt::Write as FmtWrite;
mod directives;
use directives::directives_from_pass_names;
mod help;
mod response;
use std::{collections::HashSet, sync::Arc};
use colored::*; use colored::*;
pub use language::{ use itertools::Itertools;
ComputationRequest, ComputationResponse, DebugAsk, DebugResponse, GlobalOutputStats, LangMetaRequest,
LangMetaResponse, ProgrammingLanguageInterface, mod language;
}; mod webapp;
use response::ReplResponse; pub mod llvm_wrap;
const VERSION_STRING: &'static str = "0.1.0";
include!(concat!(env!("OUT_DIR"), "/static.rs")); include!(concat!(env!("OUT_DIR"), "/static.rs"));
const VERSION_STRING: &str = "0.1.0";
const HISTORY_SAVE_FILE: &str = ".schala_history"; pub use language::{LLVMCodeString, ProgrammingLanguageInterface, EvalOptions,
const OPTIONS_SAVE_FILE: &str = ".schala_repl"; ExecutionMethod, TraceArtifact, FinishedComputation, UnfinishedComputation, PassDebugOptionsDescriptor, PassDescriptor};
type InterpreterDirectiveOutput = Option<String>; pub type PLIGenerator = Box<Fn() -> Box<ProgrammingLanguageInterface> + Send + Sync>;
pub struct Repl<L: ProgrammingLanguageInterface> { pub fn repl_main(generators: Vec<PLIGenerator>) {
/// If this is the first character typed by a user into the repl, the following let languages: Vec<Box<ProgrammingLanguageInterface>> = generators.iter().map(|x| x()).collect();
/// will be interpreted as a directive to the REPL rather than a command in the
/// running programming language. let option_matches = program_options().parse(std::env::args()).unwrap_or_else(|e| {
sigil: char, println!("{:?}", e);
line_reader: ::linefeed::interface::Interface<::linefeed::terminal::DefaultTerminal>, exit(1);
language_state: L, });
options: ReplOptions,
if option_matches.opt_present("list-languages") {
for lang in languages {
println!("{}", lang.get_language_name());
}
exit(1);
}
if option_matches.opt_present("help") {
println!("{}", program_options().usage("Schala metainterpreter"));
exit(0);
}
if option_matches.opt_present("webapp") {
webapp::web_main(generators);
exit(0);
}
let mut options = EvalOptions::default();
let debug_passes = if let Some(opts) = option_matches.opt_str("debug") {
let output: Vec<String> = opts.split_terminator(",").map(|s| s.to_string()).collect();
output
} else {
vec![]
};
let language_names: Vec<String> = languages.iter().map(|lang| {lang.get_language_name()}).collect();
let initial_index: usize =
option_matches.opt_str("lang")
.and_then(|lang| { language_names.iter().position(|x| { x.to_lowercase() == lang.to_lowercase() }) })
.unwrap_or(0);
options.execution_method = match option_matches.opt_str("eval-style") {
Some(ref s) if s == "compile" => ExecutionMethod::Compile,
_ => ExecutionMethod::Interpret,
};
match option_matches.free[..] {
[] | [_] => {
let mut repl = Repl::new(languages, initial_index);
repl.run();
}
[_, ref filename, _..] => {
run_noninteractive(filename, languages, options, debug_passes);
}
};
}
fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInterface>>, mut options: EvalOptions, debug_passes: Vec<String>) {
let path = Path::new(filename);
let ext = path.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
println!("Source file lacks extension");
exit(1);
});
let mut language = Box::new(languages.into_iter().find(|lang| lang.get_source_file_suffix() == ext)
.unwrap_or_else(|| {
println!("Extension .{} not recognized", ext);
exit(1);
}));
let mut source_file = File::open(path).unwrap();
let mut buffer = String::new();
source_file.read_to_string(&mut buffer).unwrap();
for pass in debug_passes.into_iter() {
if let Some(_) = language.get_passes().iter().find(|desc| desc.name == pass) {
options.debug_passes.insert(pass, PassDebugOptionsDescriptor { opts: vec![] });
}
}
match options.execution_method {
ExecutionMethod::Compile => {
/*
let llvm_bytecode = language.compile(&buffer);
compilation_sequence(llvm_bytecode, filename);
*/
panic!("Not ready to go yet");
},
ExecutionMethod::Interpret => {
let output = language.execute_pipeline(&buffer, &options);
output.to_noninteractive().map(|text| println!("{}", text));
}
}
} }
#[derive(Clone)]
enum PromptStyle {
    Normal,
    Multiline,
}

#[derive(Clone)]
enum CommandTree {
    Terminal(String, Option<String>),
    NonTerminal(String, Vec<CommandTree>, Option<String>),
    Top(Vec<CommandTree>),
}
impl<L: ProgrammingLanguageInterface> Repl<L> { impl CommandTree {
pub fn new(initial_state: L) -> Self { fn term(s: &str, help: Option<&str>) -> CommandTree {
use linefeed::Interface; CommandTree::Terminal(s.to_string(), help.map(|x| x.to_string()))
let line_reader = Interface::new("schala-repl").unwrap();
let sigil = ':';
Repl { sigil, line_reader, language_state: initial_state, options: ReplOptions::new() }
} }
fn get_cmd(&self) -> String {
pub fn run_repl(&mut self, config: L::Config) { match self {
println!("Schala meta-interpeter version {}", VERSION_STRING); CommandTree::Terminal(s, _) => s.to_string(),
println!("Type {} for help with the REPL", format!("{}help", self.sigil).bright_green().bold()); CommandTree::NonTerminal(s, _, _) => s.to_string(),
self.load_options(); CommandTree::Top(_) => "".to_string(),
self.handle_repl_loop(config);
self.save_before_exit();
println!("Exiting...");
}
fn load_options(&mut self) {
self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(());
match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) {
Ok(options) => {
self.options = options;
}
Err(e) => eprintln!("{}", e),
} }
} }
fn get_help(&self) -> String {
fn handle_repl_loop(&mut self, config: L::Config) { match self {
use linefeed::ReadResult::*; CommandTree::Terminal(_, h) => h.as_ref().map(|h| h.clone()).unwrap_or(format!("")),
CommandTree::NonTerminal(_, _, h) => h.as_ref().map(|h| h.clone()).unwrap_or(format!("")),
'main: loop { CommandTree::Top(_) => "".to_string(),
macro_rules! match_or_break {
($line:expr) => {
match $line {
Err(e) => {
println!("readline IO Error: {}", e);
break 'main;
}
Ok(Eof) | Ok(Signal(_)) => break 'main,
Ok(Input(ref input)) => input,
}
};
}
self.update_line_reader();
let line = self.line_reader.read_line();
let input: &str = match_or_break!(line);
self.line_reader.add_history_unique(input.to_string());
let mut chars = input.chars().peekable();
let repl_responses = match chars.next() {
Some(ch) if ch == self.sigil =>
if chars.peek() == Some(&'{') {
let mut buf = String::new();
buf.push_str(input.get(2..).unwrap());
'multiline: loop {
self.set_prompt(PromptStyle::Multiline);
let new_line = self.line_reader.read_line();
let new_input = match_or_break!(new_line);
if new_input.starts_with(":}") {
break 'multiline;
} else {
buf.push_str(new_input);
buf.push('\n');
} }
} }
self.handle_input(&buf, &config) fn get_children(&self) -> Vec<String> {
} else { match self {
if let Some(output) = self.handle_interpreter_directive(input.get(1..).unwrap()) { CommandTree::Terminal(_, _) => vec![],
println!("{}", output); CommandTree::NonTerminal(_, children, _) => children.iter().map(|x| x.get_cmd()).collect(),
CommandTree::Top(children) => children.iter().map(|x| x.get_cmd()).collect(),
} }
continue;
},
_ => self.handle_input(input, &config),
};
for repl_response in repl_responses.iter() {
println!("{}", repl_response);
}
}
}
fn update_line_reader(&mut self) {
let tab_complete_handler = TabCompleteHandler::new(self.sigil, self.get_directives());
self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
self.set_prompt(PromptStyle::Normal);
}
fn set_prompt(&mut self, prompt_style: PromptStyle) {
let prompt_str = match prompt_style {
PromptStyle::Normal => ">> ",
PromptStyle::Multiline => ">| ",
};
self.line_reader.set_prompt(prompt_str).unwrap();
}
fn save_before_exit(&self) {
self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(());
self.options.save_to_file(OPTIONS_SAVE_FILE);
}
fn handle_interpreter_directive(&mut self, input: &str) -> InterpreterDirectiveOutput {
let arguments: Vec<&str> = input.split_whitespace().collect();
if arguments.is_empty() {
return None;
}
let directives = self.get_directives();
directives.perform(self, &arguments)
}
fn handle_input(&mut self, input: &str, config: &L::Config) -> Vec<ReplResponse> {
let mut debug_requests = HashSet::new();
for ask in self.options.debug_asks.iter() {
debug_requests.insert(ask.clone());
}
let request = ComputationRequest { source: input, config: config.clone(), debug_requests };
let response = self.language_state.run_computation(request);
response::handle_computation_response(response, &self.options)
}
fn get_directives(&mut self) -> CommandTree {
let pass_names = match self.language_state.request_meta(LangMetaRequest::StageNames) {
LangMetaResponse::StageNames(names) => names,
_ => vec![],
};
directives_from_pass_names(&pass_names)
} }
} }
@ -188,31 +171,23 @@ struct TabCompleteHandler {
top_level_commands: CommandTree, top_level_commands: CommandTree,
} }
use linefeed::{ use linefeed::complete::{Completion, Completer};
complete::{Completer, Completion}, use linefeed::terminal::Terminal;
terminal::Terminal,
};
impl TabCompleteHandler { impl TabCompleteHandler {
fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler { fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler {
TabCompleteHandler { top_level_commands, sigil } TabCompleteHandler {
top_level_commands,
sigil,
}
} }
} }
impl<T: Terminal> Completer<T> for TabCompleteHandler { impl<T: Terminal> Completer<T> for TabCompleteHandler {
fn complete( fn complete(&self, word: &str, prompter: &linefeed::prompter::Prompter<T>, start: usize, _end: usize) -> Option<Vec<Completion>> {
&self,
word: &str,
prompter: &::linefeed::prompter::Prompter<T>,
start: usize,
_end: usize,
) -> Option<Vec<Completion>> {
let line = prompter.buffer(); let line = prompter.buffer();
if !line.starts_with(self.sigil) { if line.starts_with(&format!("{}", self.sigil)) {
return None;
}
let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace(); let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace();
let mut completions = Vec::new(); let mut completions = Vec::new();
let mut command_tree: Option<&CommandTree> = Some(&self.top_level_commands); let mut command_tree: Option<&CommandTree> = Some(&self.top_level_commands);
@ -220,30 +195,367 @@ impl<T: Terminal> Completer<T> for TabCompleteHandler {
loop { loop {
match words.next() { match words.next() {
None => { None => {
let top = matches!(command_tree, Some(CommandTree::Top(_))); let top = match command_tree {
Some(CommandTree::Top(_)) => true,
_ => false
};
let word = if top { word.get(1..).unwrap() } else { word }; let word = if top { word.get(1..).unwrap() } else { word };
for cmd in command_tree.map(|x| x.get_subcommands()).unwrap_or_default().into_iter() { for cmd in command_tree.map(|x| x.get_children()).unwrap_or(vec![]).into_iter() {
if cmd.starts_with(word) { if cmd.starts_with(word) {
completions.push(Completion { completions.push(Completion {
completion: format!("{}{}", if top { ":" } else { "" }, cmd), completion: format!("{}{}", if top { ":" } else { "" }, cmd),
display: Some(cmd.to_string()), display: Some(cmd.clone()),
suffix: ::linefeed::complete::Suffix::Some(' '), suffix: linefeed::complete::Suffix::Some(' ')
}) })
} }
} }
break; break;
} },
Some(s) => { Some(s) => {
let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm { let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s), CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
CommandTree::NonTerminal { children, .. } => CommandTree::NonTerminal(_, children, _) => children.iter().find(|c| c.get_cmd() == s),
children.iter().find(|c| c.get_cmd() == s), CommandTree::Terminal(_, _) => None,
CommandTree::Terminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
}); });
command_tree = new_ptr; command_tree = new_ptr;
} }
} }
} }
Some(completions) Some(completions)
} else {
None
} }
} }
}
struct Repl {
options: EvalOptions,
languages: Vec<Box<ProgrammingLanguageInterface>>,
current_language_index: usize,
interpreter_directive_sigil: char,
line_reader: linefeed::interface::Interface<linefeed::terminal::DefaultTerminal>,
}
impl Repl {
fn new(languages: Vec<Box<ProgrammingLanguageInterface>>, initial_index: usize) -> Repl {
use linefeed::Interface;
let i = if initial_index < languages.len() { initial_index } else { 0 };
let line_reader = Interface::new("schala-repl").unwrap();
Repl {
options: Repl::get_options(),
languages: languages,
current_language_index: i,
interpreter_directive_sigil: ':',
line_reader
}
}
fn get_cur_language(&self) -> &ProgrammingLanguageInterface {
self.languages[self.current_language_index].as_ref()
}
fn get_options() -> EvalOptions {
File::open(".schala_repl")
.and_then(|mut file| {
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
})
.and_then(|contents| {
let options: EvalOptions = serde_json::from_str(&contents)?;
Ok(options)
}).unwrap_or(EvalOptions::default())
}
fn save_options(&self) {
let ref options = self.options;
let read = File::create(".schala_repl")
.and_then(|mut file| {
let buf = serde_json::to_string(options).unwrap();
file.write_all(buf.as_bytes())
});
if let Err(err) = read {
println!("Error saving .schala_repl file {}", err);
}
}
fn run(&mut self) {
use linefeed::ReadResult;
println!("Schala MetaInterpreter version {}", VERSION_STRING);
println!("Type {}help for help with the REPL", self.interpreter_directive_sigil);
self.line_reader.load_history(".schala_history").unwrap_or(());
loop {
let language_name = self.languages[self.current_language_index].get_language_name();
let directives = self.get_directives();
let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, directives);
self.line_reader.set_completer(std::sync::Arc::new(tab_complete_handler));
let prompt_str = format!("{} >> ", language_name);
self.line_reader.set_prompt(&prompt_str);
match self.line_reader.read_line() {
Err(e) => {
println!("Terminal read error: {}", e);
},
Ok(ReadResult::Eof) => break,
Ok(ReadResult::Signal(_)) => break,
Ok(ReadResult::Input(ref input)) => {
self.line_reader.add_history_unique(input.to_string());
let output = match input.chars().nth(0) {
Some(ch) if ch == self.interpreter_directive_sigil => self.handle_interpreter_directive(input),
_ => Some(self.input_handler(input)),
};
if let Some(o) = output {
println!("=> {}", o);
}
}
}
}
self.line_reader.save_history(".schala_history").unwrap_or(());
self.save_options();
println!("Exiting...");
}
fn input_handler(&mut self, input: &str) -> String {
let ref mut language = self.languages[self.current_language_index];
let interpreter_output = language.execute_pipeline(input, &self.options);
interpreter_output.to_repl()
}
fn get_directives(&self) -> CommandTree {
let ref passes = self.get_cur_language().get_passes();
let passes_directives: Vec<CommandTree> = passes.iter()
.map(|pass_descriptor| {
let name = &pass_descriptor.name;
if pass_descriptor.debug_options.len() == 0 {
CommandTree::term(name, None)
} else {
let sub_opts: Vec<CommandTree> = pass_descriptor.debug_options.iter()
.map(|o| CommandTree::term(o, None)).collect();
CommandTree::NonTerminal(
name.clone(),
sub_opts,
None
)
}
}).collect();
CommandTree::Top(vec![
CommandTree::term("exit", Some("exit the REPL")),
CommandTree::term("quit", Some("exit the REPL")),
CommandTree::term("help", Some("Print this help message")),
CommandTree::NonTerminal(format!("debug"), vec![
CommandTree::term("passes", None),
CommandTree::NonTerminal(format!("show"), passes_directives.clone(), None),
CommandTree::NonTerminal(format!("hide"), passes_directives.clone(), None),
], Some(format!("show or hide pass info for a given pass, or display the names of all passes"))),
CommandTree::NonTerminal(format!("lang"), vec![
CommandTree::term("next", None),
CommandTree::term("prev", None),
CommandTree::NonTerminal(format!("go"), vec![], None)//TODO
], Some(format!("switch between languages, or go directly to a langauge by name"))),
])
}
fn handle_interpreter_directive(&mut self, input: &str) -> Option<String> {
let mut iter = input.chars();
iter.next();
let commands: Vec<&str> = iter
.as_str()
.split_whitespace()
.collect();
let cmd: &str = match commands.get(0).clone() {
None => return None,
Some(s) => s
};
match cmd {
"exit" | "quit" => {
self.save_options();
exit(0)
},
"lang" | "language" => match commands.get(1) {
Some(&"show") => {
let mut buf = String::new();
for (i, lang) in self.languages.iter().enumerate() {
write!(buf, "{}{}\n", if i == self.current_language_index { "* "} else { "" }, lang.get_language_name()).unwrap();
}
Some(buf)
},
Some(&"go") => match commands.get(2) {
None => Some(format!("Must specify a language name")),
Some(&desired_name) => {
for (i, _) in self.languages.iter().enumerate() {
let lang_name = self.languages[i].get_language_name();
if lang_name.to_lowercase() == desired_name.to_lowercase() {
self.current_language_index = i;
return Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()));
}
}
Some(format!("Language {} not found", desired_name))
}
},
Some(&"next") | Some(&"n") => {
self.current_language_index = (self.current_language_index + 1) % self.languages.len();
Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()))
},
Some(&"previous") | Some(&"p") | Some(&"prev") => {
self.current_language_index = if self.current_language_index == 0 { self.languages.len() - 1 } else { self.current_language_index - 1 };
Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()))
},
Some(e) => Some(format!("Bad `lang(uage)` argument: {}", e)),
None => Some(format!("Valid arguments for `lang(uage)` are `show`, `next`|`n`, `previous`|`prev`|`n`"))
},
"help" => {
let mut buf = String::new();
let ref lang = self.languages[self.current_language_index];
let directives = match self.get_directives() {
CommandTree::Top(children) => children,
_ => panic!("Top-level CommandTree not Top")
};
writeln!(buf, "MetaInterpreter options").unwrap();
writeln!(buf, "-----------------------").unwrap();
for directive in directives {
let trailer = " ";
writeln!(buf, "{}{}- {}", directive.get_cmd(), trailer, directive.get_help()).unwrap();
}
writeln!(buf, "").unwrap();
writeln!(buf, "Language-specific help for {}", lang.get_language_name()).unwrap();
writeln!(buf, "-----------------------").unwrap();
writeln!(buf, "{}", lang.custom_interpreter_directives_help()).unwrap();
Some(buf)
},
"debug" => self.handle_debug(commands),
e => self.languages[self.current_language_index]
.handle_custom_interpreter_directives(&commands)
.or(Some(format!("Unknown command: {}", e)))
}
}
fn handle_debug(&mut self, commands: Vec<&str>) -> Option<String> {
let passes = self.get_cur_language().get_passes();
match commands.get(1) {
Some(&"passes") => Some(
passes.into_iter()
.map(|desc| {
if self.options.debug_passes.contains_key(&desc.name) {
let color = "green";
format!("*{}", desc.name.color(color))
} else {
desc.name
}
})
.intersperse(format!(" -> "))
.collect()),
b @ Some(&"show") | b @ Some(&"hide") => {
let show = b == Some(&"show");
let debug_pass: String = match commands.get(2) {
Some(s) => s.to_string(),
None => return Some(format!("Must specify a stage to debug")),
};
let pass_opt = commands.get(3);
if let Some(desc) = passes.iter().find(|desc| desc.name == debug_pass) {
let mut opts = vec![];
if let Some(opt) = pass_opt {
opts.push(opt.to_string());
}
let msg = format!("{} debug for pass {}", if show { "Enabling" } else { "Disabling" }, debug_pass);
if show {
self.options.debug_passes.insert(desc.name.clone(), PassDebugOptionsDescriptor { opts });
} else {
self.options.debug_passes.remove(&desc.name);
}
Some(msg)
} else {
Some(format!("Couldn't find stage: {}", debug_pass))
}
},
_ => Some(format!("Unknown debug command"))
}
}
}
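// Example directive session against the handlers above (language and pass names illustrative;
// the response strings follow the format! calls in the code):
//
//     :lang go maaru       => "Switching to Maaru"            (case-insensitive name match)
//     :lang next           => cycles to the next registered language
//     :debug passes        => lists all passes, highlighting currently-debugged ones in green
//     :debug show ast      => "Enabling debug for pass ast"   (if a pass named "ast" is registered)
//     :debug hide ast      => "Disabling debug for pass ast"
//     :exit                => saves options to .schala_repl and quits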
/*
pub fn compilation_sequence(llvm_code: LLVMCodeString, sourcefile: &str) {
use std::process::Command;
let ll_filename = "out.ll";
let obj_filename = "out.o";
let q: Vec<&str> = sourcefile.split('.').collect();
let bin_filename = match &q[..] {
&[name, "maaru"] => name,
_ => panic!("Bad filename {}", sourcefile),
};
let LLVMCodeString(llvm_str) = llvm_code;
println!("Compilation process finished for {}", ll_filename);
File::create(ll_filename)
.and_then(|mut f| f.write_all(llvm_str.as_bytes()))
.expect("Error writing file");
let llc_output = Command::new("llc")
.args(&["-filetype=obj", ll_filename, "-o", obj_filename])
.output()
.expect("Failed to run llc");
if !llc_output.status.success() {
println!("{}", String::from_utf8_lossy(&llc_output.stderr));
}
let gcc_output = Command::new("gcc")
.args(&["-o", bin_filename, &obj_filename])
.output()
.expect("failed to run gcc");
if !gcc_output.status.success() {
println!("{}", String::from_utf8_lossy(&gcc_output.stdout));
println!("{}", String::from_utf8_lossy(&gcc_output.stderr));
}
for filename in [obj_filename].iter() {
Command::new("rm")
.arg(filename)
.output()
.expect(&format!("failed to run rm {}", filename));
}
}
*/
fn program_options() -> getopts::Options {
let mut options = getopts::Options::new();
options.optopt("s",
"eval-style",
"Specify whether to compile (if supported) or interpret the language. If not specified, the default is language-specific",
"[compile|interpret]"
);
options.optflag("",
"list-languages",
"Show a list of all supported languages");
options.optopt("l",
"lang",
"Start up REPL in a language",
"LANGUAGE");
options.optflag("h",
"help",
"Show help text");
options.optflag("w",
"webapp",
"Start up web interpreter");
options.optopt("d",
"debug",
"Debug a stage (l = tokenizer, a = AST, r = parse trace, s = symbol table)",
"[l|a|r|s]");
options
}
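// Illustrative command lines for these options (binary name and file extensions assumed):
//
//     schala --list-languages          # print the name of every registered language, then exit
//     schala --lang rukka              # start the REPL with the named language selected (case-insensitive)
//     schala -d tokens,ast foo.schala  # run a source file noninteractively with some passes debugged
//                                      #   (the help text lists single letters, but the code matches
//                                      #    the comma-separated names against get_passes())
//     schala -s compile foo.maaru      # request compilation; this branch currently just panics
//                                      #   with "Not ready to go yet"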

View File

@ -0,0 +1,279 @@
#![allow(non_snake_case)]
#![allow(dead_code)]
extern crate llvm_sys;
use self::llvm_sys::{LLVMIntPredicate, LLVMRealPredicate};
use self::llvm_sys::prelude::*;
use self::llvm_sys::core;
use std::ptr;
use std::ffi::{CString, CStr};
use std::os::raw::c_char;
pub fn create_context() -> LLVMContextRef {
unsafe { core::LLVMContextCreate() }
}
pub fn module_create_with_name(name: &str) -> LLVMModuleRef {
unsafe {
let n = name.as_ptr() as *const _;
core::LLVMModuleCreateWithName(n)
}
}
pub fn CreateBuilderInContext(context: LLVMContextRef) -> LLVMBuilderRef {
unsafe { core::LLVMCreateBuilderInContext(context) }
}
pub fn AppendBasicBlockInContext(context: LLVMContextRef,
function: LLVMValueRef,
name: &str)
-> LLVMBasicBlockRef {
let c_name = CString::new(name).unwrap();
unsafe { core::LLVMAppendBasicBlockInContext(context, function, c_name.as_ptr()) }
}
pub fn AddFunction(module: LLVMModuleRef, name: &str, function_type: LLVMTypeRef) -> LLVMValueRef {
let c_name = CString::new(name).unwrap();
unsafe { core::LLVMAddFunction(module, c_name.as_ptr(), function_type) }
}
pub fn FunctionType(return_type: LLVMTypeRef,
mut param_types: Vec<LLVMTypeRef>,
                    is_var_arg: bool)
-> LLVMTypeRef {
let len = param_types.len();
unsafe {
let pointer = param_types.as_mut_ptr();
core::LLVMFunctionType(return_type,
pointer,
len as u32,
                               if is_var_arg { 1 } else { 0 })
}
}
pub fn GetNamedFunction(module: LLVMModuleRef,
name: &str) -> Option<LLVMValueRef> {
let c_name = CString::new(name).unwrap();
let ret = unsafe { core::LLVMGetNamedFunction(module, c_name.as_ptr()) };
if ret.is_null() {
None
} else {
Some(ret)
}
}
pub fn VoidTypeInContext(context: LLVMContextRef) -> LLVMTypeRef {
unsafe { core::LLVMVoidTypeInContext(context) }
}
pub fn DisposeBuilder(builder: LLVMBuilderRef) {
unsafe { core::LLVMDisposeBuilder(builder) }
}
pub fn DisposeModule(module: LLVMModuleRef) {
unsafe { core::LLVMDisposeModule(module) }
}
pub fn ContextDispose(context: LLVMContextRef) {
unsafe { core::LLVMContextDispose(context) }
}
pub fn PositionBuilderAtEnd(builder: LLVMBuilderRef, basic_block: LLVMBasicBlockRef) {
unsafe { core::LLVMPositionBuilderAtEnd(builder, basic_block) }
}
pub fn BuildRet(builder: LLVMBuilderRef, val: LLVMValueRef) -> LLVMValueRef {
unsafe { core::LLVMBuildRet(builder, val) }
}
pub fn BuildRetVoid(builder: LLVMBuilderRef) -> LLVMValueRef {
unsafe { core::LLVMBuildRetVoid(builder) }
}
pub fn DumpModule(module: LLVMModuleRef) {
unsafe { core::LLVMDumpModule(module) }
}
pub fn Int64TypeInContext(context: LLVMContextRef) -> LLVMTypeRef {
unsafe { core::LLVMInt64TypeInContext(context) }
}
pub fn ConstInt(int_type: LLVMTypeRef, n: u64, sign_extend: bool) -> LLVMValueRef {
unsafe { core::LLVMConstInt(int_type, n, if sign_extend { 1 } else { 0 }) }
}
pub fn BuildAdd(builder: LLVMBuilderRef,
lhs: LLVMValueRef,
rhs: LLVMValueRef,
reg_name: &str)
-> LLVMValueRef {
let name = CString::new(reg_name).unwrap();
unsafe { core::LLVMBuildAdd(builder, lhs, rhs, name.as_ptr()) }
}
pub fn BuildSub(builder: LLVMBuilderRef,
lhs: LLVMValueRef,
rhs: LLVMValueRef,
reg_name: &str)
-> LLVMValueRef {
let name = CString::new(reg_name).unwrap();
unsafe { core::LLVMBuildSub(builder, lhs, rhs, name.as_ptr()) }
}
pub fn BuildMul(builder: LLVMBuilderRef,
lhs: LLVMValueRef,
rhs: LLVMValueRef,
reg_name: &str)
-> LLVMValueRef {
let name = CString::new(reg_name).unwrap();
unsafe { core::LLVMBuildMul(builder, lhs, rhs, name.as_ptr()) }
}
pub fn BuildUDiv(builder: LLVMBuilderRef,
lhs: LLVMValueRef,
rhs: LLVMValueRef,
reg_name: &str)
-> LLVMValueRef {
let name = CString::new(reg_name).unwrap();
unsafe { core::LLVMBuildUDiv(builder, lhs, rhs, name.as_ptr()) }
}
pub fn BuildSRem(builder: LLVMBuilderRef,
lhs: LLVMValueRef,
rhs: LLVMValueRef,
reg_name: &str)
-> LLVMValueRef {
let name = CString::new(reg_name).unwrap();
unsafe { core::LLVMBuildSRem(builder, lhs, rhs, name.as_ptr()) }
}
pub fn BuildCondBr(builder: LLVMBuilderRef,
if_expr: LLVMValueRef,
then_expr: LLVMBasicBlockRef,
else_expr: LLVMBasicBlockRef) -> LLVMValueRef {
unsafe { core::LLVMBuildCondBr(builder, if_expr, then_expr, else_expr) }
}
pub fn BuildBr(builder: LLVMBuilderRef,
dest: LLVMBasicBlockRef) -> LLVMValueRef {
unsafe { core::LLVMBuildBr(builder, dest) }
}
pub fn GetInsertBlock(builder: LLVMBuilderRef) -> LLVMBasicBlockRef {
unsafe { core::LLVMGetInsertBlock(builder) }
}
pub fn BuildPhi(builder: LLVMBuilderRef, ty: LLVMTypeRef, name: &str) -> LLVMValueRef {
let name = CString::new(name).unwrap();
unsafe { core::LLVMBuildPhi(builder, ty, name.as_ptr()) }
}
pub fn SetValueName(value: LLVMValueRef, name: &str) {
let name = CString::new(name).unwrap();
unsafe {
core::LLVMSetValueName(value, name.as_ptr())
}
}
pub fn GetValueName(value: LLVMValueRef) -> String {
unsafe {
let name_ptr: *const c_char = core::LLVMGetValueName(value);
CStr::from_ptr(name_ptr).to_string_lossy().into_owned()
}
}
pub fn GetParams(function: LLVMValueRef) -> Vec<LLVMValueRef> {
let size = CountParams(function);
unsafe {
let mut container = Vec::with_capacity(size);
container.set_len(size);
core::LLVMGetParams(function, container.as_mut_ptr());
container
}
}
pub fn CountParams(function: LLVMValueRef) -> usize {
unsafe { core::LLVMCountParams(function) as usize }
}
pub fn BuildFCmp(builder: LLVMBuilderRef,
op: LLVMRealPredicate,
lhs: LLVMValueRef,
rhs: LLVMValueRef,
name: &str) -> LLVMValueRef {
let name = CString::new(name).unwrap();
unsafe { core::LLVMBuildFCmp(builder, op, lhs, rhs, name.as_ptr()) }
}
pub fn BuildZExt(builder: LLVMBuilderRef,
val: LLVMValueRef,
dest_type: LLVMTypeRef,
name: &str) -> LLVMValueRef {
let name = CString::new(name).unwrap();
unsafe { core::LLVMBuildZExt(builder, val, dest_type, name.as_ptr()) }
}
pub fn BuildUIToFP(builder: LLVMBuilderRef,
val: LLVMValueRef,
dest_type: LLVMTypeRef,
name: &str) -> LLVMValueRef {
let name = CString::new(name).unwrap();
unsafe { core::LLVMBuildUIToFP(builder, val, dest_type, name.as_ptr()) }
}
pub fn BuildICmp(builder: LLVMBuilderRef,
op: LLVMIntPredicate,
lhs: LLVMValueRef,
rhs: LLVMValueRef,
name: &str) -> LLVMValueRef {
let name = CString::new(name).unwrap();
unsafe { core::LLVMBuildICmp(builder, op, lhs, rhs, name.as_ptr()) }
}
pub fn GetBasicBlockParent(block: LLVMBasicBlockRef) -> LLVMValueRef {
unsafe { core::LLVMGetBasicBlockParent(block) }
}
pub fn GetBasicBlocks(function: LLVMValueRef) -> Vec<LLVMBasicBlockRef> {
let size = CountBasicBlocks(function);
unsafe {
let mut container = Vec::with_capacity(size);
container.set_len(size);
core::LLVMGetBasicBlocks(function, container.as_mut_ptr());
container
}
}
pub fn CountBasicBlocks(function: LLVMValueRef) -> usize {
unsafe { core::LLVMCountBasicBlocks(function) as usize }
}
pub fn PrintModuleToString(module: LLVMModuleRef) -> String {
unsafe {
let str_ptr: *const c_char = core::LLVMPrintModuleToString(module);
CStr::from_ptr(str_ptr).to_string_lossy().into_owned()
}
}
pub fn AddIncoming(phi_node: LLVMValueRef, mut incoming_values: Vec<LLVMValueRef>,
mut incoming_blocks: Vec<LLVMBasicBlockRef>) {
let count = incoming_blocks.len() as u32;
if incoming_values.len() as u32 != count {
panic!("Bad invocation of AddIncoming");
}
unsafe {
let vals = incoming_values.as_mut_ptr();
let blocks = incoming_blocks.as_mut_ptr();
core::LLVMAddIncoming(phi_node, vals, blocks, count)
}
}
pub fn PrintModuleToFile(module: LLVMModuleRef, filename: &str) -> LLVMBool {
let out_file = CString::new(filename).unwrap();
unsafe { core::LLVMPrintModuleToFile(module, out_file.as_ptr(), ptr::null_mut()) }
}
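As a rough sketch of how the wrappers above compose (illustrative only; it assumes these functions are in scope and LLVM is linked), the following builds a module containing a single function that returns the constant 42 and renders it as IR text:
fn emit_constant_module() -> String {
    let context = create_context();
    let module = module_create_with_name("demo");
    let builder = CreateBuilderInContext(context);

    // A function with no parameters returning an i64.
    let int64 = Int64TypeInContext(context);
    let fn_type = FunctionType(int64, vec![], false);
    let function = AddFunction(module, "answer", fn_type);

    // Single basic block: return the constant 42.
    let entry = AppendBasicBlockInContext(context, function, "entry");
    PositionBuilderAtEnd(builder, entry);
    BuildRet(builder, ConstInt(int64, 42, false));

    let ir = PrintModuleToString(module);
    DisposeBuilder(builder);
    DisposeModule(module);
    ContextDispose(context);
    ir
}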


@ -1,43 +0,0 @@
use std::{
collections::HashSet,
fs::File,
io::{self, Read, Write},
};
use crate::language::DebugAsk;
#[derive(Serialize, Deserialize)]
pub struct ReplOptions {
pub debug_asks: HashSet<DebugAsk>,
pub show_total_time: bool,
pub show_stage_times: bool,
}
impl ReplOptions {
pub fn new() -> ReplOptions {
ReplOptions { debug_asks: HashSet::new(), show_total_time: true, show_stage_times: false }
}
pub fn save_to_file(&self, filename: &str) {
let res = File::create(filename).and_then(|mut file| {
let buf = crate::serde_json::to_string(self).unwrap();
file.write_all(buf.as_bytes())
});
if let Err(err) = res {
eprintln!("Error saving {} file {}", filename, err);
}
}
pub fn load_from_file(filename: &str) -> Result<ReplOptions, io::Error> {
File::open(filename)
.and_then(|mut file| {
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
})
.and_then(|contents| {
let output: ReplOptions = crate::serde_json::from_str(&contents)?;
Ok(output)
})
}
}
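A sketch of the intended round trip, assuming `ReplOptions` is in scope; the `.schala_repl` filename mirrors the entry in the repository's .gitignore:
let mut options = ReplOptions::new();
options.show_stage_times = true;
options.save_to_file(".schala_repl");
// Fall back to defaults if the file is missing or unparseable.
let options = ReplOptions::load_from_file(".schala_repl").unwrap_or_else(|_| ReplOptions::new());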


@ -1,74 +0,0 @@
use std::{fmt, fmt::Write};
use colored::*;
use crate::{
language::{ComputationResponse, DebugAsk},
ReplOptions,
};
pub struct ReplResponse {
label: Option<String>,
text: String,
color: Option<Color>,
}
impl fmt::Display for ReplResponse {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut buf = String::new();
if let Some(ref label) = self.label {
write!(buf, "({})", label).unwrap();
}
write!(buf, "=> {}", self.text).unwrap();
write!(
f,
"{}",
match self.color {
Some(c) => buf.color(c),
None => buf.normal(),
}
)
}
}
pub fn handle_computation_response(
response: ComputationResponse,
options: &ReplOptions,
) -> Vec<ReplResponse> {
let mut responses = vec![];
if options.show_total_time {
responses.push(ReplResponse {
label: Some("Total time".to_string()),
text: format!("{:?}", response.global_output_stats.total_duration),
color: None,
});
}
if options.show_stage_times {
responses.push(ReplResponse {
label: Some("Stage times".to_string()),
text: format!("{:?}", response.global_output_stats.stage_durations),
color: None,
});
}
for debug_resp in response.debug_responses {
let stage_name = match debug_resp.ask {
DebugAsk::ByStage { stage_name, .. } => stage_name,
_ => continue,
};
responses.push(ReplResponse {
label: Some(stage_name.to_string()),
text: debug_resp.value,
color: Some(Color::Red),
});
}
responses.push(match response.main_output {
Ok(s) => ReplResponse { label: None, text: s, color: None },
Err(e) => ReplResponse { label: Some("Error".to_string()), text: e, color: Some(Color::Red) },
});
responses
}
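A sketch of how a REPL driver might print these responses, assuming it already holds a `ComputationResponse` named `response` and a `ReplOptions` named `options`:
for repl_response in handle_computation_response(response, &options) {
    // Relies on the Display impl above for the optional label and color.
    println!("{}", repl_response);
}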

44
schala-repl/src/webapp.rs Normal file

@ -0,0 +1,44 @@
use rocket;
use rocket::State;
use rocket::response::Content;
use rocket::http::ContentType;
use rocket_contrib::Json;
use language::{ProgrammingLanguageInterface, EvalOptions};
use WEBFILES;
use ::PLIGenerator;
#[get("/")]
fn index() -> Content<String> {
let path = "static/index.html";
let html_contents = String::from_utf8(WEBFILES.get(path).unwrap().into_owned()).unwrap();
Content(ContentType::HTML, html_contents)
}
#[get("/bundle.js")]
fn js_bundle() -> Content<String> {
let path = "static/bundle.js";
let js_contents = String::from_utf8(WEBFILES.get(path).unwrap().into_owned()).unwrap();
Content(ContentType::JavaScript, js_contents)
}
#[derive(Debug, Serialize, Deserialize)]
struct Input {
source: String,
}
#[derive(Serialize, Deserialize)]
struct Output {
text: String,
}
#[post("/input", format = "application/json", data = "<input>")]
fn interpreter_input(input: Json<Input>, generators: State<Vec<PLIGenerator>>) -> Json<Output> {
let schala_gen = generators.get(0).unwrap();
let mut schala: Box<ProgrammingLanguageInterface> = schala_gen();
let code_output = schala.execute_pipeline(&input.source, &EvalOptions::default());
Json(Output { text: code_output.to_repl() })
}
pub fn web_main(language_generators: Vec<PLIGenerator>) {
rocket::ignite().manage(language_generators).mount("/", routes![index, js_bundle, interpreter_input]).launch();
}
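A sketch of how `web_main` might be invoked, mirroring the generator vector that appears in the main.rs diff below; it assumes `schala_lang::Schala` implements `ProgrammingLanguageInterface`, and Rocket listens on its default port unless configured otherwise:
let generators: Vec<PLIGenerator> = vec![
    Box::new(|| Box::new(schala_lang::Schala::new())),
];
web_main(generators);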


@ -1,15 +1,10 @@
 fn main() {
-let a = 10
-let b = 20
+const a = 10
+const b = 20
 a + b
 }
-//this is a one-line comment
-/* this is
-a multiline
-comment
-*/
+//foo
 print(main())


@ -6,37 +6,36 @@ fn main() {
 }
-@annotations are with @-
+@annotations use the @ sigil
 // variable expressions
-//variable declaration works like Rust
-let a: I32 = 20
-let mut b: String = 20
+var a: I32 = 20
+const b: String = 20
 there(); can(); be(); multiple(); statements(); per_line();
 //string interpolation
-// maybe
-let yolo = "I have ${a + b} people in my house"
+const yolo = "I have ${a + b} people in my house"
-// let expressions
+// let expressions ??? not sure if I want this
 let a = 10, b = 20, c = 30 in a + b + c
 //list literal
-let q = [1,2,3,4]
+const q = [1,2,3,4]
-//lambda literal - uses haskell-ish syntax
+//lambda literal
-q.map(\(item) { item * 100 })
+q.map({|item| item * 100 })
 fn yolo(a: MyType, b: YourType): ReturnType<Param1, Param2> {
 if a == 20 {
 return "early"
 }
+var sex = 20
+sex
 }
 /* for/while loop topics */
-//TODO I can probably get away with having one of `for`, `while`
 //infinite loop
 while {
@ -71,13 +70,13 @@ fn main() {
 /* conditionals/pattern matching */
-// `is` functions as an operator asking "does this pattern match"
+// "is" operator for "does this pattern match"
 x is Some(t) // type bool
 if x {
 is Some(t) => {
-}
+},
 is None => {
 }
@ -95,12 +94,12 @@ what if type A = B meant that you could had to create A's with A(B), but when yo
 */
 //declaring types of all stripes
-type MyData = { a: i32, b: String } // shorthand special-case for `type MyData = MyData { a: i32, b: String }`
+type MyData = { a: i32, b: String }
 type MyType = MyType
 type Option<a> = None | Some(a)
 type Signal = Absence | SimplePresence(i32) | ComplexPresence {a: i32, b: MyCustomData}
-//traits TODO I probably want to rename this
+//traits
 trait Bashable { }
 trait Luggable {
@ -109,7 +108,7 @@ what if type A = B meant that you could had to create A's with A(B), but when yo
 }
-// lambdas - maybe I want to use ruby-style (not rust style) syntax
-// e.g.
-// Also TODO Nix uses `X: Y: Z` for in its value-level syntax, why can't I?
-let a: X -> Y -> Z = {|x,y| }
+// lambdas
+// ruby-style not rust-style
+const a: X -> Y -> Z = {|x,y| }


@ -6,7 +6,7 @@ fn sua(x): Int {
 }
-//let a = getline()
+//const a = getline()
 /*
 if a == "true" {


@ -1,11 +1,12 @@
-let c = 10
+const c = 10
 fn add(a, b) {
-let c = a + b
+const c = a + b
 c
 }
-let mut b = 20
+var b = 20
 println(add(1,2))
 println(c + b)


@ -1,71 +1,20 @@
-use std::{collections::HashSet, fs::File, io::Read, path::PathBuf, process::exit};
-use schala_lang::{Schala, SchalaConfig};
-use schala_repl::{ComputationRequest, ProgrammingLanguageInterface, Repl};
-fn main() {
-    let args: Vec<String> = std::env::args().collect();
-    let matches = command_line_options().parse(&args[1..]).unwrap_or_else(|e| {
-        eprintln!("Error parsing options: {}", e);
-        exit(1);
-    });
-    if matches.opt_present("help") {
-        println!("{}", command_line_options().usage("Schala metainterpreter"));
-        exit(0);
-    }
-    if matches.free.is_empty() {
-        let state = Schala::new();
-        let mut repl = Repl::new(state);
-        let config = SchalaConfig { repl: true };
-        repl.run_repl(config);
-    } else {
-        let paths: Vec<PathBuf> = matches.free.iter().map(PathBuf::from).collect();
-        //TODO handle more than one file
-        let filename = &paths[0];
-        let extension = filename.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
-            eprintln!("Source file `{}` has no extension.", filename.display());
-            exit(1);
-        });
-        //TODO this proably should be a macro for every supported language
-        if extension == Schala::source_file_suffix() {
-            let config = SchalaConfig { repl: false };
-            run_noninteractive(paths, Schala::new(), config);
-        } else {
-            eprintln!("Extension .{} not recognized", extension);
-            exit(1);
-        }
-    }
-}
-pub fn run_noninteractive<L: ProgrammingLanguageInterface>(
-    filenames: Vec<PathBuf>,
-    mut language: L,
-    config: L::Config,
-) {
-    // for now, ony do something with the first filename
-    let filename = &filenames[0];
-    let mut source_file = File::open(filename).unwrap();
-    let mut buffer = String::new();
-    source_file.read_to_string(&mut buffer).unwrap();
-    let request = ComputationRequest { source: &buffer, config, debug_requests: HashSet::new() };
-    let response = language.run_computation(request);
-    match response.main_output {
-        Ok(s) => println!("{}", s),
-        Err(s) => eprintln!("{}", s),
-    };
-}
-fn command_line_options() -> getopts::Options {
-    let mut options = getopts::Options::new();
-    options.optflag("h", "help", "Show help text");
-    //options.optflag("w", "webapp", "Start up web interpreter");
-    options
-}
+extern crate schala_repl;
+extern crate maaru_lang;
+extern crate rukka_lang;
+extern crate robo_lang;
+extern crate schala_lang;
+use schala_repl::{PLIGenerator, repl_main};
+extern { }
+//TODO specify multiple langs, and have a way to switch between them
+fn main() {
+    let generators: Vec<PLIGenerator> = vec![
+        Box::new(|| { Box::new(schala_lang::Schala::new())}),
+        Box::new(|| { Box::new(maaru_lang::Maaru::new())}),
+        Box::new(|| { Box::new(robo_lang::Robo::new())}),
+        Box::new(|| { Box::new(rukka_lang::Rukka::new())}),
+    ];
+    repl_main(generators);
+}
}