Compare commits: visitor_ag ... COMPILER_B (202 commits)
Commits in this comparison (SHA1):

a236817956, b23b8ebdc7, 8d3bbd7069, afcb10bb72, 8de625e540, a2bd9a3985, e4a1a23f4d, 2cd325ba12,
8218007f1c, 040ab11873, b967fa1911, 4c718ed977, d20acf7166, efc8497235, d824b8d6ef, 4a1987b5a2,
c96644ddce, cc0ac83709, d6019e6f9a, 3344f6827d, b38c4b3298, a2f30b6136, 11a9a60a34, 5bb1a245c4,
1ffe61cf5f, 7495f30e16, 82520aa28d, 129d9ec673, 7825ef1eb9, f3ecdc61cb, bf59e6cc63, c560c29b2d,
4dcd9d0198, 7ac63160c5, 8656992945, bb87a87848, 2f467702e3, 5ac5425fac, 944916d6af, 3906210db8,
f7357d4498, 1493d12a22, 016d8fc900, 86dc5eca02, e75958c2a2, 7a56b6dfc0, f9633ebe55, 854740a63f,
ca10481d7c, 26fa4a29ec, 97b59d7e70, 92ad4767c8, 7cabca2987, 98e53a6d0f, 77cc1f3824, 9e64a22328,
5afdc16f2e, f818e86f48, 5a01b12d9b, 7c75f9b2a8, 2c34ab52c4, 44d1f4692f, 3cf3fce72d, ddea470ba8,
745afe981a, a6c86d6447, 8d3639ab8e, 3bca82a8c8, 811c52c8d3, 95e278d1b5, 61b757313d, 24b48551dc,
2ed84de641, 22efd39114, a48bb61eb3, 904d5c4431, 28056b1f89, f9a59838b0, f02d7cb924, 489819a28e,
c427646e75, f06b5922de, 253b5d88f0, f654cd6b50, 89649273d8, 9fa4e3797c, c8804eeefb, d80a0036b1,
7533c69c49, 39bb175722, ae65455374, 1fc028c9fc, 031ff9fe7e, 5a9f3c1850, 58251d3f28, 2e42313991,
355604d911, 0b57561114, dbd81ca83d, 6368d10d92, 9cd64d97a5, 41cad61e34, a054de56a2, 603ea89b98,
06026604cc, 03f8abac6a, fd3922d866, 71b3365de2, cf9ce74394, f5d1c89574, 8d1e0ebdea, 69c215eac9,
8a34034819, 403b171c72, e5a09a6ee8, e1a83b5de3, 8b1dd561f2, 6ebe893acb, c9052e0a3b, 56e6eb44f9,
642f21d298, c12cb99b24, 8dc8833eb3, b517bc2366, 73519d5be5, 8b6de6961f, 3eaeeb5509, b91c3c9da5,
08da787aae, d6f2fe6e02, a85d3c46bd, 25f51a314d, 6c3a4f907b, 22887678bd, 1ecf1e506c, 72944ded1b,
b65779fb93, 418d77770f, 5572e0eebb, 65bc32b033, 29f4060a71, 09dbe5b736, cfa65e5339, 9a28ccfd85,
ea542192be, 79635f2f86, 2b5b1589b0, 44c073320b, c04e4356a1, 24e0ecbe73, fd66a9711d, a5c9aca4d7,
cefaeb1180, 724237545f, 0f7f5cb416, b4da57f5c5, 8b87945bee, f96469178d, 34abb9b081, 89d967aee4,
0540df4024, 61182a847f, f6dcd7f0b8, 16dc973aa6, 611e46938d, 3d6447abb4, a74027bb1f, 583e87c19a,
12ed2f5c8e, 3caf9c763c, cd20afc3c7, 063a13f7ff, b0a1f3337c, 2e147e141e, 44938aa4e6, 44ae10b7ae,
fa1544c71f, fde169b623, 6e92b03f81, 0dd6b26e5a, a3bb3ee514, 7ae41e717d, 24089da788, bfb36b90e4,
e750247134, a8efe40b57, dae619c6fa, c9bfa2b540, e708c728d2, b65d6e4c8e, d9eca8ffb3, a600d34712,
aae2ee53cd, bf3dcc18d0, baf499ee5a, 3b19fc5aa9, 16bf166fa9, d832583ed9, 87ecc6f0cb, ee87695626,
37c77d93d7, b62968379a
.gitignore (vendored): 1 line changed

@@ -1,4 +1,3 @@
Cargo.lock
target
.schala_repl
.schala_history

Cargo.lock (generated, new file): 1124 lines
File diff suppressed because it is too large
TODO.md: 75 lines changed

@@ -1,7 +1,45 @@
# Plan of attack

1. modify visitor so it can handle scopes
-this is needed both to handle import scope correctly
-and also to support making FQSNs aware of function parameters

2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away

# TODO items

-use 'let' sigil in patterns for variables :

```
q is MyStruct(let a, Chrono::Trigga) then {

}
```

-idea: what if there was something like React jsx syntas built in? i.e. a way to automatically transform some kind of markup
into a function call, cf. `<h1 prop="arg">` -> h1(prop=arg)

## General code cleanup
- I think I can restructure the parser to get rid of most instances of expect!, at least at the beginning of a rule
DONE -experiment with storing metadata via ItemIds on AST nodes (cf. https://rust-lang.github.io/rustc-guide/hir.html, https://github.com/rust-lang/rust/blob/master/src/librustc/hir/mod.rs )
-implement and test open/use statements
-implement field access
- standardize on an error type that isn't String
-implement a visitor pattern for the use of scope_resolver
- maybe implement this twice: 1) the value-returning, no-default one in the haoyi blogpost,
-look at https://gitlab.haskell.org/ghc/ghc/wikis/pattern-synonyms
2) the non-value-returning, default one like in rustc (cf. https://github.com/rust-unofficial/patterns/blob/master/patterns/visitor.md)

-parser error - should report subset of AST parsed *so far*
- what if you used python 'def' syntax to define a function? what error message makes sense here?

## Reduction
- make a good type for actual language builtins to avoid string comparisons

## Typechecking

- make a type to represent types rather than relying on string comparisons

- look at https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html

- cf. the notation mentioned in the cardelli paper, the debug information for the `typechecking` pass should

@@ -62,10 +100,43 @@ ex.
-consult http://gluon-lang.org/book/embedding-api.html

## Trying if-syntax again

## Playing around with conditional syntax ideas
//simple if expr
if x == 10 then "a" else "z"

//complex if expr
if x == 10 then {
  let a = 1
  let b = 2
  a + b
} else {
  55
}

// different comparison ops
if x {
  == 1 then "a"
  .isPrime() then "b"
  else "c"
}

/* for now disallow `if x == { 1 then ... }`, b/c hard to parse

//simple pattern-matching
if x is Person("Ivan", age) then age else 0

//match-block equivalent
if x {
  is Person("Ivan", _) then "Ivan"
  is Person(_, age) if age > 13 then "barmitzvah'd"
  else "foo"
}

## (OLD) Playing around with conditional syntax ideas

-

- if/match playground
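The TODO above floats two visitor-pattern designs: a value-returning trait with no default methods, and a unit-returning trait with default no-op methods, as in rustc. A minimal sketch contrasting the two shapes; the `Expr` type and method names here are invented for illustration and are not schala-lang's API:

```rust
// Hypothetical toy AST, only for contrasting the two visitor styles.
enum Expr {
    Num(i64),
    Add(Box<Expr>, Box<Expr>),
}

// Style 1: value-returning, no defaults; every case must be handled by the implementor.
trait EvalVisitor {
    fn visit_num(&mut self, n: i64) -> i64;
    fn visit_add(&mut self, lhs: &Expr, rhs: &Expr) -> i64;
}

// Style 2: unit-returning with default no-op methods; override only what you care about.
trait WalkVisitor {
    fn visit_num(&mut self, _n: i64) {}
    fn visit_add(&mut self, _lhs: &Expr, _rhs: &Expr) {}
}

// A separate walker drives the style-2 visitor over the tree.
fn walk<V: WalkVisitor>(v: &mut V, e: &Expr) {
    match e {
        Expr::Num(n) => v.visit_num(*n),
        Expr::Add(l, r) => {
            v.visit_add(l, r);
            walk(v, l);
            walk(v, r);
        }
    }
}

fn main() {
    struct Counter(usize);
    impl WalkVisitor for Counter {
        fn visit_num(&mut self, _n: i64) { self.0 += 1; }
    }
    let e = Expr::Add(Box::new(Expr::Num(1)), Box::new(Expr::Num(2)));
    let mut c = Counter(0);
    walk(&mut c, &e);
    assert_eq!(c.0, 2); // visited both numeric leaves
}
```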
@@ -32,7 +32,11 @@ impl Fold for RecursiveDescentFn {
      if self.parse_level != 0 {
        self.parse_level -= 1;
      }
      result
      result.map_err(|mut parse_error: ParseError| {
        parse_error.production_name = Some(stringify!(#ident).to_string());
        parse_error
      })
    }
  };
  i.block = Box::new(new_block);
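This hunk makes the generated parse-rule wrapper tag any `ParseError` with the name of the production that failed, via `map_err`. A standalone sketch of that error-annotation pattern, using hypothetical types rather than the actual schala-lang-codegen output:

```rust
#[derive(Debug)]
struct ParseError {
    message: String,
    production_name: Option<String>,
}

// Hypothetical stand-in for the body that a parse-rule macro would wrap.
fn parse_number(input: &str) -> Result<i64, ParseError> {
    input.trim().parse().map_err(|_| ParseError {
        message: format!("could not parse {:?} as a number", input),
        production_name: None,
    })
}

// The wrapper annotates any error with the production name, mirroring the map_err in the diff.
fn parse_number_rule(input: &str) -> Result<i64, ParseError> {
    parse_number(input).map_err(|mut parse_error| {
        parse_error.production_name = Some("number".to_string());
        parse_error
    })
}

fn main() {
    let err = parse_number_rule("abc").unwrap_err();
    assert_eq!(err.production_name.as_deref(), Some("number"));
    println!("{:?}", err);
}
```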
@@ -5,13 +5,16 @@ authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2018"

[dependencies]
itertools = "0.5.8"
take_mut = "0.1.3"
maplit = "*"
lazy_static = "0.2.8"
failure = "0.1.2"
itertools = "0.8.0"
take_mut = "0.2.2"
maplit = "1.0.1"
lazy_static = "1.3.0"
failure = "0.1.5"
ena = "0.11.0"
stopwatch = "0.0.7"
derivative = "1.0.3"
colored = "1.8"
radix_trie = "0.1.5"

schala-lang-codegen = { path = "../codegen" }
schala-repl = { path = "../../schala-repl" }
@@ -1,49 +1,84 @@
use std::rc::Rc;
use std::convert::From;

use crate::builtin::{BinOp, PrefixOp};
use crate::typechecking::TypeData;
use crate::derivative::Derivative;

#[derive(Clone, Debug, PartialEq)]
pub struct Meta<T> {
  n: T,
  source_map: SourceMap,
  type_data: TypeData,
mod walker;
mod visitor;
mod visitor_test;
mod operators;
pub use operators::*;
pub use visitor::ASTVisitor;
pub use walker::walk_ast;

/// An abstract identifier for an AST node
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct ItemId {
  idx: u32,
}

impl<T> Meta<T> {
  pub fn new(n: T) -> Meta<T> {
    Meta { n, source_map: SourceMap::default(), type_data: TypeData::new() }
  }

  pub fn node(&self) -> &T {
    &self.n
impl ItemId {
  fn new(n: u32) -> ItemId {
    ItemId { idx: n }
  }
}

//TODO this PartialEq is here to make tests work - find a way to make it not necessary
#[derive(Clone, Debug, Default, PartialEq)]
struct SourceMap {
pub struct ItemIdStore {
  last_idx: u32
}

impl From<Expression> for Meta<Expression> {
  fn from(expr: Expression) -> Meta<Expression> {
    Meta { n: expr, source_map: SourceMap::default(), type_data: TypeData::new() }
impl ItemIdStore {
  pub fn new() -> ItemIdStore {
    ItemIdStore { last_idx: 0 }
  }
  /// Always returns an ItemId with internal value zero
  #[cfg(test)]
  pub fn new_id() -> ItemId {
    ItemId { idx: 0 }
  }

  /// This limits the size of the AST to 2^32 tree elements
  pub fn fresh(&mut self) -> ItemId {
    let idx = self.last_idx;
    self.last_idx += 1;
    ItemId::new(idx)
  }
}

#[derive(Debug, PartialEq)]
pub struct AST(pub Vec<Meta<Statement>>);
#[derive(Derivative, Debug)]
#[derivative(PartialEq)]
pub struct AST {
  #[derivative(PartialEq="ignore")]
  pub id: ItemId,
  pub statements: Vec<Statement>
}

#[derive(Derivative, Debug, Clone)]
#[derivative(PartialEq)]
pub struct Statement {
  #[derivative(PartialEq="ignore")]
  pub id: ItemId,
  pub kind: StatementKind,
}

#[derive(Debug, PartialEq, Clone)]
pub enum Statement {
  ExpressionStatement(Meta<Expression>),
pub enum StatementKind {
  Expression(Expression),
  Declaration(Declaration),
  Import(ImportSpecifier),
  Module(ModuleSpecifier),
}

pub type Block = Vec<Meta<Statement>>;
pub type Block = Vec<Statement>;
pub type ParamName = Rc<String>;

#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub struct QualifiedName {
  #[derivative(PartialEq="ignore")]
  pub id: ItemId,
  pub components: Vec<Rc<String>>,
}

#[derive(Debug, PartialEq, Clone)]
pub struct FormalParam {
  pub name: ParamName,
@@ -60,12 +95,16 @@ pub enum Declaration {
    body: TypeBody,
    mutable: bool
  },
  TypeAlias(Rc<String>, Rc<String>), //should have TypeSingletonName in it, or maybe just String, not sure
  //TODO this needs to be more sophisticated
  TypeAlias {
    alias: Rc<String>,
    original: Rc<String>,
  },
  Binding {
    name: Rc<String>,
    constant: bool,
    type_anno: Option<TypeIdentifier>,
    expr: Meta<Expression>,
    expr: Expression,
  },
  Impl {
    type_name: TypeIdentifier,
@@ -99,19 +138,22 @@ pub enum Variant {
  }
}

#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub struct Expression {
  #[derivative(PartialEq="ignore")]
  pub id: ItemId,
  pub kind: ExpressionKind,
  pub type_anno: Option<TypeIdentifier>
}

impl Expression {
  pub fn new(kind: ExpressionKind) -> Expression {
    Expression { kind, type_anno: None }
  pub fn new(id: ItemId, kind: ExpressionKind) -> Expression {
    Expression { id, kind, type_anno: None }
  }

  pub fn with_anno(kind: ExpressionKind, type_anno: TypeIdentifier) -> Expression {
    Expression { kind, type_anno: Some(type_anno) }
  pub fn with_anno(id: ItemId, kind: ExpressionKind, type_anno: TypeIdentifier) -> Expression {
    Expression { id, kind, type_anno: Some(type_anno) }
  }
}

@@ -133,28 +175,28 @@ pub enum ExpressionKind {
  FloatLiteral(f64),
  StringLiteral(Rc<String>),
  BoolLiteral(bool),
  BinExp(BinOp, Box<Meta<Expression>>, Box<Meta<Expression>>),
  PrefixExp(PrefixOp, Box<Meta<Expression>>),
  TupleLiteral(Vec<Meta<Expression>>),
  Value(Rc<String>),
  BinExp(BinOp, Box<Expression>, Box<Expression>),
  PrefixExp(PrefixOp, Box<Expression>),
  TupleLiteral(Vec<Expression>),
  Value(QualifiedName),
  NamedStruct {
    name: Rc<String>,
    fields: Vec<(Rc<String>, Meta<Expression>)>,
    name: QualifiedName,
    fields: Vec<(Rc<String>, Expression)>,
  },
  Call {
    f: Box<Meta<Expression>>,
    arguments: Vec<Meta<InvocationArgument>>,
    f: Box<Expression>,
    arguments: Vec<InvocationArgument>,
  },
  Index {
    indexee: Box<Meta<Expression>>,
    indexers: Vec<Meta<Expression>>,
    indexee: Box<Expression>,
    indexers: Vec<Expression>,
  },
  IfExpression {
    discriminator: Box<Discriminator>,
    discriminator: Option<Box<Expression>>,
    body: Box<IfExpressionBody>,
  },
  WhileExpression {
    condition: Option<Box<Meta<Expression>>>,
    condition: Option<Box<Expression>>,
    body: Block,
  },
  ForExpression {
@@ -166,7 +208,7 @@ pub enum ExpressionKind {
    type_anno: Option<TypeIdentifier>,
    body: Block,
  },
  ListLiteral(Vec<Meta<Expression>>),
  ListLiteral(Vec<Expression>),
}

#[derive(Debug, PartialEq, Clone)]
@@ -179,35 +221,33 @@ pub enum InvocationArgument {
  Ignored
}

#[derive(Debug, PartialEq, Clone)]
pub enum Discriminator {
  Simple(Expression),
  BinOp(Expression, BinOp)
}

#[derive(Debug, PartialEq, Clone)]
pub enum IfExpressionBody {
  SimpleConditional(Block, Option<Block>),
  SimplePatternMatch(Pattern, Block, Option<Block>),
  GuardList(Vec<GuardArm>)
  SimpleConditional {
    then_case: Block,
    else_case: Option<Block>
  },
  SimplePatternMatch {
    pattern: Pattern,
    then_case: Block,
    else_case: Option<Block>
  },
  CondList(Vec<ConditionArm>)
}

#[derive(Debug, PartialEq, Clone)]
pub struct GuardArm {
  pub guard: Guard,
pub struct ConditionArm {
  pub condition: Condition,
  pub guard: Option<Expression>,
  pub body: Block,
}

#[derive(Debug, PartialEq, Clone)]
pub enum Guard {
  Pat(Pattern),
  HalfExpr(HalfExpr)
}

#[derive(Debug, PartialEq, Clone)]
pub struct HalfExpr {
  pub op: Option<BinOp>,
  pub expr: ExpressionKind,
pub enum Condition {
  Pattern(Pattern),
  TruncatedOp(BinOp, Expression),
  Expression(Expression),
  Else,
}

#[derive(Debug, PartialEq, Clone)]
@@ -215,8 +255,9 @@ pub enum Pattern {
  Ignored,
  TuplePattern(Vec<Pattern>),
  Literal(PatternLiteral),
  TupleStruct(Rc<String>, Vec<Pattern>),
  Record(Rc<String>, Vec<(Rc<String>, Pattern)>),
  TupleStruct(QualifiedName, Vec<Pattern>),
  Record(QualifiedName, Vec<(Rc<String>, Pattern)>),
  VarOrName(QualifiedName),
}

#[derive(Debug, PartialEq, Clone)]
@@ -227,17 +268,40 @@ pub enum PatternLiteral {
  },
  StringPattern(Rc<String>),
  BoolPattern(bool),
  VarPattern(Rc<String>)
}

#[derive(Debug, PartialEq, Clone)]
pub struct Enumerator {
  pub id: Rc<String>,
  pub generator: Meta<Expression>,
  pub generator: Expression,
}

#[derive(Debug, PartialEq, Clone)]
pub enum ForBody {
  MonadicReturn(Meta<Expression>),
  MonadicReturn(Expression),
  StatementBlock(Block),
}

#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub struct ImportSpecifier {
  #[derivative(PartialEq="ignore")]
  pub id: ItemId,
  pub path_components: Vec<Rc<String>>,
  pub imported_names: ImportedNames
}

#[derive(Debug, PartialEq, Clone)]
pub enum ImportedNames {
  All,
  LastOfPath,
  List(Vec<Rc<String>>)
}

#[derive(Debug, PartialEq, Clone)]
pub struct ModuleSpecifier {
  pub name: Rc<String>,
  pub contents: Vec<Statement>,
}
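The `ItemIdStore` introduced above is a monotonically increasing counter that stamps each AST node with a `u32`-backed id; the comment in the diff notes this caps the AST at 2^32 nodes. A self-contained sketch of the same mechanism (names mirror the diff, but this is an illustrative reimplementation, not the crate itself):

```rust
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
struct ItemId {
    idx: u32,
}

#[derive(Default)]
struct ItemIdStore {
    last_idx: u32,
}

impl ItemIdStore {
    // Hands out the next id; the u32 counter is what limits the tree to 2^32 elements.
    fn fresh(&mut self) -> ItemId {
        let idx = self.last_idx;
        self.last_idx += 1;
        ItemId { idx }
    }
}

fn main() {
    let mut store = ItemIdStore::default();
    let first = store.fresh();
    let second = store.fresh();
    assert_eq!(first, ItemId { idx: 0 });
    assert_ne!(first, second); // every node gets a distinct id
}
```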
schala-lang/language/src/ast/operators.rs (new file): 108 lines

@@ -0,0 +1,108 @@
use std::rc::Rc;
use std::str::FromStr;

use crate::tokenizing::TokenKind;
use crate::builtin::Builtin;

#[derive(Debug, PartialEq, Clone)]
pub struct PrefixOp {
  sigil: Rc<String>,
  pub builtin: Option<Builtin>,
}

impl PrefixOp {
  #[allow(dead_code)]
  pub fn sigil(&self) -> &Rc<String> {
    &self.sigil
  }
  pub fn is_prefix(op: &str) -> bool {
    match op {
      "+" => true,
      "-" => true,
      "!" => true,
      _ => false
    }
  }
}

impl FromStr for PrefixOp {
  type Err = ();

  fn from_str(s: &str) -> Result<Self, Self::Err> {
    use Builtin::*;

    let builtin = match s {
      "+" => Ok(Increment),
      "-" => Ok(Negate),
      "!" => Ok(BooleanNot),
      _ => Err(())
    };

    builtin.map(|builtin| PrefixOp { sigil: Rc::new(s.to_string()), builtin: Some(builtin) })
  }
}

#[derive(Debug, PartialEq, Clone)]
pub struct BinOp {
  sigil: Rc<String>,
  pub builtin: Option<Builtin>,
}

impl BinOp {
  pub fn from_sigil(sigil: &str) -> BinOp {
    let builtin = Builtin::from_str(sigil).ok();
    BinOp { sigil: Rc::new(sigil.to_string()), builtin }
  }
  pub fn sigil(&self) -> &Rc<String> {
    &self.sigil
  }
  pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
    let s = token_kind_to_sigil(tok)?;
    Some(BinOp::from_sigil(s))
  }

  pub fn min_precedence() -> i32 {
    i32::min_value()
  }
  pub fn get_precedence_from_token(op_tok: &TokenKind) -> Option<i32> {
    let s = token_kind_to_sigil(op_tok)?;
    Some(binop_precedences(s))
  }
}

fn token_kind_to_sigil<'a>(tok: &'a TokenKind) -> Option<&'a str> {
  use self::TokenKind::*;
  Some(match tok {
    Operator(op) => op.as_str(),
    Period => ".",
    Pipe => "|",
    Slash => "/",
    LAngleBracket => "<",
    RAngleBracket => ">",
    Equals => "=",
    _ => return None
  })
}

fn binop_precedences(s: &str) -> i32 {
  let default = 10_000_000;
  match s {
    "+" => 10,
    "-" => 10,
    "*" => 20,
    "/" => 20,
    "%" => 20,
    "++" => 30,
    "^" => 30,
    "&" => 20,
    "|" => 20,
    ">" => 20,
    ">=" => 20,
    "<" => 20,
    "<=" => 20,
    "==" => 40,
    "=" => 10,
    "<=>" => 30,
    _ => default,
  }
}
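Precedence numbers like those in `binop_precedences` are typically consulted by a precedence-climbing loop on each operator token. A small, self-contained sketch of that general technique over a toy token stream; none of these types or helpers are schala-lang's, they only illustrate how such a table can drive parsing:

```rust
// Hypothetical precedence lookup in the spirit of binop_precedences above.
fn precedence(op: char) -> i32 {
    match op {
        '+' | '-' => 10,
        '*' | '/' => 20,
        _ => 0,
    }
}

// Precedence climbing over a pre-tokenized sequence: nums[i] is followed by ops[i].
// Evaluates directly instead of building an AST, to keep the sketch short.
fn parse_expr(nums: &[i64], ops: &[char], pos: &mut usize, min_prec: i32) -> i64 {
    let mut lhs = nums[*pos];
    while *pos < ops.len() && precedence(ops[*pos]) >= min_prec {
        let op = ops[*pos];
        let prec = precedence(op);
        *pos += 1;
        // Left-associative operators bind the right-hand side at prec + 1.
        let rhs = parse_expr(nums, ops, pos, prec + 1);
        lhs = match op {
            '+' => lhs + rhs,
            '-' => lhs - rhs,
            '*' => lhs * rhs,
            '/' => lhs / rhs,
            _ => unreachable!(),
        };
    }
    lhs
}

fn main() {
    // 2 + 3 * 4 - 5 == 9, because '*' out-binds '+' and '-'.
    let nums = [2, 3, 4, 5];
    let ops = ['+', '*', '-'];
    let mut pos = 0;
    assert_eq!(parse_expr(&nums, &ops, &mut pos, 0), 9);
}
```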
schala-lang/language/src/ast/visitor.rs (new file): 55 lines

@@ -0,0 +1,55 @@
use std::rc::Rc;
use crate::ast::*;

//TODO maybe these functions should take closures that return a KeepRecursing | StopHere type,
//or a tuple of (T, <that type>)

pub trait ASTVisitor<BlockHandler=()>: Sized {
  type BlockHandler: BlockVisitor;
  fn ast(&mut self, _ast: &AST) {}
  fn block(&mut self) -> Self::BlockHandler { Self::BlockHandler::new() }
  fn block_finished(&mut self, handler: Self::BlockHandler) {}
  fn statement(&mut self, _statement: &Statement) {}
  fn declaration(&mut self, _declaration: &Declaration) {}
  fn signature(&mut self, _signature: &Signature) {}
  fn type_declaration(&mut self, _name: &TypeSingletonName, _body: &TypeBody, _mutable: bool) {}
  fn type_alias(&mut self, _alias: &Rc<String>, _original: &Rc<String>) {}
  fn binding(&mut self, _name: &Rc<String>, _constant: bool, _type_anno: Option<&TypeIdentifier>, _expr: &Expression) {}
  fn implemention(&mut self, _type_name: &TypeIdentifier, _interface_name: Option<&TypeSingletonName>, _block: &Vec<Declaration>) {}
  fn interface(&mut self, _name: &Rc<String>, _signatures: &Vec<Signature>) {}
  fn expression(&mut self, _expression: &Expression) {}
  fn expression_kind(&mut self, _kind: &ExpressionKind) {}
  fn type_annotation(&mut self, _type_anno: Option<&TypeIdentifier>) {}
  fn named_struct(&mut self, _name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {}
  fn call(&mut self, _f: &Expression, _arguments: &Vec<InvocationArgument>) {}
  fn index(&mut self, _indexee: &Expression, _indexers: &Vec<Expression>) {}
  fn if_expression(&mut self, _discrim: Option<&Expression>, _body: &IfExpressionBody) {}
  fn condition_arm(&mut self, _arm: &ConditionArm) {}
  fn while_expression(&mut self, _condition: Option<&Expression>, _body: &Block) {}
  fn for_expression(&mut self, _enumerators: &Vec<Enumerator>, _body: &ForBody) {}
  fn lambda(&mut self, _params: &Vec<FormalParam>, _type_anno: Option<&TypeIdentifier>, _body: &Block) {}
  fn invocation_argument(&mut self, _arg: &InvocationArgument) {}
  fn formal_param(&mut self, _param: &FormalParam) {}
  fn import(&mut self, _import: &ImportSpecifier) {}
  fn module(&mut self, _module: &ModuleSpecifier) {}
  fn qualified_name(&mut self, _name: &QualifiedName) {}
  fn nat_literal(&mut self, _n: u64) {}
  fn float_literal(&mut self, _f: f64) {}
  fn string_literal(&mut self, _s: &Rc<String>) {}
  fn bool_literal(&mut self, _b: bool) {}
  fn binexp(&mut self, _op: &BinOp, _lhs: &Expression, _rhs: &Expression) {}
  fn prefix_exp(&mut self, _op: &PrefixOp, _arg: &Expression) {}
  fn pattern(&mut self, _pat: &Pattern) {}
}

pub trait BlockVisitor {
  fn new() -> Self;
  fn pre_block(&mut self) {}
  fn post_block(&mut self) {}
}

impl BlockVisitor for () {
  fn new() -> () { () }
}
schala-lang/language/src/ast/visitor_test.rs (new file): 41 lines

@@ -0,0 +1,41 @@
#![cfg(test)]

use crate::ast::visitor::ASTVisitor;
use crate::ast::walker;
use crate::util::quick_ast;

struct Tester {
  count: u64,
  float_count: u64
}

impl ASTVisitor for Tester {
  fn nat_literal(&mut self, _n: u64) {
    self.count += 1;
  }
  fn float_literal(&mut self, _f: f64) {
    self.float_count += 1;
  }
}

#[test]
fn foo() {
  let mut tester = Tester { count: 0, float_count: 0 };
  let (ast, _) = quick_ast(r#"
import gragh

let a = 20 + 84
let b = 28 + 1 + 2 + 2.0
fn heh() {
  let m = 9
}

"#);

  walker::walk_ast(&mut tester, &ast);

  assert_eq!(tester.count, 6);
  assert_eq!(tester.float_count, 1);
}
270
schala-lang/language/src/ast/walker.rs
Normal file
270
schala-lang/language/src/ast/walker.rs
Normal file
@@ -0,0 +1,270 @@
|
||||
#![allow(dead_code)]
|
||||
use std::rc::Rc;
|
||||
use crate::ast::*;
|
||||
use crate::ast::visitor::{ASTVisitor, BlockVisitor};
|
||||
use crate::util::deref_optional_box;
|
||||
|
||||
pub fn walk_ast<V: ASTVisitor>(v: &mut V, ast: &AST) {
|
||||
v.ast(ast);
|
||||
walk_block(v, &ast.statements);
|
||||
}
|
||||
|
||||
fn walk_block<V: ASTVisitor>(v: &mut V, block: &Vec<Statement>) {
|
||||
let mut block_handler = v.block();
|
||||
block_handler.pre_block();
|
||||
for s in block {
|
||||
v.statement(s);
|
||||
statement(v, s);
|
||||
}
|
||||
block_handler.post_block();
|
||||
v.block_finished(block_handler);
|
||||
}
|
||||
|
||||
fn statement<V: ASTVisitor>(v: &mut V, statement: &Statement) {
|
||||
use StatementKind::*;
|
||||
match statement.kind {
|
||||
Expression(ref expr) => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
Declaration(ref decl) => {
|
||||
v.declaration(decl);
|
||||
declaration(v, decl);
|
||||
},
|
||||
Import(ref import_spec) => v.import(import_spec),
|
||||
Module(ref module_spec) => {
|
||||
v.module(module_spec);
|
||||
walk_block(v, &module_spec.contents);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn declaration<V: ASTVisitor>(v: &mut V, decl: &Declaration) {
|
||||
use Declaration::*;
|
||||
match decl {
|
||||
FuncSig(sig) => {
|
||||
v.signature(&sig);
|
||||
signature(v, &sig);
|
||||
},
|
||||
FuncDecl(sig, block) => {
|
||||
v.signature(&sig);
|
||||
walk_block(v, block);
|
||||
},
|
||||
TypeDecl { name, body, mutable } => v.type_declaration(name, body, *mutable),
|
||||
TypeAlias { alias, original} => v.type_alias(alias, original),
|
||||
Binding { name, constant, type_anno, expr } => {
|
||||
v.binding(name, *constant, type_anno.as_ref(), expr);
|
||||
v.type_annotation(type_anno.as_ref());
|
||||
v.expression(&expr);
|
||||
expression(v, &expr);
|
||||
},
|
||||
Impl { type_name, interface_name, block } => {
|
||||
v.implemention(type_name, interface_name.as_ref(), block);
|
||||
}
|
||||
Interface { name, signatures } => v.interface(name, signatures),
|
||||
}
|
||||
}
|
||||
|
||||
fn signature<V: ASTVisitor>(v: &mut V, signature: &Signature) {
|
||||
for p in signature.params.iter() {
|
||||
v.formal_param(p);
|
||||
}
|
||||
v.type_annotation(signature.type_anno.as_ref());
|
||||
for p in signature.params.iter() {
|
||||
formal_param(v, p);
|
||||
}
|
||||
}
|
||||
|
||||
fn expression<V: ASTVisitor>(v: &mut V, expression: &Expression) {
|
||||
v.expression_kind(&expression.kind);
|
||||
v.type_annotation(expression.type_anno.as_ref());
|
||||
expression_kind(v, &expression.kind);
|
||||
}
|
||||
|
||||
|
||||
fn call<V: ASTVisitor>(v: &mut V, f: &Expression, args: &Vec<InvocationArgument>) {
|
||||
v.expression(f);
|
||||
expression(v, f);
|
||||
for arg in args.iter() {
|
||||
v.invocation_argument(arg);
|
||||
invocation_argument(v, arg);
|
||||
}
|
||||
}
|
||||
|
||||
fn invocation_argument<V: ASTVisitor>(v: &mut V, arg: &InvocationArgument) {
|
||||
use InvocationArgument::*;
|
||||
match arg {
|
||||
Positional(expr) => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
Keyword { expr, .. } => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
Ignored => (),
|
||||
}
|
||||
}
|
||||
|
||||
fn index<V: ASTVisitor>(v: &mut V, indexee: &Expression, indexers: &Vec<Expression>) {
|
||||
v.expression(indexee);
|
||||
for i in indexers.iter() {
|
||||
v.expression(i);
|
||||
}
|
||||
}
|
||||
|
||||
fn named_struct<V: ASTVisitor>(v: &mut V, n: &QualifiedName, fields: &Vec<(Rc<String>, Expression)>) {
|
||||
v.qualified_name(n);
|
||||
for (_, expr) in fields.iter() {
|
||||
v.expression(expr);
|
||||
}
|
||||
}
|
||||
|
||||
fn lambda<V: ASTVisitor>(v: &mut V, params: &Vec<FormalParam>, type_anno: Option<&TypeIdentifier>, body: &Block) {
|
||||
for param in params {
|
||||
v.formal_param(param);
|
||||
formal_param(v, param);
|
||||
}
|
||||
v.type_annotation(type_anno);
|
||||
walk_block(v, body);
|
||||
}
|
||||
|
||||
fn formal_param<V: ASTVisitor>(v: &mut V, param: &FormalParam) {
|
||||
param.default.as_ref().map(|p| {
|
||||
v.expression(p);
|
||||
expression(v, p);
|
||||
});
|
||||
v.type_annotation(param.anno.as_ref());
|
||||
}
|
||||
|
||||
fn expression_kind<V: ASTVisitor>(v: &mut V, expression_kind: &ExpressionKind) {
|
||||
use ExpressionKind::*;
|
||||
match expression_kind {
|
||||
NatLiteral(n) => v.nat_literal(*n),
|
||||
FloatLiteral(f) => v.float_literal(*f),
|
||||
StringLiteral(s) => v.string_literal(s),
|
||||
BoolLiteral(b) => v.bool_literal(*b),
|
||||
BinExp(op, lhs, rhs) => {
|
||||
v.binexp(op, lhs, rhs);
|
||||
expression(v, lhs);
|
||||
expression(v, rhs);
|
||||
},
|
||||
PrefixExp(op, arg) => {
|
||||
v.prefix_exp(op, arg);
|
||||
expression(v, arg);
|
||||
}
|
||||
TupleLiteral(exprs) => {
|
||||
for expr in exprs {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
}
|
||||
},
|
||||
Value(name) => v.qualified_name(name),
|
||||
NamedStruct { name, fields } => {
|
||||
v.named_struct(name, fields);
|
||||
named_struct(v, name, fields);
|
||||
}
|
||||
Call { f, arguments } => {
|
||||
v.call(f, arguments);
|
||||
call(v, f, arguments);
|
||||
},
|
||||
Index { indexee, indexers } => {
|
||||
v.index(indexee, indexers);
|
||||
index(v, indexee, indexers);
|
||||
},
|
||||
IfExpression { discriminator, body } => {
|
||||
v.if_expression(deref_optional_box(discriminator), body);
|
||||
discriminator.as_ref().map(|d| expression(v, d));
|
||||
if_expression_body(v, body);
|
||||
},
|
||||
WhileExpression { condition, body } => v.while_expression(deref_optional_box(condition), body),
|
||||
ForExpression { enumerators, body } => v.for_expression(enumerators, body),
|
||||
Lambda { params , type_anno, body } => {
|
||||
v.lambda(params, type_anno.as_ref(), body);
|
||||
lambda(v, params, type_anno.as_ref(), body);
|
||||
},
|
||||
ListLiteral(exprs) => {
|
||||
for expr in exprs {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn if_expression_body<V: ASTVisitor>(v: &mut V, body: &IfExpressionBody) {
|
||||
use IfExpressionBody::*;
|
||||
match body {
|
||||
SimpleConditional { then_case, else_case } => {
|
||||
walk_block(v, then_case);
|
||||
else_case.as_ref().map(|block| walk_block(v, block));
|
||||
},
|
||||
SimplePatternMatch { pattern, then_case, else_case } => {
|
||||
v.pattern(pattern);
|
||||
walk_pattern(v, pattern);
|
||||
walk_block(v, then_case);
|
||||
else_case.as_ref().map(|block| walk_block(v, block));
|
||||
},
|
||||
CondList(arms) => {
|
||||
for arm in arms {
|
||||
v.condition_arm(arm);
|
||||
condition_arm(v, arm);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn condition_arm<V: ASTVisitor>(v: &mut V, arm: &ConditionArm) {
|
||||
use Condition::*;
|
||||
v.condition_arm(arm);
|
||||
match arm.condition {
|
||||
Pattern(ref pat) => {
|
||||
v.pattern(pat);
|
||||
walk_pattern(v, pat);
|
||||
},
|
||||
TruncatedOp(ref _binop, ref expr) => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
Expression(ref expr) => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
arm.guard.as_ref().map(|guard| {
|
||||
v.expression(guard);
|
||||
expression(v, guard);
|
||||
});
|
||||
walk_block(v, &arm.body);
|
||||
}
|
||||
|
||||
fn walk_pattern<V: ASTVisitor>(v: &mut V, pat: &Pattern) {
|
||||
use Pattern::*;
|
||||
match pat {
|
||||
TuplePattern(patterns) => {
|
||||
for pat in patterns {
|
||||
v.pattern(pat);
|
||||
walk_pattern(v, pat);
|
||||
}
|
||||
},
|
||||
TupleStruct(qualified_name, patterns) => {
|
||||
v.qualified_name(qualified_name);
|
||||
for pat in patterns {
|
||||
v.pattern(pat);
|
||||
walk_pattern(v, pat);
|
||||
}
|
||||
},
|
||||
Record(qualified_name, name_and_patterns) => {
|
||||
v.qualified_name(qualified_name);
|
||||
for (_, pat) in name_and_patterns {
|
||||
v.pattern(pat);
|
||||
walk_pattern(v, pat);
|
||||
}
|
||||
},
|
||||
VarOrName(qualified_name) => {
|
||||
v.qualified_name(qualified_name);
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
@@ -1,124 +1,102 @@
|
||||
use std::rc::Rc;
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::tokenizing::TokenKind;
|
||||
use crate::typechecking::{TypeConst, Type};
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct BinOp {
|
||||
sigil: Rc<String>
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum Builtin {
|
||||
Add,
|
||||
Increment,
|
||||
Subtract,
|
||||
Negate,
|
||||
Multiply,
|
||||
Divide,
|
||||
Quotient,
|
||||
Modulo,
|
||||
Exponentiation,
|
||||
BitwiseAnd,
|
||||
BitwiseOr,
|
||||
BooleanAnd,
|
||||
BooleanOr,
|
||||
BooleanNot,
|
||||
Equality,
|
||||
LessThan,
|
||||
LessThanOrEqual,
|
||||
GreaterThan,
|
||||
GreaterThanOrEqual,
|
||||
Comparison,
|
||||
FieldAccess,
|
||||
IOPrint,
|
||||
IOPrintLn,
|
||||
IOGetLine,
|
||||
Assignment,
|
||||
Concatenate,
|
||||
}
|
||||
|
||||
impl BinOp {
|
||||
pub fn from_sigil(sigil: &str) -> BinOp {
|
||||
BinOp { sigil: Rc::new(sigil.to_string()) }
|
||||
}
|
||||
pub fn sigil(&self) -> &Rc<String> {
|
||||
&self.sigil
|
||||
}
|
||||
pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
|
||||
use self::TokenKind::*;
|
||||
let s = match tok {
|
||||
Operator(op) => op,
|
||||
Period => ".",
|
||||
Pipe => "|",
|
||||
Slash => "/",
|
||||
LAngleBracket => "<",
|
||||
RAngleBracket => ">",
|
||||
Equals => "=",
|
||||
_ => return None
|
||||
};
|
||||
Some(BinOp::from_sigil(s))
|
||||
impl Builtin {
|
||||
pub fn get_type(&self) -> Type {
|
||||
use Builtin::*;
|
||||
match self {
|
||||
Add => ty!(Nat -> Nat -> Nat),
|
||||
Subtract => ty!(Nat -> Nat -> Nat),
|
||||
Multiply => ty!(Nat -> Nat -> Nat),
|
||||
Divide => ty!(Nat -> Nat -> Float),
|
||||
Quotient => ty!(Nat -> Nat -> Nat),
|
||||
Modulo => ty!(Nat -> Nat -> Nat),
|
||||
Exponentiation => ty!(Nat -> Nat -> Nat),
|
||||
BitwiseAnd => ty!(Nat -> Nat -> Nat),
|
||||
BitwiseOr => ty!(Nat -> Nat -> Nat),
|
||||
BooleanAnd => ty!(Bool -> Bool -> Bool),
|
||||
BooleanOr => ty!(Bool -> Bool -> Bool),
|
||||
BooleanNot => ty!(Bool -> Bool),
|
||||
Equality => ty!(Nat -> Nat -> Bool),
|
||||
LessThan => ty!(Nat -> Nat -> Bool),
|
||||
LessThanOrEqual => ty!(Nat -> Nat -> Bool),
|
||||
GreaterThan => ty!(Nat -> Nat -> Bool),
|
||||
GreaterThanOrEqual => ty!(Nat -> Nat -> Bool),
|
||||
Comparison => ty!(Nat -> Nat -> Ordering),
|
||||
FieldAccess => ty!(Unit),
|
||||
IOPrint => ty!(Unit),
|
||||
IOPrintLn => ty!(Unit) ,
|
||||
IOGetLine => ty!(StringT),
|
||||
Assignment => ty!(Unit),
|
||||
Concatenate => ty!(StringT -> StringT -> StringT),
|
||||
Increment => ty!(Nat -> Int),
|
||||
Negate => ty!(Nat -> Int)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_type(&self) -> Result<Type, String> {
|
||||
let s = self.sigil.as_str();
|
||||
BINOPS.get(s).map(|x| x.0.clone()).ok_or(format!("Binop {} not found", s))
|
||||
}
|
||||
impl FromStr for Builtin {
|
||||
type Err = ();
|
||||
|
||||
pub fn min_precedence() -> i32 {
|
||||
i32::min_value()
|
||||
}
|
||||
pub fn get_precedence_from_token(op: &TokenKind) -> Option<i32> {
|
||||
use self::TokenKind::*;
|
||||
let s = match op {
|
||||
Operator(op) => op,
|
||||
Period => ".",
|
||||
Pipe => "|",
|
||||
Slash => "/",
|
||||
LAngleBracket => "<",
|
||||
RAngleBracket => ">",
|
||||
Equals => "=",
|
||||
_ => return None
|
||||
};
|
||||
let default = 10_000_000;
|
||||
Some(BINOPS.get(s).map(|x| x.2.clone()).unwrap_or_else(|| {
|
||||
default
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn get_precedence(&self) -> i32 {
|
||||
let s: &str = &self.sigil;
|
||||
let default = 10_000_000;
|
||||
BINOPS.get(s).map(|x| x.2.clone()).unwrap_or_else(|| {
|
||||
default
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
use Builtin::*;
|
||||
Ok(match s {
|
||||
"+" => Add,
|
||||
"-" => Subtract,
|
||||
"*" => Multiply,
|
||||
"/" => Divide,
|
||||
"quot" => Quotient,
|
||||
"%" => Modulo,
|
||||
"++" => Concatenate,
|
||||
"^" => Exponentiation,
|
||||
"&" => BitwiseAnd,
|
||||
"&&" => BooleanAnd,
|
||||
"|" => BitwiseOr,
|
||||
"||" => BooleanOr,
|
||||
"!" => BooleanNot,
|
||||
">" => GreaterThan,
|
||||
">=" => GreaterThanOrEqual,
|
||||
"<" => LessThan,
|
||||
"<=" => LessThanOrEqual,
|
||||
"==" => Equality,
|
||||
"=" => Assignment,
|
||||
"<=>" => Comparison,
|
||||
"." => FieldAccess,
|
||||
"print" => IOPrint,
|
||||
"println" => IOPrintLn,
|
||||
"getline" => IOGetLine,
|
||||
_ => return Err(())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct PrefixOp {
|
||||
sigil: Rc<String>
|
||||
}
|
||||
|
||||
impl PrefixOp {
|
||||
pub fn from_sigil(sigil: &str) -> PrefixOp {
|
||||
PrefixOp { sigil: Rc::new(sigil.to_string()) }
|
||||
}
|
||||
pub fn sigil(&self) -> &Rc<String> {
|
||||
&self.sigil
|
||||
}
|
||||
pub fn is_prefix(op: &str) -> bool {
|
||||
PREFIX_OPS.get(op).is_some()
|
||||
}
|
||||
pub fn get_type(&self) -> Result<Type, String> {
|
||||
let s = self.sigil.as_str();
|
||||
PREFIX_OPS.get(s).map(|x| x.0.clone()).ok_or(format!("Prefix op {} not found", s))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
lazy_static! {
|
||||
static ref PREFIX_OPS: HashMap<&'static str, (Type, ())> =
|
||||
hashmap! {
|
||||
"+" => (ty!(Nat -> Int), ()),
|
||||
"-" => (ty!(Nat -> Int), ()),
|
||||
"!" => (ty!(Bool -> Bool), ()),
|
||||
};
|
||||
}
|
||||
|
||||
/* the second tuple member is a placeholder for when I want to make evaluation rules tied to the
|
||||
* binop definition */
|
||||
//TODO some of these types are going to have to be adjusted
|
||||
lazy_static! {
|
||||
static ref BINOPS: HashMap<&'static str, (Type, (), i32)> =
|
||||
hashmap! {
|
||||
"+" => (ty!(Nat -> Nat -> Nat), (), 10),
|
||||
"-" => (ty!(Nat -> Nat -> Nat), (), 10),
|
||||
"*" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"/" => (ty!(Nat -> Nat -> Float), (), 20),
|
||||
"quot" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"%" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"++" => (ty!(StringT -> StringT -> StringT), (), 30),
|
||||
"^" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"&" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"|" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
">" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
">=" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
"<" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
"<=" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
"==" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
"=" => (ty!(Unit), (), 20), //TODO not sure what the type of this should be b/c special fmr
|
||||
"<=>" => (ty!(Nat -> Nat -> Ordering), (), 20), //TODO figure out how to treat Order
|
||||
};
|
||||
}
|
||||
|
||||
10
schala-lang/language/src/debugging.rs
Normal file
10
schala-lang/language/src/debugging.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
use crate::ast::*;
|
||||
|
||||
impl AST {
|
||||
pub fn compact_debug(&self) -> String {
|
||||
format!("{:?}", self)
|
||||
}
|
||||
pub fn expanded_debug(&self) -> String {
|
||||
format!("{:#?}", self)
|
||||
}
|
||||
}
|
||||
@@ -1,33 +1,25 @@
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
use std::fmt::Write;
|
||||
use std::io;
|
||||
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::schala::SymbolTableHandle;
|
||||
use crate::util::ScopeStack;
|
||||
use crate::reduced_ast::{BoundVars, ReducedAST, Stmt, Expr, Lit, Func, Alternative, Subpattern};
|
||||
use crate::symbol_table::{SymbolSpec, Symbol, SymbolTable};
|
||||
use crate::symbol_table::{SymbolSpec, Symbol, SymbolTable, FullyQualifiedSymbolName};
|
||||
use crate::builtin::Builtin;
|
||||
|
||||
mod test;
|
||||
|
||||
pub struct State<'a> {
|
||||
values: ScopeStack<'a, Rc<String>, ValueEntry>,
|
||||
symbol_table_handle: Rc<RefCell<SymbolTable>>,
|
||||
}
|
||||
|
||||
macro_rules! builtin_binding {
|
||||
($name:expr, $values:expr) => {
|
||||
$values.insert(Rc::new(format!($name)), ValueEntry::Binding { constant: true, val: Node::Expr(Expr::Func(Func::BuiltIn(Rc::new(format!($name))))) });
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> State<'a> {
|
||||
pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> State<'a> {
|
||||
let mut values = ScopeStack::new(Some(format!("global")));
|
||||
builtin_binding!("print", values);
|
||||
builtin_binding!("println", values);
|
||||
builtin_binding!("getline", values);
|
||||
State { values, symbol_table_handle }
|
||||
pub fn new() -> State<'a> {
|
||||
let values = ScopeStack::new(Some(format!("global")));
|
||||
State { values }
|
||||
}
|
||||
|
||||
pub fn debug_print(&self) -> String {
|
||||
@@ -37,7 +29,6 @@ impl<'a> State<'a> {
|
||||
fn new_frame(&'a self, items: &'a Vec<Node>, bound_vars: &BoundVars) -> State<'a> {
|
||||
let mut inner_state = State {
|
||||
values: self.values.new_scope(None),
|
||||
symbol_table_handle: self.symbol_table_handle.clone(),
|
||||
};
|
||||
for (bound_var, val) in bound_vars.iter().zip(items.iter()) {
|
||||
if let Some(bv) = bound_var.as_ref() {
|
||||
@@ -119,16 +110,12 @@ impl Expr {
|
||||
StringLit(s) => format!("\"{}\"", s),
|
||||
},
|
||||
Expr::Func(f) => match f {
|
||||
BuiltIn(name) => format!("<built-in function '{}'>", name),
|
||||
BuiltIn(builtin) => format!("<built-in function '{:?}'>", builtin),
|
||||
UserDefined { name: None, .. } => format!("<function>"),
|
||||
UserDefined { name: Some(name), .. } => format!("<function '{}'>", name),
|
||||
},
|
||||
Expr::Constructor {
|
||||
type_name: _, name, arity, ..
|
||||
} => if *arity == 0 {
|
||||
format!("{}", name)
|
||||
} else {
|
||||
format!("<data constructor '{}'>", name)
|
||||
Expr::Constructor { type_name, arity, .. } => {
|
||||
format!("<constructor for `{}` arity {}>", type_name, arity)
|
||||
},
|
||||
Expr::Tuple(exprs) => paren_wrapped_vec(exprs.iter().map(|x| x.to_repl())),
|
||||
_ => format!("{:?}", self),
|
||||
@@ -140,8 +127,8 @@ impl Expr {
|
||||
|
||||
match self {
|
||||
ConditionalTargetSigilValue => replacement.clone(),
|
||||
Unit | Lit(_) | Func(_) | Val(_) | Constructor { .. } |
|
||||
CaseMatch { .. } | UnimplementedSigilValue => self,
|
||||
Unit | Lit(_) | Func(_) | Sym(_) | Constructor { .. } |
|
||||
CaseMatch { .. } | UnimplementedSigilValue | ReductionError(_) => self,
|
||||
Tuple(exprs) => Tuple(exprs.into_iter().map(|e| e.replace_conditional_target_sigil(replacement)).collect()),
|
||||
Call { f, args } => {
|
||||
let new_args = args.into_iter().map(|e| e.replace_conditional_target_sigil(replacement)).collect();
|
||||
@@ -164,7 +151,9 @@ impl<'a> State<'a> {
|
||||
|
||||
for statement in ast.0 {
|
||||
match self.statement(statement) {
|
||||
Ok(Some(ref output)) if repl => acc.push(Ok(output.to_repl())),
|
||||
Ok(Some(ref output)) if repl => {
|
||||
acc.push(Ok(output.to_repl()))
|
||||
},
|
||||
Ok(_) => (),
|
||||
Err(error) => {
|
||||
acc.push(Err(format!("Runtime error: {}", error)));
|
||||
@@ -217,7 +206,10 @@ impl<'a> State<'a> {
|
||||
Node::Expr(expr) => match expr {
|
||||
literal @ Lit(_) => Ok(Node::Expr(literal)),
|
||||
Call { box f, args } => self.call_expression(f, args),
|
||||
Val(v) => self.value(v),
|
||||
Sym(name) => Ok(match self.values.lookup(&name) {
|
||||
Some(ValueEntry::Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Could not look up symbol {}", name))
|
||||
}),
|
||||
Constructor { arity, ref name, tag, .. } if arity == 0 => Ok(Node::PrimObject { name: name.clone(), tag, items: vec![] }),
|
||||
constructor @ Constructor { .. } => Ok(Node::Expr(constructor)),
|
||||
func @ Func(_) => Ok(Node::Expr(func)),
|
||||
@@ -231,6 +223,7 @@ impl<'a> State<'a> {
|
||||
CaseMatch { box cond, alternatives } => self.case_match_expression(cond, alternatives),
|
||||
ConditionalTargetSigilValue => Ok(Node::Expr(ConditionalTargetSigilValue)),
|
||||
UnimplementedSigilValue => Err(format!("Sigil value eval not implemented")),
|
||||
ReductionError(err) => Err(format!("Reduction error: {}", err)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -246,7 +239,7 @@ impl<'a> State<'a> {
|
||||
|
||||
fn apply_data_constructor(&mut self, _type_name: Rc<String>, name: Rc<String>, tag: usize, arity: usize, args: Vec<Expr>) -> EvalResult<Node> {
|
||||
if arity != args.len() {
|
||||
return Err(format!("Data constructor {} requires {} args", name, arity));
|
||||
return Err(format!("Data constructor {} requires {} arg(s)", name, arity));
|
||||
}
|
||||
|
||||
let evaled_args = args.into_iter().map(|expr| self.expression(Node::Expr(expr))).collect::<Result<Vec<Node>,_>>()?;
|
||||
@@ -260,7 +253,7 @@ impl<'a> State<'a> {
|
||||
|
||||
fn apply_function(&mut self, f: Func, args: Vec<Expr>) -> EvalResult<Node> {
|
||||
match f {
|
||||
Func::BuiltIn(sigil) => Ok(Node::Expr(self.apply_builtin(sigil, args)?)),
|
||||
Func::BuiltIn(builtin) => Ok(self.apply_builtin(builtin, args)?),
|
||||
Func::UserDefined { params, body, name } => {
|
||||
|
||||
if params.len() != args.len() {
|
||||
@@ -268,7 +261,6 @@ impl<'a> State<'a> {
|
||||
}
|
||||
let mut func_state = State {
|
||||
values: self.values.new_scope(name.map(|n| format!("{}", n))),
|
||||
symbol_table_handle: self.symbol_table_handle.clone(),
|
||||
};
|
||||
for (param, val) in params.into_iter().zip(args.into_iter()) {
|
||||
let val = func_state.expression(Node::Expr(val))?;
|
||||
@@ -280,81 +272,84 @@ impl<'a> State<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_builtin(&mut self, name: Rc<String>, args: Vec<Expr>) -> EvalResult<Expr> {
|
||||
fn apply_builtin(&mut self, builtin: Builtin, args: Vec<Expr>) -> EvalResult<Node> {
|
||||
use self::Expr::*;
|
||||
use self::Lit::*;
|
||||
let evaled_args: Result<Vec<Expr>, String> = args.into_iter().map(|arg| {
|
||||
match self.expression(Node::Expr(arg)) {
|
||||
Ok(Node::Expr(e)) => Ok(e),
|
||||
Ok(Node::PrimTuple { .. }) => Err(format!("Trying to apply a builtin to a tuple")),
|
||||
Ok(Node::PrimObject { .. }) => Err(format!("Trying to apply a builtin to a primitive object")),
|
||||
Err(e) => Err(e)
|
||||
}
|
||||
}).collect();
|
||||
use Builtin::*;
|
||||
|
||||
let evaled_args: Result<Vec<Node>, String> = args.into_iter().map(|arg| self.expression(arg.to_node()))
|
||||
.collect();
|
||||
let evaled_args = evaled_args?;
|
||||
|
||||
Ok(match (name.as_str(), evaled_args.as_slice()) {
|
||||
/* binops */
|
||||
("+", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l + r)),
|
||||
("++", &[Lit(StringLit(ref s1)), Lit(StringLit(ref s2))]) => Lit(StringLit(Rc::new(format!("{}{}", s1, s2)))),
|
||||
("-", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l - r)),
|
||||
("*", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l * r)),
|
||||
("/", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Float((l as f64)/ (r as f64))),
|
||||
("quot", &[Lit(Nat(l)), Lit(Nat(r))]) => if r == 0 {
|
||||
return Err(format!("divide by zero"));
|
||||
} else {
|
||||
Lit(Nat(l / r))
|
||||
Ok(match (builtin, evaled_args.as_slice()) {
|
||||
(FieldAccess, &[Node::PrimObject { .. }]) => {
|
||||
//TODO implement field access
|
||||
unimplemented!()
|
||||
},
|
||||
("%", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l % r)),
|
||||
("^", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l ^ r)),
|
||||
("&", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l & r)),
|
||||
("|", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l | r)),
|
||||
(binop, &[Node::Expr(ref lhs), Node::Expr(ref rhs)]) => match (binop, lhs, rhs) {
|
||||
/* binops */
|
||||
(Add, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l + r)),
|
||||
(Concatenate, Lit(StringLit(ref s1)), Lit(StringLit(ref s2))) => Lit(StringLit(Rc::new(format!("{}{}", s1, s2)))),
|
||||
(Subtract, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l - r)),
|
||||
(Multiply, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l * r)),
|
||||
(Divide, Lit(Nat(l)), Lit(Nat(r))) => Lit(Float((*l as f64)/ (*r as f64))),
|
||||
(Quotient, Lit(Nat(l)), Lit(Nat(r))) => if *r == 0 {
|
||||
return Err(format!("divide by zero"));
|
||||
} else {
|
||||
Lit(Nat(l / r))
|
||||
},
|
||||
(Modulo, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l % r)),
|
||||
(Exponentiation, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l ^ r)),
|
||||
(BitwiseAnd, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l & r)),
|
||||
(BitwiseOr, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l | r)),
|
||||
|
||||
/* comparisons */
|
||||
("==", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l == r)),
|
||||
("==", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l == r)),
|
||||
("==", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l == r)),
|
||||
("==", &[Lit(Bool(l)), Lit(Bool(r))]) => Lit(Bool(l == r)),
|
||||
("==", &[Lit(StringLit(ref l)), Lit(StringLit(ref r))]) => Lit(Bool(l == r)),
|
||||
/* comparisons */
|
||||
(Equality, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l == r)),
|
||||
(Equality, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l == r)),
|
||||
(Equality, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l == r)),
|
||||
(Equality, Lit(Bool(l)), Lit(Bool(r))) => Lit(Bool(l == r)),
|
||||
(Equality, Lit(StringLit(ref l)), Lit(StringLit(ref r))) => Lit(Bool(l == r)),
|
||||
|
||||
("<", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l < r)),
|
||||
("<", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l < r)),
|
||||
("<", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l < r)),
|
||||
(LessThan, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l < r)),
|
||||
(LessThan, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l < r)),
|
||||
(LessThan, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l < r)),
|
||||
|
||||
("<=", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l <= r)),
|
||||
("<=", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l <= r)),
|
||||
("<=", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l <= r)),
|
||||
(LessThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l <= r)),
|
||||
(LessThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l <= r)),
|
||||
(LessThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l <= r)),
|
||||
|
||||
(">", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l > r)),
|
||||
(">", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l > r)),
|
||||
(">", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l > r)),
|
||||
|
||||
(">=", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l >= r)),
|
||||
(">=", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l >= r)),
|
||||
(">=", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l >= r)),
|
||||
|
||||
/* prefix ops */
|
||||
("!", &[Lit(Bool(true))]) => Lit(Bool(false)),
|
||||
("!", &[Lit(Bool(false))]) => Lit(Bool(true)),
|
||||
("-", &[Lit(Nat(n))]) => Lit(Int(-1*(n as i64))),
|
||||
("-", &[Lit(Int(n))]) => Lit(Int(-1*(n as i64))),
|
||||
("+", &[Lit(Int(n))]) => Lit(Int(n)),
|
||||
("+", &[Lit(Nat(n))]) => Lit(Nat(n)),
|
||||
(GreaterThan, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l > r)),
|
||||
(GreaterThan, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l > r)),
|
||||
(GreaterThan, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l > r)),
|
||||
|
||||
(GreaterThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l >= r)),
|
||||
(GreaterThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l >= r)),
|
||||
(GreaterThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l >= r)),
|
||||
_ => return Err("No valid binop".to_string())
|
||||
}.to_node(),
|
||||
(prefix, &[Node::Expr(ref arg)]) => match (prefix, arg) {
|
||||
(BooleanNot, Lit(Bool(true))) => Lit(Bool(false)),
|
||||
(BooleanNot, Lit(Bool(false))) => Lit(Bool(true)),
|
||||
(Negate, Lit(Nat(n))) => Lit(Int(-1*(*n as i64))),
|
||||
(Negate, Lit(Int(n))) => Lit(Int(-1*(*n as i64))),
|
||||
(Increment, Lit(Int(n))) => Lit(Int(*n)),
|
||||
(Increment, Lit(Nat(n))) => Lit(Nat(*n)),
|
||||
_ => return Err("No valid prefix op".to_string())
|
||||
}.to_node(),
|
||||
|
||||
/* builtin functions */
|
||||
("print", &[ref anything]) => {
|
||||
(IOPrint, &[ref anything]) => {
|
||||
print!("{}", anything.to_repl());
|
||||
Expr::Unit
|
||||
Expr::Unit.to_node()
|
||||
},
|
||||
("println", &[ref anything]) => {
|
||||
(IOPrintLn, &[ref anything]) => {
|
||||
println!("{}", anything.to_repl());
|
||||
Expr::Unit
|
||||
Expr::Unit.to_node()
|
||||
},
|
||||
("getline", &[]) => {
|
||||
(IOGetLine, &[]) => {
|
||||
let mut buf = String::new();
|
||||
io::stdin().read_line(&mut buf).expect("Error readling line in 'getline'");
|
||||
Lit(StringLit(Rc::new(buf.trim().to_string())))
|
||||
Lit(StringLit(Rc::new(buf.trim().to_string()))).to_node()
|
||||
},
|
||||
(x, args) => return Err(format!("bad or unimplemented builtin {:?} | {:?}", x, args)),
|
||||
})
|
||||
@@ -371,7 +366,7 @@ impl<'a> State<'a> {
|
||||
|
||||
fn assign_expression(&mut self, val: Expr, expr: Expr) -> EvalResult<Node> {
|
||||
let name = match val {
|
||||
Expr::Val(name) => name,
|
||||
Expr::Sym(name) => name,
|
||||
_ => return Err(format!("Trying to assign to a non-value")),
|
||||
};
|
||||
|
||||
@@ -425,15 +420,15 @@ impl<'a> State<'a> {
|
||||
let cond = self.expression(Node::Expr(cond))?;
|
||||
for alt in alternatives {
|
||||
// no matter what type of condition we have, ignore alternative if the guard evaluates false
|
||||
if !self.guard_passes(&alt.guard, &cond)? {
|
||||
if !self.guard_passes(&alt.matchable.guard, &cond)? {
|
||||
continue;
|
||||
}
|
||||
|
||||
match cond {
|
||||
Node::PrimObject { ref tag, ref items, .. } => {
|
||||
if alt.tag.map(|t| t == *tag).unwrap_or(true) {
|
||||
let mut inner_state = self.new_frame(items, &alt.bound_vars);
|
||||
if all_subpatterns_pass(&mut inner_state, &alt.subpatterns, items)? {
|
||||
if alt.matchable.tag.map(|t| t == *tag).unwrap_or(true) {
|
||||
let mut inner_state = self.new_frame(items, &alt.matchable.bound_vars);
|
||||
if all_subpatterns_pass(&mut inner_state, &alt.matchable.subpatterns, items)? {
|
||||
return inner_state.block(alt.item);
|
||||
} else {
|
||||
continue;
|
||||
@@ -441,15 +436,15 @@ impl<'a> State<'a> {
|
||||
}
|
||||
},
|
||||
Node::PrimTuple { ref items } => {
|
||||
let mut inner_state = self.new_frame(items, &alt.bound_vars);
|
||||
if all_subpatterns_pass(&mut inner_state, &alt.subpatterns, items)? {
|
||||
let mut inner_state = self.new_frame(items, &alt.matchable.bound_vars);
|
||||
if all_subpatterns_pass(&mut inner_state, &alt.matchable.subpatterns, items)? {
|
||||
return inner_state.block(alt.item);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
},
|
||||
Node::Expr(ref _e) => {
|
||||
if let None = alt.tag {
|
||||
if let None = alt.matchable.tag {
|
||||
return self.block(alt.item)
|
||||
}
|
||||
}
|
||||
@@ -457,293 +452,4 @@ impl<'a> State<'a> {
|
||||
}
|
||||
Err(format!("{:?} failed pattern match", cond))
|
||||
}
|
||||
|
||||
//TODO if I don't need to lookup by name here...
|
||||
fn value(&mut self, name: Rc<String>) -> EvalResult<Node> {
|
||||
use self::ValueEntry::*;
|
||||
use self::Func::*;
|
||||
//TODO add a layer of indirection here to talk to the symbol table first, and only then look up
|
||||
//in the values table
|
||||
|
||||
let symbol_table = self.symbol_table_handle.borrow();
|
||||
let value = symbol_table.lookup_by_name(&name);
|
||||
Ok(match value {
|
||||
Some(Symbol { name, spec, .. }) => match spec {
|
||||
//TODO I'll need this type_name later to do a table lookup
|
||||
SymbolSpec::DataConstructor { type_name: _type_name, type_args, .. } => {
|
||||
if type_args.len() == 0 {
|
||||
Node::PrimObject { name: name.clone(), tag: 0, items: vec![] }
|
||||
} else {
|
||||
return Err(format!("This data constructor thing not done"))
|
||||
}
|
||||
},
|
||||
SymbolSpec::Func(_) => match self.values.lookup(&name) {
|
||||
Some(Binding { val: Node::Expr(Expr::Func(UserDefined { name, params, body })), .. }) => {
|
||||
Node::Expr(Expr::Func(UserDefined { name: name.clone(), params: params.clone(), body: body.clone() }))
|
||||
},
|
||||
_ => unreachable!(),
|
||||
},
|
||||
SymbolSpec::RecordConstructor { .. } => return Err(format!("This shouldn't be a record!")),
|
||||
SymbolSpec::Binding => match self.values.lookup(&name) {
|
||||
Some(Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Symbol {} exists in symbol table but not in evaluator table", name))
|
||||
}
|
||||
},
|
||||
//TODO ideally this should be returning a runtime error if this is ever None, but it's not
|
||||
//handling all bindings correctly yet
|
||||
//None => return Err(format!("Couldn't find value {}", name)),
|
||||
None => match self.values.lookup(&name) {
|
||||
Some(Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Couldn't find value {}", name)),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod eval_tests {
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::symbol_table::SymbolTable;
|
||||
use crate::eval::State;
|
||||
|
||||
fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
|
||||
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
|
||||
let mut state = State::new(symbol_table);
|
||||
let ast = crate::util::quick_ast(input);
|
||||
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
|
||||
let reduced = ast.reduce(&state.symbol_table_handle.borrow());
|
||||
let all_output = state.evaluate(reduced, true);
|
||||
all_output
|
||||
}
|
||||
|
||||
macro_rules! test_in_fresh_env {
|
||||
($string:expr, $correct:expr) => {
|
||||
{
|
||||
let all_output = evaluate_all_outputs($string);
|
||||
let ref output = all_output.last().unwrap();
|
||||
assert_eq!(**output, Ok($correct.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_basic_eval() {
|
||||
test_in_fresh_env!("1 + 2", "3");
|
||||
test_in_fresh_env!("let mut a = 1; a = 2", "Unit");
|
||||
/*
|
||||
test_in_fresh_env!("let mut a = 1; a = 2; a", "2");
|
||||
test_in_fresh_env!(r#"("a", 1 + 2)"#, r#"("a", 3)"#);
|
||||
*/
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_eval() {
|
||||
test_in_fresh_env!("fn oi(x) { x + 1 }; oi(4)", "5");
|
||||
test_in_fresh_env!("fn oi(x) { x + 1 }; oi(1+2)", "4");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scopes() {
|
||||
let scope_ok = r#"
|
||||
let a = 20
|
||||
fn haha() {
|
||||
let a = 10
|
||||
a
|
||||
}
|
||||
haha()
|
||||
"#;
|
||||
test_in_fresh_env!(scope_ok, "10");
|
||||
let scope_ok = r#"
|
||||
let a = 20
|
||||
fn haha() {
|
||||
let a = 10
|
||||
a
|
||||
}
|
||||
a
|
||||
"#;
|
||||
test_in_fresh_env!(scope_ok, "20");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn if_is_patterns() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let x = Some(9); if x is Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "9");
|
||||
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let x = None; if x is Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn full_if_matching() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let a = None
|
||||
if a { is None -> 4, is Some(x) -> x }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "4");
|
||||
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let a = Some(99)
|
||||
if a { is None -> 4, is Some(x) -> x }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "99");
|
||||
|
||||
let source = r#"
|
||||
let a = 10
|
||||
if a { is 10 -> "x", is 4 -> "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
|
||||
let source = r#"
|
||||
let a = 10
|
||||
if a { is 15 -> "x", is 10 -> "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"y\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_pattern() {
|
||||
let source = r#"
|
||||
let a = "foo"
|
||||
if a { is "foo" -> "x", is _ -> "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn boolean_pattern() {
|
||||
let source = r#"
|
||||
let a = true
|
||||
if a {
|
||||
is true -> "x",
|
||||
is false -> "y"
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn boolean_pattern_2() {
|
||||
let source = r#"
|
||||
let a = false
|
||||
if a { is true -> "x", is false -> "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"y\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_pattern() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
if Some(10) {
|
||||
is _ -> "hella"
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"hella\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern() {
|
||||
let source = r#"
|
||||
if (1, 2) {
|
||||
is (1, x) -> x,
|
||||
is _ -> 99
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 2);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_2() {
|
||||
let source = r#"
|
||||
if (1, 2) {
|
||||
is (10, x) -> x,
|
||||
is (y, x) -> x + y
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_3() {
|
||||
let source = r#"
|
||||
if (1, 5) {
|
||||
is (10, x) -> x,
|
||||
is (1, x) -> x
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_4() {
|
||||
let source = r#"
|
||||
if (1, 5) {
|
||||
is (10, x) -> x,
|
||||
is (1, x) -> x,
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prim_obj_pattern() {
|
||||
let source = r#"
|
||||
type Stuff = Mulch(Nat) | Jugs(Nat, String) | Mardok
|
||||
let a = Mulch(20)
|
||||
let b = Jugs(1, "haha")
|
||||
let c = Mardok
|
||||
|
||||
let x = if a {
|
||||
is Mulch(20) -> "x",
|
||||
is _ -> "ERR"
|
||||
}
|
||||
|
||||
let y = if b {
|
||||
is Mulch(n) -> "ERR",
|
||||
is Jugs(2, _) -> "ERR",
|
||||
is Jugs(1, s) -> s,
|
||||
is _ -> "ERR",
|
||||
}
|
||||
|
||||
let z = if c {
|
||||
is Jugs(_, _) -> "ERR",
|
||||
is Mardok -> "NIGH",
|
||||
is _ -> "ERR",
|
||||
}
|
||||
|
||||
(x, y, z)
|
||||
"#;
|
||||
test_in_fresh_env!(source, r#"("x", "haha", "NIGH")"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_lambda_syntax() {
|
||||
let source = r#"
|
||||
let q = \(x, y) { x * y }
|
||||
let x = q(5,2)
|
||||
let y = \(m, n, o) { m + n + o }(1,2,3)
|
||||
(x, y)
|
||||
"#;
|
||||
test_in_fresh_env!(source, r"(10, 6)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lambda_syntax_2() {
|
||||
let source = r#"
|
||||
fn milta() {
|
||||
\(x) { x + 33 }
|
||||
}
|
||||
milta()(10)
|
||||
"#;
|
||||
test_in_fresh_env!(source, "43");
|
||||
}
|
||||
}
|
||||
|
||||
269
schala-lang/language/src/eval/test.rs
Normal file
@@ -0,0 +1,269 @@
|
||||
#![cfg(test)]
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::symbol_table::SymbolTable;
|
||||
use crate::scope_resolution::ScopeResolver;
|
||||
use crate::reduced_ast::reduce;
|
||||
use crate::eval::State;
|
||||
|
||||
fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
|
||||
let (mut ast, source_map) = crate::util::quick_ast(input);
|
||||
let source_map = Rc::new(RefCell::new(source_map));
|
||||
let symbol_table = Rc::new(RefCell::new(SymbolTable::new(source_map)));
|
||||
symbol_table.borrow_mut().add_top_level_symbols(&ast).unwrap();
|
||||
{
|
||||
let mut scope_resolver = ScopeResolver::new(symbol_table.clone());
|
||||
let _ = scope_resolver.resolve(&mut ast);
|
||||
}
|
||||
|
||||
let reduced = reduce(&ast, &symbol_table.borrow());
|
||||
let mut state = State::new();
|
||||
let all_output = state.evaluate(reduced, true);
|
||||
all_output
|
||||
}
|
||||
|
||||
macro_rules! test_in_fresh_env {
|
||||
($string:expr, $correct:expr) => {
|
||||
{
|
||||
let all_output = evaluate_all_outputs($string);
|
||||
let ref output = all_output.last().unwrap();
|
||||
assert_eq!(**output, Ok($correct.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_basic_eval() {
|
||||
test_in_fresh_env!("1 + 2", "3");
|
||||
test_in_fresh_env!("let mut a = 1; a = 2", "Unit");
|
||||
/*
|
||||
test_in_fresh_env!("let mut a = 1; a = 2; a", "2");
|
||||
test_in_fresh_env!(r#"("a", 1 + 2)"#, r#"("a", 3)"#);
|
||||
*/
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_eval() {
|
||||
test_in_fresh_env!("- 13", "-13");
|
||||
test_in_fresh_env!("10 - 2", "8");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_eval() {
|
||||
test_in_fresh_env!("fn oi(x) { x + 1 }; oi(4)", "5");
|
||||
test_in_fresh_env!("fn oi(x) { x + 1 }; oi(1+2)", "4");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scopes() {
|
||||
let scope_ok = r#"
|
||||
let a = 20
|
||||
fn haha() {
|
||||
let a = 10
|
||||
a
|
||||
}
|
||||
haha()
|
||||
"#;
|
||||
test_in_fresh_env!(scope_ok, "10");
|
||||
let scope_ok = r#"
|
||||
let a = 20
|
||||
fn queque() {
|
||||
let a = 10
|
||||
a
|
||||
}
|
||||
a
|
||||
"#;
|
||||
test_in_fresh_env!(scope_ok, "20");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn if_is_patterns() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let x = Option::Some(9); if x is Option::Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "9");
|
||||
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let x = Option::None; if x is Option::Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn full_if_matching() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let a = Option::None
|
||||
if a { is Option::None then 4, is Option::Some(x) then x }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "4");
|
||||
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let a = Option::Some(99)
|
||||
if a { is Option::None then 4, is Option::Some(x) then x }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "99");
|
||||
|
||||
let source = r#"
|
||||
let a = 10
|
||||
if a { is 10 then "x", is 4 then "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
|
||||
let source = r#"
|
||||
let a = 10
|
||||
if a { is 15 then "x", is 10 then "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"y\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_pattern() {
|
||||
let source = r#"
|
||||
let a = "foo"
|
||||
if a { is "foo" then "x", is _ then "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn boolean_pattern() {
|
||||
let source = r#"
|
||||
let a = true
|
||||
if a {
|
||||
is true then "x",
|
||||
is false then "y"
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn boolean_pattern_2() {
|
||||
let source = r#"
|
||||
let a = false
|
||||
if a { is true then "x", is false then "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"y\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_pattern() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
if Option::Some(10) {
|
||||
is _ then "hella"
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"hella\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern() {
|
||||
let source = r#"
|
||||
if (1, 2) {
|
||||
is (1, x) then x,
|
||||
is _ then 99
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 2);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_2() {
|
||||
let source = r#"
|
||||
if (1, 2) {
|
||||
is (10, x) then x,
|
||||
is (y, x) then x + y
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_3() {
|
||||
let source = r#"
|
||||
if (1, 5) {
|
||||
is (10, x) then x,
|
||||
is (1, x) then x
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_4() {
|
||||
let source = r#"
|
||||
if (1, 5) {
|
||||
is (10, x) then x,
|
||||
is (1, x) then x,
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prim_obj_pattern() {
|
||||
let source = r#"
|
||||
type Stuff = Mulch(Nat) | Jugs(Nat, String) | Mardok
|
||||
let a = Stuff::Mulch(20)
|
||||
let b = Stuff::Jugs(1, "haha")
|
||||
let c = Stuff::Mardok
|
||||
|
||||
let x = if a {
|
||||
is Stuff::Mulch(20) then "x",
|
||||
is _ then "ERR"
|
||||
}
|
||||
|
||||
let y = if b {
|
||||
is Stuff::Mulch(n) then "ERR",
|
||||
is Stuff::Jugs(2, _) then "ERR",
|
||||
is Stuff::Jugs(1, s) then s,
|
||||
is _ then "ERR",
|
||||
}
|
||||
|
||||
let z = if c {
|
||||
is Stuff::Jugs(_, _) then "ERR",
|
||||
is Stuff::Mardok then "NIGH",
|
||||
is _ then "ERR",
|
||||
}
|
||||
|
||||
(x, y, z)
|
||||
"#;
|
||||
test_in_fresh_env!(source, r#"("x", "haha", "NIGH")"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_lambda_syntax() {
|
||||
let source = r#"
|
||||
let q = \(x, y) { x * y }
|
||||
let x = q(5,2)
|
||||
let y = \(m, n, o) { m + n + o }(1,2,3)
|
||||
(x, y)
|
||||
"#;
|
||||
test_in_fresh_env!(source, r"(10, 6)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lambda_syntax_2() {
|
||||
let source = r#"
|
||||
fn milta() {
|
||||
\(x) { x + 33 }
|
||||
}
|
||||
milta()(10)
|
||||
"#;
|
||||
test_in_fresh_env!(source, "43");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import_all() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
import Option::*
|
||||
let x = Some(9); if x is Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "9");
|
||||
}
|
||||
@@ -1,6 +1,5 @@
|
||||
#![feature(associated_type_defaults)] //needed for Visitor trait
|
||||
#![feature(trace_macros)]
|
||||
#![feature(custom_attribute)]
|
||||
//#![feature(unrestricted_attribute_tokens)]
|
||||
#![feature(slice_patterns, box_patterns, box_syntax)]
|
||||
|
||||
//! `schala-lang` is where the Schala programming language is actually implemented.
|
||||
@@ -16,6 +15,9 @@ extern crate schala_repl;
|
||||
#[macro_use]
|
||||
extern crate schala_lang_codegen;
|
||||
extern crate ena;
|
||||
extern crate derivative;
|
||||
extern crate colored;
|
||||
extern crate radix_trie;
|
||||
|
||||
|
||||
macro_rules! bx {
|
||||
@@ -26,14 +28,18 @@ macro_rules! bx {
|
||||
mod util;
|
||||
#[macro_use]
|
||||
mod typechecking;
|
||||
mod debugging;
|
||||
|
||||
mod tokenizing;
|
||||
mod ast;
|
||||
mod parsing;
|
||||
#[macro_use]
|
||||
mod symbol_table;
|
||||
mod scope_resolution;
|
||||
mod builtin;
|
||||
mod reduced_ast;
|
||||
mod eval;
|
||||
mod source_map;
|
||||
|
||||
mod schala;
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
//! ```text
|
||||
//! program := (statement delimiter)* EOF
|
||||
//! delimiter := NEWLINE | ";"
|
||||
//! statement := expression | declaration
|
||||
//! statement := expression | declaration | import | module
|
||||
//! block := "{" (statement delimiter)* "}"
|
||||
//! declaration := type_declaration | func_declaration | binding_declaration | impl_declaration
|
||||
//! ```
|
||||
@@ -85,13 +85,14 @@
|
||||
//! lambda_param_list := formal_param_list | formal_param
|
||||
//! paren_expr := "(" paren_inner ")"
|
||||
//! paren_inner := (expression ",")*
|
||||
//! identifier_expr := named_struct | IDENTIFIER
|
||||
//! identifier_expr := qualified_identifier | named_struct
|
||||
//! qualified_identifier := IDENTIFIER ("::" IDENTIFIER)*
|
||||
//! ```
|
||||
//!
|
||||
//! ## Literals
|
||||
//! ```text
|
||||
//! literal := "true" | "false" | number_literal | STR_LITERAL
|
||||
//! named_struct := IDENTIFIER record_block
|
||||
//! named_struct := qualified_identifier record_block
|
||||
//! record_block := "{" (record_entry, ",")* | "}" //TODO support anonymous structs, update syntax
|
||||
//! record_entry := IDENTIFIER ":" expression
|
||||
//! anonymous_struct := TODO
|
||||
@@ -106,24 +107,32 @@
|
||||
//! ```text
|
||||
//! pattern := "(" (pattern, ",")* ")" | simple_pattern
|
||||
//! simple_pattern := pattern_literal | record_pattern | tuple_struct_pattern
|
||||
//! pattern_literal := "true" | "false" | signed_number_literal | STR_LITERAL | IDENTIFIER
|
||||
//! pattern_literal := "true" | "false" | signed_number_literal | STR_LITERAL | qualified_identifier
|
||||
//! signed_number_literal := "-"? number_literal
|
||||
//! record_pattern := IDENTIFIER "{" (record_pattern_entry, ",")* "}"
|
||||
//! record_pattern := qualified_identifier "{" (record_pattern_entry, ",")* "}"
|
||||
//! record_pattern_entry := IDENTIFIER | IDENTIFIER ":" Pattern
|
||||
//! tuple_struct_pattern := IDENTIFIER "(" (pattern, ",")* ")"
|
||||
//! tuple_struct_pattern := qualified_identifier "(" (pattern, ",")* ")"
|
||||
//! ```
|
||||
//! ### If expressions
|
||||
//!
|
||||
//! TODO: it would be nice if the grammar could capture an incomplete precedence expr in the
|
||||
//! discriminator
|
||||
//!
|
||||
//! ```text
|
||||
//! if_expr := "if" discriminator if_expr_body
|
||||
//! if_expr_body := ("then" simple_conditional | "is" simple_pattern_match | cond_block)
|
||||
//! discriminator := ε | expression
|
||||
//! simple_conditional := expr_or_block else_case
|
||||
//! simple_pattern_match := pattern "then" simple_conditional
|
||||
//! else_case := "else" expr_or_block
|
||||
//!
|
||||
//! cond_block := "{" (cond_arm comma_or_delimiter)* "}"
|
||||
//! cond_arm := condition guard "then" expr_or_block | "else" expr_or_block
|
||||
//! condition := "is" pattern | operator precedence_expr | expression
|
||||
//! guard := "if" expression
|
||||
//! comma_or_delimiter := "," | delimiter
|
||||
//! ```
|
||||
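As a rough, parse-only sketch of the cond_block grammar above (not part of this diff): the make_parser helper and parser.expression() are the ones added in the parsing tests later in this changeset, and nothing here assumes the evaluator already honors guards.

#[test]
fn cond_block_guard_parse_sketch() {
    // One discriminator, two cond_arms: the first uses an "is" condition plus an
    // "if" guard, the second is the fallback "else" arm.
    let mut parser = make_parser(r#"if a { is Some(x) if x then "yes", else "no" }"#);
    assert!(parser.expression().is_ok());
}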
//!
|
||||
//! ### If-expressions
|
||||
//! ```text
|
||||
//! if_expr := "if" discriminator ("then" condititional | "is" simple_pattern_match | guard_block)
|
||||
//! discriminator := precedence_expr (operator)+
|
||||
//! conditional := expr_or_block else_clause
|
||||
//! simple_pattern_match := pattern "then" conditional
|
||||
//! else_clause := ε | "else" expr_or_block
|
||||
//! guard_block := "{" (guard_arm, ",")* "}"
|
||||
//! guard_arm := guard "->" expr_or_block
|
||||
//! guard := "is" pattern | (operator)+ precedence_expr
|
||||
//! ```
|
||||
//!
|
||||
//! ### While expressions
|
||||
//! ```text
|
||||
@@ -140,30 +149,39 @@
|
||||
//! enumerators := enumerator ("," enumerators)*
|
||||
//! enumerator := identifier "<-" expression | identifier "=" expression //TODO add guards, etc.
|
||||
//! ```
|
||||
//! ## Imports
|
||||
//! ```text
|
||||
//! import := 'import' IDENTIFIER (:: IDENTIFIER)* import_suffix
|
||||
//! import_suffix := ε | '::{' IDENTIFIER (, IDENTIFIER)* '}' | '*' //TODO add qualified, exclusions, etc.
|
||||
//! ```
|
||||
|
||||
//! ## Modules
|
||||
//! ```text
|
||||
//! module := 'module' IDENTIFIER '{' statement* '}'
|
||||
//! ```
|
||||
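A hypothetical, parse-only sketch of the module production above (not part of this diff; the module name is made up, and make_parser is the parsing-test helper added later in this changeset):

#[test]
fn module_declaration_parse_sketch() {
    // 'module' IDENTIFIER '{' statement* '}' with a single fn declaration inside.
    let mut parser = make_parser("module Paks { fn a() { 1 } }");
    assert!(parser.parse().is_ok());
}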
mod test;
|
||||
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::tokenizing::*;
|
||||
use crate::tokenizing::Kw::*;
|
||||
use crate::tokenizing::TokenKind::*;
|
||||
|
||||
use crate::source_map::Location;
|
||||
use crate::ast::*;
|
||||
|
||||
use crate::builtin::{BinOp, PrefixOp};
|
||||
use crate::schala::SourceMapHandle;
|
||||
|
||||
/// Represents a parsing error
|
||||
#[derive(Debug)]
|
||||
pub struct ParseError {
|
||||
pub production_name: Option<String>,
|
||||
pub msg: String,
|
||||
pub token: Token
|
||||
}
|
||||
|
||||
impl ParseError {
|
||||
fn new_with_token<T, M>(msg: M, token: Token) -> ParseResult<T> where M: Into<String> {
|
||||
Err(ParseError { msg: msg.into(), token })
|
||||
Err(ParseError { msg: msg.into(), token, production_name: None })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -183,8 +201,11 @@ pub struct Parser {
|
||||
parse_record: Vec<ParseRecord>,
|
||||
parse_level: u32,
|
||||
restrictions: ParserRestrictions,
|
||||
id_store: ItemIdStore,
|
||||
source_map: SourceMapHandle
|
||||
}
|
||||
|
||||
|
||||
struct ParserRestrictions {
|
||||
no_struct_literal: bool
|
||||
}
|
||||
@@ -192,14 +213,14 @@ struct ParserRestrictions {
|
||||
struct TokenHandler {
|
||||
tokens: Vec<Token>,
|
||||
idx: usize,
|
||||
end_of_file: (usize, usize),
|
||||
end_of_file: Location
|
||||
}
|
||||
|
||||
impl TokenHandler {
|
||||
fn new(tokens: Vec<Token>) -> TokenHandler {
|
||||
let end_of_file = match tokens.last() {
|
||||
None => (0, 0),
|
||||
Some(t) => (t.line_num, t.char_num)
|
||||
None => Location { line_num: 0, char_num : 0 },
|
||||
Some(t) => t.location,
|
||||
};
|
||||
TokenHandler { idx: 0, tokens, end_of_file }
|
||||
}
|
||||
@@ -212,47 +233,58 @@ impl TokenHandler {
|
||||
self.peek_n(n).kind
|
||||
}
|
||||
fn peek(&mut self) -> Token {
|
||||
self.tokens.get(self.idx).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
|
||||
self.tokens.get(self.idx).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
|
||||
}
|
||||
/// calling peek_n(0) is the same thing as peek()
|
||||
fn peek_n(&mut self, n: usize) -> Token {
|
||||
self.tokens.get(self.idx + n).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
|
||||
self.tokens.get(self.idx + n).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
|
||||
}
|
||||
fn next(&mut self) -> Token {
|
||||
self.idx += 1;
|
||||
self.tokens.get(self.idx - 1).map(|t: &Token| { t.clone() }).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
|
||||
self.tokens.get(self.idx - 1).map(|t: &Token| { t.clone() }).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
|
||||
}
|
||||
}
|
||||
|
||||
impl Parser {
|
||||
/// Create a new parser initialized with some tokens.
|
||||
pub fn new(initial_input: Vec<Token>) -> Parser {
|
||||
pub fn new(source_map: SourceMapHandle) -> Parser {
|
||||
Parser {
|
||||
token_handler: TokenHandler::new(initial_input),
|
||||
token_handler: TokenHandler::new(vec![]),
|
||||
parse_record: vec![],
|
||||
parse_level: 0,
|
||||
restrictions: ParserRestrictions { no_struct_literal: false }
|
||||
restrictions: ParserRestrictions { no_struct_literal: false },
|
||||
id_store: ItemIdStore::new(),
|
||||
source_map,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_new_tokens(&mut self, new_tokens: Vec<Token>) {
|
||||
self.token_handler = TokenHandler::new(new_tokens);
|
||||
}
|
||||
|
||||
/// Parse all loaded tokens up to this point.
|
||||
pub fn parse(&mut self) -> ParseResult<AST> {
|
||||
self.program()
|
||||
}
|
||||
|
||||
/*
|
||||
pub fn parse_with_new_tokens(&mut self, new_tokens: Vec<Token>) -> ParseResult<AST> {
|
||||
|
||||
}
|
||||
*/
|
||||
|
||||
pub fn format_parse_trace(self) -> Vec<String> {
|
||||
self.parse_record.into_iter().map(|r| {
|
||||
pub fn format_parse_trace(&self) -> String {
|
||||
let mut buf = String::new();
|
||||
buf.push_str("Parse productions:\n");
|
||||
let mut next_token = None;
|
||||
for r in self.parse_record.iter() {
|
||||
let mut indent = String::new();
|
||||
for _ in 0..r.level {
|
||||
indent.push(' ');
|
||||
indent.push('.');
|
||||
}
|
||||
format!("{}Production `{}`, token: {}", indent, r.production_name, r.next_token)
|
||||
}).collect()
|
||||
let effective_token = if next_token == Some(&r.next_token) {
|
||||
"".to_string()
|
||||
} else {
|
||||
next_token = Some(&r.next_token);
|
||||
format!(", next token: {}", r.next_token)
|
||||
};
|
||||
buf.push_str(&format!("{}`{}`{}\n", indent, r.production_name, effective_token));
|
||||
}
|
||||
buf
|
||||
}
|
||||
}
|
||||
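For illustration, this is the kind of trace the rewritten format_parse_trace would produce (hypothetical production names and token; per the code above, the ", next token:" suffix only appears when the token differs from the previous record's, and each nesting level adds a " ." prefix):

Parse productions:
`program`, next token: 1
 .`statement`
 . .`expression`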
|
||||
@@ -327,25 +359,31 @@ impl Parser {
|
||||
continue;
|
||||
},
|
||||
_ => statements.push(
|
||||
Meta::new(self.statement()?)
|
||||
self.statement()?
|
||||
),
|
||||
}
|
||||
}
|
||||
Ok(AST(statements))
|
||||
Ok(AST { id: self.id_store.fresh(), statements })
|
||||
}
|
||||
|
||||
/// `statement := expression | declaration`
|
||||
#[recursive_descent_method]
|
||||
fn statement(&mut self) -> ParseResult<Statement> {
|
||||
//TODO handle error recovery here
|
||||
match self.token_handler.peek().get_kind() {
|
||||
Keyword(Type) => self.type_declaration().map(|decl| { Statement::Declaration(decl) }),
|
||||
Keyword(Func)=> self.func_declaration().map(|func| { Statement::Declaration(func) }),
|
||||
Keyword(Let) => self.binding_declaration().map(|decl| Statement::Declaration(decl)),
|
||||
Keyword(Interface) => self.interface_declaration().map(|decl| Statement::Declaration(decl)),
|
||||
Keyword(Impl) => self.impl_declaration().map(|decl| Statement::Declaration(decl)),
|
||||
_ => self.expression().map(|expr| { Statement::ExpressionStatement(expr.into()) } ),
|
||||
}
|
||||
let tok = self.token_handler.peek();
|
||||
let kind = match tok.get_kind() {
|
||||
Keyword(Type) => self.type_declaration().map(|decl| { StatementKind::Declaration(decl) }),
|
||||
Keyword(Func)=> self.func_declaration().map(|func| { StatementKind::Declaration(func) }),
|
||||
Keyword(Let) => self.binding_declaration().map(|decl| StatementKind::Declaration(decl)),
|
||||
Keyword(Interface) => self.interface_declaration().map(|decl| StatementKind::Declaration(decl)),
|
||||
Keyword(Impl) => self.impl_declaration().map(|decl| StatementKind::Declaration(decl)),
|
||||
Keyword(Import) => self.import_declaration().map(|spec| StatementKind::Import(spec)),
|
||||
Keyword(Module) => self.module_declaration().map(|spec| StatementKind::Module(spec)),
|
||||
_ => self.expression().map(|expr| { StatementKind::Expression(expr) } ),
|
||||
}?;
|
||||
let id = self.id_store.fresh();
|
||||
self.source_map.borrow_mut().add_location(&id, tok.location);
|
||||
Ok(Statement { kind, id })
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -378,7 +416,7 @@ impl Parser {
|
||||
let alias = self.identifier()?;
|
||||
expect!(self, Equals);
|
||||
let original = self.identifier()?;
|
||||
Ok(Declaration::TypeAlias(alias, original))
|
||||
Ok(Declaration::TypeAlias { alias, original })
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -453,9 +491,9 @@ impl Parser {
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn nonempty_func_body(&mut self) -> ParseResult<Vec<Meta<Statement>>> {
|
||||
fn nonempty_func_body(&mut self) -> ParseResult<Vec<Statement>> {
|
||||
let statements = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
|
||||
Ok(statements.into_iter().map(|s| Meta::new(s)).collect())
|
||||
Ok(statements)
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -463,7 +501,6 @@ impl Parser {
|
||||
Ok(delimited!(self, LParen, formal_param, Comma, RParen))
|
||||
}
|
||||
|
||||
//TODO needs to support default values
|
||||
#[recursive_descent_method]
|
||||
fn formal_param(&mut self) -> ParseResult<FormalParam> {
|
||||
let name = self.identifier()?;
|
||||
@@ -499,7 +536,7 @@ impl Parser {
|
||||
};
|
||||
|
||||
expect!(self, Equals);
|
||||
let expr = self.expression()?.into();
|
||||
let expr = self.expression()?;
|
||||
|
||||
Ok(Declaration::Binding { name, constant, type_anno, expr })
|
||||
}
|
||||
@@ -605,10 +642,11 @@ impl Parser {
|
||||
let next_tok = self.token_handler.next();
|
||||
let operation = match BinOp::from_sigil_token(&next_tok.kind) {
|
||||
Some(sigil) => sigil,
|
||||
//TODO I think I can fix this unreachable
|
||||
None => unreachable!()
|
||||
};
|
||||
let rhs = self.precedence_expr(new_precedence)?;
|
||||
lhs = Expression::new(ExpressionKind::BinExp(operation, bx!(lhs.into()), bx!(rhs.into())));
|
||||
lhs = Expression::new(self.id_store.fresh(), ExpressionKind::BinExp(operation, bx!(lhs), bx!(rhs)));
|
||||
}
|
||||
self.parse_level -= 1;
|
||||
Ok(lhs)
|
||||
@@ -623,8 +661,10 @@ impl Parser {
|
||||
_ => unreachable!(),
|
||||
};
|
||||
let expr = self.primary()?;
|
||||
let prefix_op = PrefixOp::from_str(sigil.as_str()).unwrap();
|
||||
Ok(Expression::new(
|
||||
ExpressionKind::PrefixExp(PrefixOp::from_sigil(sigil.as_str()), bx!(expr.into()))
|
||||
self.id_store.fresh(),
|
||||
ExpressionKind::PrefixExp(prefix_op, bx!(expr))
|
||||
))
|
||||
},
|
||||
_ => self.call_expr()
|
||||
@@ -636,8 +676,7 @@ impl Parser {
|
||||
let mut expr = self.index_expr()?;
|
||||
while let LParen = self.token_handler.peek_kind() {
|
||||
let arguments = delimited!(self, LParen, invocation_argument, Comma, RParen);
|
||||
let arguments = arguments.into_iter().map(|s| Meta::new(s)).collect();
|
||||
expr = Expression::new(ExpressionKind::Call { f: bx!(expr.into()), arguments }); //TODO no type anno is incorrect
|
||||
expr = Expression::new(self.id_store.fresh(), ExpressionKind::Call { f: bx!(expr), arguments }); //TODO no type anno is incorrect
|
||||
}
|
||||
|
||||
Ok(expr)
|
||||
@@ -672,10 +711,9 @@ impl Parser {
|
||||
fn index_expr(&mut self) -> ParseResult<Expression> {
|
||||
let primary = self.primary()?;
|
||||
Ok(if let LSquareBracket = self.token_handler.peek_kind() {
|
||||
let indexers = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket)
|
||||
.into_iter().map(|ex| ex.into()).collect();
|
||||
Expression::new(ExpressionKind::Index {
|
||||
indexee: bx!(Expression::new(primary.kind).into()),
|
||||
let indexers = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket);
|
||||
Expression::new(self.id_store.fresh(), ExpressionKind::Index {
|
||||
indexee: bx!(Expression::new(self.id_store.fresh(), primary.kind)),
|
||||
indexers,
|
||||
})
|
||||
} else {
|
||||
@@ -700,9 +738,8 @@ impl Parser {
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn list_expr(&mut self) -> ParseResult<Expression> {
|
||||
let exprs = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket)
|
||||
.into_iter().map(|ex| ex.into()).collect();
|
||||
Ok(Expression::new(ExpressionKind::ListLiteral(exprs)))
|
||||
let exprs = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket);
|
||||
Ok(Expression::new(self.id_store.fresh(), ExpressionKind::ListLiteral(exprs)))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -719,7 +756,7 @@ impl Parser {
|
||||
_ => None,
|
||||
};
|
||||
let body = self.nonempty_func_body()?;
|
||||
Ok(Expression::new(ExpressionKind::Lambda { params, type_anno, body })) //TODO need to handle types somehow
|
||||
Ok(Expression::new(self.id_store.fresh(), ExpressionKind::Lambda { params, type_anno, body })) //TODO need to handle types somehow
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -740,12 +777,9 @@ impl Parser {
|
||||
let output = {
|
||||
let mut inner = delimited!(self, LParen, expression, Comma, RParen);
|
||||
match inner.len() {
|
||||
0 => Ok(Expression::new(TupleLiteral(vec![]))),
|
||||
0 => Ok(Expression::new(self.id_store.fresh(), TupleLiteral(vec![]))),
|
||||
1 => Ok(inner.pop().unwrap()),
|
||||
_ => {
|
||||
let inner: Vec<Meta<Expression>> = inner.into_iter().map(|ex| ex.into()).collect();
|
||||
Ok(Expression::new(TupleLiteral(inner)))
|
||||
}
|
||||
_ => Ok(Expression::new(self.id_store.fresh(), TupleLiteral(inner)))
|
||||
}
|
||||
};
|
||||
self.restrictions.no_struct_literal = old_struct_value;
|
||||
@@ -755,22 +789,35 @@ impl Parser {
|
||||
#[recursive_descent_method]
|
||||
fn identifier_expr(&mut self) -> ParseResult<Expression> {
|
||||
use self::ExpressionKind::*;
|
||||
let identifier = self.identifier()?;
|
||||
let components = self.qualified_identifier()?;
|
||||
let qualified_identifier = QualifiedName { id: self.id_store.fresh(), components };
|
||||
Ok(match self.token_handler.peek_kind() {
|
||||
LCurlyBrace if !self.restrictions.no_struct_literal => {
|
||||
let fields = self.record_block()?;
|
||||
Expression::new(NamedStruct { name: identifier, fields })
|
||||
Expression::new(self.id_store.fresh(), NamedStruct { name: qualified_identifier, fields })
|
||||
},
|
||||
_ => Expression::new(Value(identifier))
|
||||
_ => Expression::new(self.id_store.fresh(), Value(qualified_identifier))
|
||||
})
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn record_block(&mut self) -> ParseResult<Vec<(Rc<String>, Meta<Expression>)>> {
|
||||
Ok(
|
||||
delimited!(self, LCurlyBrace, record_entry, Comma, RCurlyBrace)
|
||||
.into_iter().map(|(s, ex)| (s, ex.into())).collect()
|
||||
)
|
||||
fn qualified_identifier(&mut self) -> ParseResult<Vec<Rc<String>>> {
|
||||
let mut components = vec![self.identifier()?];
|
||||
loop {
|
||||
match (self.token_handler.peek_kind(), self.token_handler.peek_kind_n(1)) {
|
||||
(Colon, Colon) => {
|
||||
self.token_handler.next(); self.token_handler.next();
|
||||
components.push(self.identifier()?);
|
||||
},
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
Ok(components)
|
||||
}
|
||||
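A rough sketch of what qualified_identifier returns (not part of this diff; make_parser is the parsing-test helper added later in this changeset): a name like Option::Some should come back as its "::"-separated components.

#[test]
fn qualified_identifier_sketch() {
    let mut parser = make_parser("Option::Some");
    let components = parser.qualified_identifier().unwrap();
    assert_eq!(components, vec![Rc::new("Option".to_string()), Rc::new("Some".to_string())]);
}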
|
||||
#[recursive_descent_method]
|
||||
fn record_block(&mut self) -> ParseResult<Vec<(Rc<String>, Expression)>> {
|
||||
Ok(delimited!(self, LCurlyBrace, record_entry, Comma, RCurlyBrace))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -784,53 +831,47 @@ impl Parser {
|
||||
#[recursive_descent_method]
|
||||
fn if_expr(&mut self) -> ParseResult<Expression> {
|
||||
expect!(self, Keyword(Kw::If));
|
||||
let discriminator = Box::new({
|
||||
self.restrictions.no_struct_literal = true;
|
||||
let x = self.discriminator();
|
||||
self.restrictions.no_struct_literal = false;
|
||||
x?
|
||||
});
|
||||
|
||||
let body = Box::new(match self.token_handler.peek_kind() {
|
||||
Keyword(Kw::Then) => self.conditional()?,
|
||||
Keyword(Kw::Is) => self.simple_pattern_match()? ,
|
||||
_ => self.guard_block()?
|
||||
});
|
||||
|
||||
Ok(Expression::new(ExpressionKind::IfExpression { discriminator, body }))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn discriminator(&mut self) -> ParseResult<Discriminator> {
|
||||
let lhs = self.prefix_expr()?;
|
||||
let ref next = self.token_handler.peek_kind();
|
||||
Ok(if let Some(op) = BinOp::from_sigil_token(next) {
|
||||
Discriminator::BinOp(lhs, op)
|
||||
let old_struct_value = self.restrictions.no_struct_literal;
|
||||
self.restrictions.no_struct_literal = true;
|
||||
let discriminator = if let LCurlyBrace = self.token_handler.peek_kind() {
|
||||
None
|
||||
} else {
|
||||
Discriminator::Simple(lhs)
|
||||
})
|
||||
Some(Box::new(self.expression()?))
|
||||
};
|
||||
let body = Box::new(self.if_expr_body()?);
|
||||
self.restrictions.no_struct_literal = old_struct_value;
|
||||
Ok(Expression::new(self.id_store.fresh(), ExpressionKind::IfExpression { discriminator, body }))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn conditional(&mut self) -> ParseResult<IfExpressionBody> {
|
||||
fn if_expr_body(&mut self) -> ParseResult<IfExpressionBody> {
|
||||
match self.token_handler.peek_kind() {
|
||||
Keyword(Kw::Then) => self.simple_conditional(),
|
||||
Keyword(Kw::Is) => self.simple_pattern_match(),
|
||||
_ => self.cond_block(),
|
||||
}
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn simple_conditional(&mut self) -> ParseResult<IfExpressionBody> {
|
||||
expect!(self, Keyword(Kw::Then));
|
||||
let then_clause = self.expr_or_block()?;
|
||||
let else_clause = self.else_clause()?;
|
||||
Ok(IfExpressionBody::SimpleConditional(then_clause, else_clause))
|
||||
let then_case = self.expr_or_block()?;
|
||||
let else_case = self.else_case()?;
|
||||
Ok(IfExpressionBody::SimpleConditional {then_case, else_case })
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn simple_pattern_match(&mut self) -> ParseResult<IfExpressionBody> {
|
||||
expect!(self, Keyword(Kw::Is));
|
||||
let pat = self.pattern()?;
|
||||
let pattern = self.pattern()?;
|
||||
expect!(self, Keyword(Kw::Then));
|
||||
let then_clause = self.expr_or_block()?;
|
||||
let else_clause = self.else_clause()?;
|
||||
Ok(IfExpressionBody::SimplePatternMatch(pat, then_clause, else_clause))
|
||||
let then_case = self.expr_or_block()?;
|
||||
let else_case = self.else_case()?;
|
||||
Ok(IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case })
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn else_clause(&mut self) -> ParseResult<Option<Block>> {
|
||||
fn else_case(&mut self) -> ParseResult<Option<Block>> {
|
||||
Ok(if let Keyword(Kw::Else) = self.token_handler.peek_kind() {
|
||||
self.token_handler.next();
|
||||
Some(self.expr_or_block()?)
|
||||
@@ -840,63 +881,67 @@ impl Parser {
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn guard_block(&mut self) -> ParseResult<IfExpressionBody> {
|
||||
//TODO - delimited! isn't sophisticated enough to do this
|
||||
//let guards = delimited!(self, LCurlyBrace, guard_arm, Comma, RCurlyBrace);
|
||||
fn cond_block(&mut self) -> ParseResult<IfExpressionBody> {
|
||||
expect!(self, LCurlyBrace);
|
||||
|
||||
let mut guards = vec![];
|
||||
let mut cond_arms = vec![];
|
||||
loop {
|
||||
match self.token_handler.peek_kind() {
|
||||
RCurlyBrace | EOF => break,
|
||||
Semicolon | Newline => { self.token_handler.next(); continue},
|
||||
_ => {
|
||||
let guard_arm = self.guard_arm()?;
|
||||
guards.push(guard_arm);
|
||||
loop {
|
||||
match self.token_handler.peek_kind() {
|
||||
Semicolon | Newline => { self.token_handler.next(); continue; },
|
||||
_ => break,
|
||||
}
|
||||
cond_arms.push(self.cond_arm()?);
|
||||
match self.token_handler.peek_kind() {
|
||||
Comma | Semicolon | Newline => { self.token_handler.next(); continue; },
|
||||
_ => break,
|
||||
}
|
||||
if let RCurlyBrace = self.token_handler.peek_kind() {
|
||||
break;
|
||||
}
|
||||
expect!(self, Comma);
|
||||
}
|
||||
}
|
||||
}
|
||||
expect!(self, RCurlyBrace);
|
||||
Ok(IfExpressionBody::GuardList(guards))
|
||||
Ok(IfExpressionBody::CondList(cond_arms))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn guard_arm(&mut self) -> ParseResult<GuardArm> {
|
||||
let guard = self.guard()?;
|
||||
expect!(self, Operator(ref c) if **c == "->");
|
||||
fn cond_arm(&mut self) -> ParseResult<ConditionArm> {
|
||||
let (condition, guard) = if let Keyword(Kw::Else) = self.token_handler.peek_kind() {
|
||||
self.token_handler.next();
|
||||
(Condition::Else, None)
|
||||
} else {
|
||||
let condition = self.condition()?;
|
||||
let guard = self.guard()?;
|
||||
expect!(self, Keyword(Kw::Then));
|
||||
(condition, guard)
|
||||
};
|
||||
let body = self.expr_or_block()?;
|
||||
Ok(GuardArm { guard, body })
|
||||
Ok(ConditionArm { condition, guard, body })
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn guard(&mut self) -> ParseResult<Guard> {
|
||||
fn condition(&mut self) -> ParseResult<Condition> {
|
||||
Ok(match self.token_handler.peek_kind() {
|
||||
Keyword(Kw::Is) => {
|
||||
self.token_handler.next();
|
||||
let pat = self.pattern()?;
|
||||
Guard::Pat(pat)
|
||||
Condition::Pattern(self.pattern()?)
|
||||
},
|
||||
ref tok if BinOp::from_sigil_token(tok).is_some() => {
|
||||
let op = BinOp::from_sigil_token(&self.token_handler.next().kind).unwrap();
|
||||
let precedence = op.get_precedence();
|
||||
let Expression { kind, .. } = self.precedence_expr(precedence)?;
|
||||
Guard::HalfExpr(HalfExpr { op: Some(op), expr: kind })
|
||||
let expr = self.expression()?;
|
||||
Condition::TruncatedOp(op, expr)
|
||||
},
|
||||
_ => {
|
||||
//TODO - I think there's a better way to do this involving the precedence of ->
|
||||
let Expression { kind, .. } = self.prefix_expr()?;
|
||||
Guard::HalfExpr(HalfExpr { op: None, expr: kind })
|
||||
}
|
||||
Condition::Expression(self.expression()?)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn guard(&mut self) -> ParseResult<Option<Expression>> {
|
||||
Ok(match self.token_handler.peek_kind() {
|
||||
Keyword(Kw::If) => {
|
||||
self.token_handler.next();
|
||||
Some(self.expression()?)
|
||||
},
|
||||
_ => None
|
||||
})
|
||||
}
|
||||
|
||||
@@ -912,43 +957,51 @@ impl Parser {
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn simple_pattern(&mut self) -> ParseResult<Pattern> {
|
||||
Ok({
|
||||
let tok = self.token_handler.peek();
|
||||
match tok.get_kind() {
|
||||
Identifier(_) => {
|
||||
let id = self.identifier()?;
|
||||
match self.token_handler.peek_kind() {
|
||||
LCurlyBrace => {
|
||||
let members = delimited!(self, LCurlyBrace, record_pattern_entry, Comma, RCurlyBrace);
|
||||
Pattern::Record(id, members)
|
||||
},
|
||||
LParen => {
|
||||
let members = delimited!(self, LParen, pattern, Comma, RParen);
|
||||
Pattern::TupleStruct(id, members)
|
||||
},
|
||||
_ => Pattern::Literal(PatternLiteral::VarPattern(id))
|
||||
}
|
||||
},
|
||||
Keyword(Kw::True) => {
|
||||
self.token_handler.next();
|
||||
Pattern::Literal(PatternLiteral::BoolPattern(true))
|
||||
},
|
||||
Keyword(Kw::False) => {
|
||||
self.token_handler.next();
|
||||
Pattern::Literal(PatternLiteral::BoolPattern(false))
|
||||
},
|
||||
StrLiteral(s) => {
|
||||
self.token_handler.next();
|
||||
Pattern::Literal(PatternLiteral::StringPattern(s))
|
||||
},
|
||||
DigitGroup(_) | HexLiteral(_) | BinNumberSigil | Period => self.signed_number_literal()?,
|
||||
Operator(ref op) if **op == "-" => self.signed_number_literal()?,
|
||||
Underscore => {
|
||||
self.token_handler.next();
|
||||
Pattern::Ignored
|
||||
},
|
||||
other => return ParseError::new_with_token(format!("{:?} is not a valid Pattern", other), tok)
|
||||
}
|
||||
Ok(match self.token_handler.peek_kind() {
|
||||
Identifier(_) => {
|
||||
let components = self.qualified_identifier()?;
|
||||
let qualified_identifier = QualifiedName { id: self.id_store.fresh(), components };
|
||||
match self.token_handler.peek_kind() {
|
||||
LCurlyBrace => {
|
||||
let members = delimited!(self, LCurlyBrace, record_pattern_entry, Comma, RCurlyBrace);
|
||||
Pattern::Record(qualified_identifier, members)
|
||||
},
|
||||
LParen => {
|
||||
let members = delimited!(self, LParen, pattern, Comma, RParen);
|
||||
Pattern::TupleStruct(qualified_identifier, members)
|
||||
},
|
||||
_ => {
|
||||
Pattern::VarOrName(qualified_identifier)
|
||||
},
|
||||
}
|
||||
},
|
||||
_ => self.pattern_literal()?
|
||||
})
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn pattern_literal(&mut self) -> ParseResult<Pattern> {
|
||||
let tok = self.token_handler.peek();
|
||||
Ok(match tok.get_kind() {
|
||||
Keyword(Kw::True) => {
|
||||
self.token_handler.next();
|
||||
Pattern::Literal(PatternLiteral::BoolPattern(true))
|
||||
},
|
||||
Keyword(Kw::False) => {
|
||||
self.token_handler.next();
|
||||
Pattern::Literal(PatternLiteral::BoolPattern(false))
|
||||
},
|
||||
StrLiteral { s, .. } => {
|
||||
self.token_handler.next();
|
||||
Pattern::Literal(PatternLiteral::StringPattern(s))
|
||||
},
|
||||
DigitGroup(_) | HexLiteral(_) | BinNumberSigil | Period => self.signed_number_literal()?,
|
||||
Operator(ref op) if **op == "-" => self.signed_number_literal()?,
|
||||
Underscore => {
|
||||
self.token_handler.next();
|
||||
Pattern::Ignored
|
||||
},
|
||||
other => return ParseError::new_with_token(format!("{:?} is not a valid Pattern", other), tok)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -981,7 +1034,7 @@ impl Parser {
|
||||
#[recursive_descent_method]
|
||||
fn block(&mut self) -> ParseResult<Block> {
|
||||
let block = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
|
||||
Ok(block.into_iter().map(|s| { Meta::new(s) }).collect())
|
||||
Ok(block)
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -990,7 +1043,8 @@ impl Parser {
|
||||
LCurlyBrace => self.block(),
|
||||
_ => {
|
||||
let expr = self.expression()?;
|
||||
Ok(vec![Meta::new(Statement::ExpressionStatement(expr.into()))])
|
||||
let s = Statement { id: self.id_store.fresh(), kind: StatementKind::Expression(expr) };
|
||||
Ok(vec![s])
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1003,10 +1057,10 @@ impl Parser {
|
||||
self.restrictions.no_struct_literal = true;
|
||||
let x = self.while_cond();
|
||||
self.restrictions.no_struct_literal = false;
|
||||
x?.map(|expr| bx!(expr.into()))
|
||||
x?.map(|expr| bx!(expr))
|
||||
};
|
||||
let body = self.block()?;
|
||||
Ok(Expression::new(WhileExpression {condition, body}))
|
||||
Ok(Expression::new(self.id_store.fresh(), WhileExpression {condition, body}))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -1032,14 +1086,14 @@ impl Parser {
|
||||
vec![single_enum]
|
||||
};
|
||||
let body = Box::new(self.for_expr_body()?);
|
||||
Ok(Expression::new(ExpressionKind::ForExpression { enumerators, body }))
|
||||
Ok(Expression::new(self.id_store.fresh(), ExpressionKind::ForExpression { enumerators, body }))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn enumerator(&mut self) -> ParseResult<Enumerator> {
|
||||
let id = self.identifier()?;
|
||||
expect!(self, Operator(ref c) if **c == "<-");
|
||||
let generator = self.expression()?.into();
|
||||
let generator = self.expression()?;
|
||||
Ok(Enumerator { id, generator })
|
||||
}
|
||||
|
||||
@@ -1050,11 +1104,11 @@ impl Parser {
|
||||
Ok(match tok.get_kind() {
|
||||
LCurlyBrace => {
|
||||
let statements = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
|
||||
StatementBlock(statements.into_iter().map(|s| Meta::new(s)).collect())
|
||||
StatementBlock(statements)
|
||||
},
|
||||
Keyword(Kw::Return) => {
|
||||
self.token_handler.next();
|
||||
MonadicReturn(self.expression()?.into())
|
||||
MonadicReturn(self.expression()?)
|
||||
},
|
||||
_ => return ParseError::new_with_token("for expressions must end in a block or 'return'", tok),
|
||||
})
|
||||
@@ -1078,15 +1132,18 @@ impl Parser {
|
||||
DigitGroup(_) | HexLiteral(_) | BinNumberSigil | Period => self.number_literal(),
|
||||
Keyword(Kw::True) => {
|
||||
self.token_handler.next();
|
||||
Ok(Expression::new(BoolLiteral(true)))
|
||||
let id = self.id_store.fresh();
|
||||
Ok(Expression::new(id, BoolLiteral(true)))
|
||||
},
|
||||
Keyword(Kw::False) => {
|
||||
self.token_handler.next();
|
||||
Ok(Expression::new(BoolLiteral(false)))
|
||||
let id = self.id_store.fresh();
|
||||
Ok(Expression::new(id, BoolLiteral(false)))
|
||||
},
|
||||
StrLiteral(s) => {
|
||||
StrLiteral {s, ..} => {
|
||||
self.token_handler.next();
|
||||
Ok(Expression::new(StringLiteral(s.clone())))
|
||||
let id = self.id_store.fresh();
|
||||
Ok(Expression::new(id, StringLiteral(s.clone())))
|
||||
}
|
||||
e => ParseError::new_with_token(format!("Expected a literal expression, got {:?}", e), tok),
|
||||
}
|
||||
@@ -1108,12 +1165,12 @@ impl Parser {
|
||||
BinNumberSigil => {
|
||||
let digits = self.digits()?;
|
||||
let n = parse_binary(digits, tok)?;
|
||||
Ok(Expression::new(NatLiteral(n)))
|
||||
Ok(Expression::new(self.id_store.fresh(), NatLiteral(n)))
|
||||
},
|
||||
HexLiteral(text) => {
|
||||
let digits: String = text.chars().filter(|c| c.is_digit(16)).collect();
|
||||
let n = parse_hex(digits, tok)?;
|
||||
Ok(Expression::new(NatLiteral(n)))
|
||||
Ok(Expression::new(self.id_store.fresh(), NatLiteral(n)))
|
||||
},
|
||||
_ => return ParseError::new_with_token("Expected '0x' or '0b'", tok),
|
||||
}
|
||||
@@ -1129,13 +1186,13 @@ impl Parser {
|
||||
digits.push_str(".");
|
||||
digits.push_str(&self.digits()?);
|
||||
match digits.parse::<f64>() {
|
||||
Ok(f) => Ok(Expression::new(FloatLiteral(f))),
|
||||
Ok(f) => Ok(Expression::new(self.id_store.fresh(), FloatLiteral(f))),
|
||||
Err(e) => ParseError::new_with_token(format!("Float failed to parse with error: {}", e), tok),
|
||||
|
||||
}
|
||||
} else {
|
||||
match digits.parse::<u64>() {
|
||||
Ok(d) => Ok(Expression::new(NatLiteral(d))),
|
||||
Ok(d) => Ok(Expression::new(self.id_store.fresh(), NatLiteral(d))),
|
||||
Err(e) => ParseError::new_with_token(format!("Integer failed to parse with error: {}", e), tok),
|
||||
}
|
||||
}
|
||||
@@ -1153,6 +1210,67 @@ impl Parser {
|
||||
}
|
||||
Ok(ds)
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn import_declaration(&mut self) -> ParseResult<ImportSpecifier> {
|
||||
expect!(self, Keyword(Import));
|
||||
let mut path_components = vec![];
|
||||
path_components.push(self.identifier()?);
|
||||
loop {
|
||||
match (self.token_handler.peek_kind(), self.token_handler.peek_kind_n(1)) {
|
||||
(Colon, Colon) => {
|
||||
self.token_handler.next(); self.token_handler.next();
|
||||
if let Identifier(_) = self.token_handler.peek_kind() {
|
||||
path_components.push(self.identifier()?);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
},
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
|
||||
let imported_names = match self.token_handler.peek_kind() {
|
||||
LCurlyBrace => {
|
||||
let names = delimited!(self, LCurlyBrace, identifier, Comma, RCurlyBrace);
|
||||
ImportedNames::List(names)
|
||||
},
|
||||
Operator(ref s) if **s == "*" => {
|
||||
self.token_handler.next();
|
||||
ImportedNames::All
|
||||
},
|
||||
_ => ImportedNames::LastOfPath
|
||||
};
|
||||
|
||||
Ok(ImportSpecifier {
|
||||
id: self.id_store.fresh(),
|
||||
path_components,
|
||||
imported_names
|
||||
})
|
||||
}
|
||||
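A parse-only sketch of the three import_suffix shapes import_declaration accepts (not part of this diff; make_parser is the parsing-test helper added later in this changeset):

#[test]
fn import_forms_parse_sketch() {
    // Bare path, braced list, and glob, per the import grammar in the module docs.
    for src in ["import foo::bar", "import foo::{a, b}", "import foo::*"].iter() {
        let mut parser = make_parser(src);
        assert!(parser.statement().is_ok());
    }
}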
|
||||
#[recursive_descent_method]
|
||||
fn import_suffix(&mut self) -> ParseResult<ImportedNames> {
|
||||
Ok(match self.token_handler.peek_kind() {
|
||||
Operator(ref s) if **s == "*" => {
|
||||
self.token_handler.next();
|
||||
ImportedNames::All
|
||||
},
|
||||
LCurlyBrace => {
|
||||
let names = delimited!(self, LCurlyBrace, identifier, Comma, RCurlyBrace);
|
||||
ImportedNames::List(names)
|
||||
},
|
||||
_ => return ParseError::new_with_token("Expected '{{' or '*'", self.token_handler.peek()),
|
||||
})
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn module_declaration(&mut self) -> ParseResult<ModuleSpecifier> {
|
||||
expect!(self, Keyword(Kw::Module));
|
||||
let name = self.identifier()?;
|
||||
let contents = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
|
||||
Ok(ModuleSpecifier { name, contents })
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_binary(digits: String, tok: Token) -> ParseResult<u64> {
|
||||
@@ -1187,3 +1305,4 @@ fn parse_hex(digits: String, tok: Token) -> ParseResult<u64> {
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
#![cfg(test)]
|
||||
use ::std::rc::Rc;
|
||||
use super::tokenize;
|
||||
use super::ParseResult;
|
||||
use crate::builtin::{PrefixOp, BinOp};
|
||||
use crate::ast::{AST, Meta, Expression, Statement, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody, InvocationArgument, FormalParam};
|
||||
use super::Statement::*;
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use super::{Parser, ParseResult, tokenize};
|
||||
use crate::ast::*;
|
||||
use super::Declaration::*;
|
||||
use super::Signature;
|
||||
use super::TypeIdentifier::*;
|
||||
@@ -13,23 +13,44 @@ use super::ExpressionKind::*;
|
||||
use super::Variant::*;
|
||||
use super::ForBody::*;
|
||||
|
||||
fn parse(input: &str) -> ParseResult<AST> {
|
||||
fn make_parser(input: &str) -> Parser {
|
||||
let source_map = crate::source_map::SourceMap::new();
|
||||
let source_map_handle = Rc::new(RefCell::new(source_map));
|
||||
let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
|
||||
let mut parser = super::Parser::new(tokens);
|
||||
let mut parser = super::Parser::new(source_map_handle);
|
||||
parser.add_new_tokens(tokens);
|
||||
parser
|
||||
}
|
||||
|
||||
fn parse(input: &str) -> ParseResult<AST> {
|
||||
let mut parser = make_parser(input);
|
||||
parser.parse()
|
||||
}
|
||||
|
||||
macro_rules! parse_test {
|
||||
($string:expr, $correct:expr) => { assert_eq!(parse($string).unwrap(), $correct) };
|
||||
($string:expr, $correct:expr) => {
|
||||
assert_eq!(parse($string).unwrap(), $correct)
|
||||
};
|
||||
}
|
||||
macro_rules! parse_test_wrap_ast {
|
||||
($string:expr, $correct:expr) => { parse_test!($string, AST(vec![$correct])) }
|
||||
($string:expr, $correct:expr) => { parse_test!($string, AST { id: ItemIdStore::new_id(), statements: vec![$correct] }) }
|
||||
}
|
||||
macro_rules! parse_error {
|
||||
($string:expr) => { assert!(parse($string).is_err()) }
|
||||
}
|
||||
macro_rules! qname {
|
||||
( $( $component:expr),* ) => {
|
||||
{
|
||||
let mut components = vec![];
|
||||
$(
|
||||
components.push(rc!($component));
|
||||
)*
|
||||
QualifiedName { components, id: ItemIdStore::new_id() }
|
||||
}
|
||||
};
|
||||
}
|
||||
macro_rules! val {
|
||||
($var:expr) => { Value(Rc::new($var.to_string())) }
|
||||
($var:expr) => { Value(QualifiedName { components: vec![Rc::new($var.to_string())], id: ItemIdStore::new_id() }) };
|
||||
}
|
||||
macro_rules! ty {
|
||||
($name:expr) => { Singleton(tys!($name)) }
|
||||
@@ -38,38 +59,54 @@ macro_rules! tys {
|
||||
($name:expr) => { TypeSingletonName { name: Rc::new($name.to_string()), params: vec![] } };
|
||||
}
|
||||
|
||||
macro_rules! decl {
|
||||
($expr_type:expr) => {
|
||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Declaration($expr_type) }
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! import {
|
||||
($import_spec:expr) => {
|
||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Import($import_spec) }
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! module {
|
||||
($module_spec:expr) => {
|
||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Module($module_spec) }
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! ex {
|
||||
($expr_type:expr) => { Expression::new($expr_type) };
|
||||
(m $expr_type:expr) => { Meta::new(Expression::new($expr_type)) };
|
||||
(m $expr_type:expr, $type_anno:expr) => { Meta::new(Expression::with_anno($expr_type, $type_anno)) };
|
||||
($expr_type:expr) => { Expression::new(ItemIdStore::new_id(), $expr_type) };
|
||||
($expr_type:expr, $type_anno:expr) => { Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno) };
|
||||
(s $expr_text:expr) => {
|
||||
{
|
||||
let tokens: Vec<crate::tokenizing::Token> = tokenize($expr_text);
|
||||
let mut parser = super::Parser::new(tokens);
|
||||
let mut parser = make_parser($expr_text);
|
||||
parser.expression().unwrap()
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! inv {
|
||||
($expr_type:expr) => { Meta::new(InvocationArgument::Positional($expr_type)) }
|
||||
($expr_type:expr) => { InvocationArgument::Positional($expr_type) }
|
||||
}
|
||||
|
||||
macro_rules! binexp {
|
||||
($op:expr, $lhs:expr, $rhs:expr) => { BinExp(BinOp::from_sigil($op), bx!(Expression::new($lhs).into()), bx!(Expression::new($rhs).into())) }
|
||||
($op:expr, $lhs:expr, $rhs:expr) => { BinExp(BinOp::from_sigil($op), bx!(Expression::new(ItemIdStore::new_id(), $lhs).into()), bx!(Expression::new(ItemIdStore::new_id(), $rhs).into())) }
|
||||
}
|
||||
macro_rules! prefexp {
|
||||
($op:expr, $lhs:expr) => { PrefixExp(PrefixOp::from_sigil($op), bx!(Expression::new($lhs).into())) }
|
||||
($op:expr, $lhs:expr) => { PrefixExp(PrefixOp::from_str($op).unwrap(), bx!(Expression::new(ItemIdStore::new_id(), $lhs).into())) }
|
||||
}
|
||||
macro_rules! exst {
|
||||
($expr_type:expr) => { Meta::new(Statement::ExpressionStatement(Expression::new($expr_type).into())) };
|
||||
($expr_type:expr, $type_anno:expr) => { Meta::new(Statement::ExpressionStatement(Expression::with_anno($expr_type, $type_anno).into())) };
|
||||
($op:expr, $lhs:expr, $rhs:expr) => { Meta::new(Statement::ExpressionStatement(ex!(binexp!($op, $lhs, $rhs)))) };
|
||||
($expr_type:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::new(ItemIdStore::new_id(), $expr_type).into())} };
|
||||
($expr_type:expr, $type_anno:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno).into())} };
|
||||
($op:expr, $lhs:expr, $rhs:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(ex!(binexp!($op, $lhs, $rhs)))}
|
||||
};
|
||||
(s $statement_text:expr) => {
|
||||
{
|
||||
let tokens: Vec<crate::tokenizing::Token> = tokenize($statement_text);
|
||||
let mut parser = super::Parser::new(tokens);
|
||||
Meta::new(parser.statement().unwrap())
|
||||
let mut parser = make_parser($statement_text);
|
||||
parser.statement().unwrap()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -87,54 +124,55 @@ fn parsing_number_literals_and_binexps() {
|
||||
|
||||
parse_test_wrap_ast! {"0xf_f_+1", exst!(binexp!("+", NatLiteral(255), NatLiteral(1))) };
|
||||
|
||||
parse_test! {"3; 4; 4.3", AST(
|
||||
vec![exst!(NatLiteral(3)), exst!(NatLiteral(4)),
|
||||
exst!(FloatLiteral(4.3))])
|
||||
parse_test! {"3; 4; 4.3",
|
||||
AST {
|
||||
id: ItemIdStore::new_id(),
|
||||
statements: vec![exst!(NatLiteral(3)), exst!(NatLiteral(4)),
|
||||
exst!(FloatLiteral(4.3))]
|
||||
}
|
||||
};
|
||||
|
||||
parse_test!("1 + 2 * 3", AST(vec!
|
||||
[
|
||||
exst!(binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))))
|
||||
]));
|
||||
parse_test_wrap_ast!("1 + 2 * 3",
|
||||
exst!(binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))))
|
||||
);
|
||||
|
||||
parse_test!("1 * 2 + 3", AST(vec!
|
||||
[
|
||||
exst!(binexp!("+", binexp!("*", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))
|
||||
]));
|
||||
parse_test_wrap_ast!("1 * 2 + 3",
|
||||
exst!(binexp!("+", binexp!("*", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))
|
||||
) ;
|
||||
|
||||
parse_test!("1 && 2", AST(vec![exst!(binexp!("&&", NatLiteral(1), NatLiteral(2)))]));
|
||||
parse_test_wrap_ast!("1 && 2", exst!(binexp!("&&", NatLiteral(1), NatLiteral(2))));
|
||||
|
||||
parse_test!("1 + 2 * 3 + 4", AST(vec![exst!(
|
||||
parse_test_wrap_ast!("1 + 2 * 3 + 4", exst!(
|
||||
binexp!("+",
|
||||
binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))),
|
||||
NatLiteral(4)))]));
|
||||
NatLiteral(4))));
|
||||
|
||||
parse_test!("(1 + 2) * 3", AST(vec!
|
||||
[exst!(binexp!("*", binexp!("+", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))]));
|
||||
parse_test_wrap_ast!("(1 + 2) * 3",
|
||||
exst!(binexp!("*", binexp!("+", NatLiteral(1), NatLiteral(2)), NatLiteral(3))));
|
||||
|
||||
parse_test!(".1 + .2", AST(vec![exst!(binexp!("+", FloatLiteral(0.1), FloatLiteral(0.2)))]));
|
||||
parse_test!("1 / 2", AST(vec![exst!(binexp!("/", NatLiteral(1), NatLiteral(2)))]));
|
||||
parse_test_wrap_ast!(".1 + .2", exst!(binexp!("+", FloatLiteral(0.1), FloatLiteral(0.2))));
|
||||
parse_test_wrap_ast!("1 / 2", exst!(binexp!("/", NatLiteral(1), NatLiteral(2))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_tuples() {
|
||||
parse_test!("()", AST(vec![exst!(TupleLiteral(vec![]))]));
|
||||
parse_test!("(\"hella\", 34)", AST(vec![exst!(
|
||||
parse_test_wrap_ast!("()", exst!(TupleLiteral(vec![])));
|
||||
parse_test_wrap_ast!("(\"hella\", 34)", exst!(
|
||||
TupleLiteral(
|
||||
vec![ex!(s r#""hella""#).into(), ex!(s "34").into()]
|
||||
)
|
||||
)]));
|
||||
parse_test!("((1+2), \"slough\")", AST(vec![exst!(TupleLiteral(vec![
|
||||
));
|
||||
parse_test_wrap_ast!("((1+2), \"slough\")", exst!(TupleLiteral(vec![
|
||||
ex!(binexp!("+", NatLiteral(1), NatLiteral(2))).into(),
|
||||
ex!(StringLiteral(rc!(slough))).into(),
|
||||
]))]))
|
||||
])))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_identifiers() {
|
||||
parse_test!("a", AST(vec![exst!(val!("a"))]));
|
||||
parse_test!("some_value", AST(vec![exst!(val!("some_value"))]));
|
||||
parse_test!("a + b", AST(vec![exst!(binexp!("+", val!("a"), val!("b")))]));
|
||||
parse_test_wrap_ast!("a", exst!(val!("a")));
|
||||
parse_test_wrap_ast!("some_value", exst!(val!("some_value")));
|
||||
parse_test_wrap_ast!("a + b", exst!(binexp!("+", val!("a"), val!("b"))));
|
||||
//parse_test!("a[b]", AST(vec![Expression(
|
||||
//parse_test!("a[]", <- TODO THIS NEEDS TO FAIL
|
||||
//parse_test("a()[b]()[d]")
|
||||
@@ -148,71 +186,90 @@ fn parsing_identifiers() {
|
||||
])
|
||||
}
|
||||
*/
|
||||
parse_test!("a[b,c]", AST(vec![exst!(Index { indexee: bx!(ex!(m val!("a"))), indexers: vec![ex!(m val!("b")), ex!(m val!("c"))]} )]));
|
||||
parse_test_wrap_ast!("a[b,c]", exst!(Index { indexee: bx!(ex!(val!("a"))), indexers: vec![ex!(val!("b")), ex!(val!("c"))]} ));
|
||||
|
||||
parse_test!("None", AST(vec![exst!(val!("None"))]));
|
||||
parse_test!("Pandas { a: x + y }", AST(vec![
|
||||
exst!(NamedStruct { name: rc!(Pandas), fields: vec![(rc!(a), ex!(m binexp!("+", val!("x"), val!("y"))))]})
|
||||
]));
|
||||
parse_test! { "Pandas { a: n, b: q, }",
|
||||
AST(vec![
|
||||
exst!(NamedStruct { name: rc!(Pandas), fields:
|
||||
vec![(rc!(a), ex!(m val!("n"))), (rc!(b), ex!(m val!("q")))]
|
||||
}
|
||||
)
|
||||
])
|
||||
parse_test_wrap_ast!("None", exst!(val!("None")));
|
||||
parse_test_wrap_ast!("Pandas { a: x + y }",
|
||||
exst!(NamedStruct { name: qname!(Pandas), fields: vec![(rc!(a), ex!(binexp!("+", val!("x"), val!("y"))))]})
|
||||
);
|
||||
parse_test_wrap_ast! { "Pandas { a: n, b: q, }",
|
||||
exst!(NamedStruct { name: qname!(Pandas), fields:
|
||||
vec![(rc!(a), ex!(val!("n"))), (rc!(b), ex!(val!("q")))]
|
||||
}
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn qualified_identifiers() {
|
||||
parse_test_wrap_ast! {
|
||||
"let q_q = Yolo::Swaggins",
|
||||
decl!(Binding { name: rc!(q_q), constant: true, type_anno: None,
|
||||
expr: Expression::new(ItemIdStore::new_id(), Value(qname!(Yolo, Swaggins))),
|
||||
})
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"thing::item::call()",
|
||||
exst!(Call { f: bx![ex!(Value(qname!(thing, item, call)))], arguments: vec![] })
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reserved_words() {
|
||||
parse_error!("module::item::call()");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_complicated_operators() {
|
||||
parse_test!("a <- b", AST(vec![exst!(binexp!("<-", val!("a"), val!("b")))]));
|
||||
parse_test!("a || b", AST(vec![exst!(binexp!("||", val!("a"), val!("b")))]));
|
||||
parse_test!("a<>b", AST(vec![exst!(binexp!("<>", val!("a"), val!("b")))]));
|
||||
parse_test!("a.b.c.d", AST(vec![exst!(binexp!(".",
|
||||
parse_test_wrap_ast!("a <- b", exst!(binexp!("<-", val!("a"), val!("b"))));
|
||||
parse_test_wrap_ast!("a || b", exst!(binexp!("||", val!("a"), val!("b"))));
|
||||
parse_test_wrap_ast!("a<>b", exst!(binexp!("<>", val!("a"), val!("b"))));
|
||||
parse_test_wrap_ast!("a.b.c.d", exst!(binexp!(".",
|
||||
binexp!(".",
|
||||
binexp!(".", val!("a"), val!("b")),
|
||||
val!("c")),
|
||||
val!("d")))]));
|
||||
parse_test!("-3", AST(vec![exst!(prefexp!("-", NatLiteral(3)))]));
|
||||
parse_test!("-0.2", AST(vec![exst!(prefexp!("-", FloatLiteral(0.2)))]));
|
||||
parse_test!("!3", AST(vec![exst!(prefexp!("!", NatLiteral(3)))]));
|
||||
parse_test!("a <- -b", AST(vec![exst!(binexp!("<-", val!("a"), prefexp!("-", val!("b"))))]));
|
||||
parse_test!("a <--b", AST(vec![exst!(binexp!("<--", val!("a"), val!("b")))]));
|
||||
val!("d"))));
|
||||
parse_test_wrap_ast!("-3", exst!(prefexp!("-", NatLiteral(3))));
|
||||
parse_test_wrap_ast!("-0.2", exst!(prefexp!("-", FloatLiteral(0.2))));
|
||||
parse_test_wrap_ast!("!3", exst!(prefexp!("!", NatLiteral(3))));
|
||||
parse_test_wrap_ast!("a <- -b", exst!(binexp!("<-", val!("a"), prefexp!("-", val!("b")))));
|
||||
parse_test_wrap_ast!("a <--b", exst!(binexp!("<--", val!("a"), val!("b"))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_functions() {
|
||||
parse_test!("fn oi()", AST(vec![Meta::new(Declaration(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None })))]));
|
||||
parse_test!("oi()", AST(vec![exst!(Call { f: bx!(ex!(m val!("oi"))), arguments: vec![] })]));
|
||||
parse_test!("oi(a, 2 + 2)", AST(vec![exst!(Call
|
||||
{ f: bx!(ex!(m val!("oi"))),
|
||||
arguments: vec![inv!(ex!(val!("a"))).into(), inv!(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))).into()]
|
||||
})]));
|
||||
parse_test_wrap_ast!("fn oi()", decl!(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None })));
|
||||
parse_test_wrap_ast!("oi()", exst!(Call { f: bx!(ex!(val!("oi"))), arguments: vec![] }));
|
||||
parse_test_wrap_ast!("oi(a, 2 + 2)", exst!(Call
|
||||
{ f: bx!(ex!(val!("oi"))),
|
||||
arguments: vec![inv!(ex!(val!("a"))), inv!(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))).into()]
|
||||
}));
|
||||
parse_error!("a(b,,c)");
|
||||
|
||||
parse_test!("fn a(b, c: Int): Int", AST(vec![Meta::new(Declaration(
|
||||
FuncSig(Signature { name: rc!(a), operator: false, params: vec![
|
||||
FormalParam { name: rc!(b), anno: None, default: None },
|
||||
FormalParam { name: rc!(c), anno: Some(ty!("Int")), default: None }
|
||||
], type_anno: Some(ty!("Int")) })))]));
|
||||
parse_test_wrap_ast!("fn a(b, c: Int): Int", decl!(
|
||||
FuncSig(Signature { name: rc!(a), operator: false, params: vec![
|
||||
FormalParam { name: rc!(b), anno: None, default: None },
|
||||
FormalParam { name: rc!(c), anno: Some(ty!("Int")), default: None }
|
||||
], type_anno: Some(ty!("Int")) })));
|
||||
|
||||
|
||||
parse_test!("fn a(x) { x() }", AST(vec![Meta::new(Declaration(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })])))]));
|
||||
parse_test!("fn a(x) {\n x() }", AST(vec![Meta::new(Declaration(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })])))]));
|
||||
parse_test_wrap_ast!("fn a(x) { x() }", decl!(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
||||
parse_test_wrap_ast!("fn a(x) {\n x() }", decl!(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
||||
|
||||
let multiline = r#"
|
||||
fn a(x) {
|
||||
x()
|
||||
}
|
||||
"#;
|
||||
parse_test!(multiline, AST(vec![Meta::new(Declaration(
|
||||
parse_test_wrap_ast!(multiline, decl!(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), default: None, anno: None }], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })])))]));
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
||||
let multiline2 = r#"
|
||||
fn a(x) {
|
||||
|
||||
@@ -220,44 +277,42 @@ x()
|
||||
|
||||
}
|
||||
"#;
|
||||
parse_test!(multiline2, AST(vec![Meta::new(Declaration(
|
||||
parse_test_wrap_ast!(multiline2, decl!(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), default: None, anno: None }], type_anno: None },
|
||||
vec![exst!(s "x()")])))]));
|
||||
vec![exst!(s "x()")])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn functions_with_default_args() {
|
||||
parse_test! {
|
||||
parse_test_wrap_ast! {
|
||||
"fn func(x: Int, y: Int = 4) { }",
|
||||
AST(vec![
|
||||
Meta::new(Declaration(
|
||||
FuncDecl(Signature { name: rc!(func), operator: false, type_anno: None, params: vec![
|
||||
FormalParam { name: rc!(x), default: None, anno: Some(ty!("Int")) },
|
||||
FormalParam { name: rc!(y), default: Some(ex!(s "4")), anno: Some(ty!("Int")) }
|
||||
]}, vec![])
|
||||
))
|
||||
])
|
||||
decl!(
|
||||
FuncDecl(Signature { name: rc!(func), operator: false, type_anno: None, params: vec![
|
||||
FormalParam { name: rc!(x), default: None, anno: Some(ty!("Int")) },
|
||||
FormalParam { name: rc!(y), default: Some(ex!(s "4")), anno: Some(ty!("Int")) }
|
||||
]}, vec![])
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_bools() {
|
||||
parse_test!("false", AST(vec![exst!(BoolLiteral(false))]));
|
||||
parse_test!("true", AST(vec![exst!(BoolLiteral(true))]));
|
||||
parse_test_wrap_ast!("false", exst!(BoolLiteral(false)));
|
||||
parse_test_wrap_ast!("true", exst!(BoolLiteral(true)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_strings() {
|
||||
parse_test!(r#""hello""#, AST(vec![exst!(StringLiteral(rc!(hello)))]));
|
||||
parse_test_wrap_ast!(r#""hello""#, exst!(StringLiteral(rc!(hello))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_types() {
|
||||
parse_test!("type Yolo = Yolo", AST(vec![Meta::new(Declaration(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: false} ))]));
|
||||
parse_test!("type mut Yolo = Yolo", AST(vec![Meta::new(Declaration(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: true} ))]));
|
||||
parse_test!("type alias Sex = Drugs", AST(vec![Meta::new(Declaration(TypeAlias(rc!(Sex), rc!(Drugs))))]));
|
||||
parse_test!("type Sanchez = Miguel | Alejandro(Int, Option<a>) | Esperanza { a: Int, b: String }",
|
||||
AST(vec![Meta::new(Declaration(TypeDecl{
|
||||
parse_test_wrap_ast!("type Yolo = Yolo", decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: false} ));
|
||||
parse_test_wrap_ast!("type mut Yolo = Yolo", decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: true} ));
|
||||
parse_test_wrap_ast!("type alias Sex = Drugs", decl!(TypeAlias { alias: rc!(Sex), original: rc!(Drugs) }));
|
||||
parse_test_wrap_ast!("type Sanchez = Miguel | Alejandro(Int, Option<a>) | Esperanza { a: Int, b: String }",
|
||||
decl!(TypeDecl {
|
||||
name: tys!("Sanchez"),
|
||||
body: TypeBody(vec![
|
||||
UnitStruct(rc!(Miguel)),
|
||||
@@ -274,61 +329,61 @@ fn parsing_types() {
|
||||
}
|
||||
]),
|
||||
mutable: false
|
||||
}))]));
|
||||
}));
|
||||
|
||||
parse_test!("type Jorge<a> = Diego | Kike(a)", AST(vec![
|
||||
Meta::new(Declaration(TypeDecl{
|
||||
parse_test_wrap_ast! {
|
||||
"type Jorge<a> = Diego | Kike(a)",
|
||||
decl!(TypeDecl{
|
||||
name: TypeSingletonName { name: rc!(Jorge), params: vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })] },
|
||||
body: TypeBody(vec![UnitStruct(rc!(Diego)), TupleStruct(rc!(Kike), vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })])]),
|
||||
mutable: false
|
||||
}
|
||||
))]));
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_bindings() {
|
||||
parse_test!("let mut a = 10", AST(vec![Meta::new(Declaration(Binding { name: rc!(a), constant: false, type_anno: None, expr: ex!(m NatLiteral(10)) } ))]));
|
||||
parse_test!("let a = 2 + 2", AST(vec![Meta::new(Declaration(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(m binexp!("+", NatLiteral(2), NatLiteral(2))) }) )]));
|
||||
parse_test!("let a: Nat = 2 + 2", AST(vec![Meta::new(Declaration(
|
||||
Binding { name: rc!(a), constant: true, type_anno: Some(Singleton(TypeSingletonName { name: rc!(Nat), params: vec![] })),
|
||||
expr: Meta::new(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))) }
|
||||
))]));
|
||||
parse_test_wrap_ast!("let mut a = 10", decl!(Binding { name: rc!(a), constant: false, type_anno: None, expr: ex!(NatLiteral(10)) } ));
|
||||
parse_test_wrap_ast!("let a = 2 + 2", decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }));
|
||||
parse_test_wrap_ast!("let a: Nat = 2 + 2", decl!(
|
||||
Binding { name: rc!(a), constant: true, type_anno: Some(Singleton(TypeSingletonName { name: rc!(Nat), params: vec![] })),
|
||||
expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_block_expressions() {
|
||||
parse_test! {
|
||||
"if a() then { b(); c() }", AST(vec![exst!(
|
||||
parse_test_wrap_ast! {
|
||||
"if a() then { b(); c() }", exst!(
|
||||
IfExpression {
|
||||
discriminator: bx! {
|
||||
Discriminator::Simple(ex!(Call { f: bx!(ex!(m val!("a"))), arguments: vec![]}))
|
||||
},
|
||||
discriminator: Some(bx! {
|
||||
ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})
|
||||
}),
|
||||
body: bx! {
|
||||
IfExpressionBody::SimpleConditional(
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(m val!("c"))), arguments: vec![] })],
|
||||
None
|
||||
)
|
||||
IfExpressionBody::SimpleConditional {
|
||||
then_case: vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||
else_case: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
)])
|
||||
)
|
||||
};
|
||||
|
||||
parse_test! {
|
||||
"if a() then { b(); c() } else { q }", AST(vec![exst!(
|
||||
parse_test_wrap_ast! {
|
||||
"if a() then { b(); c() } else { q }", exst!(
|
||||
IfExpression {
|
||||
discriminator: bx! {
|
||||
Discriminator::Simple(ex!(Call { f: bx!(ex!(m val!("a"))), arguments: vec![]}))
|
||||
},
|
||||
discriminator: Some(bx! {
|
||||
ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})
|
||||
}),
|
||||
body: bx! {
|
||||
IfExpressionBody::SimpleConditional(
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(m val!("c"))), arguments: vec![] })],
|
||||
Some(
|
||||
vec![exst!(val!("q"))],
|
||||
)
|
||||
)
|
||||
IfExpressionBody::SimpleConditional {
|
||||
then_case: vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||
else_case: Some(vec![exst!(val!("q"))]),
|
||||
}
|
||||
}
|
||||
}
|
||||
)])
|
||||
)
|
||||
};
|
||||
|
||||
/*
|
||||
@@ -345,7 +400,7 @@ fn parsing_block_expressions() {
|
||||
c
|
||||
}"#,
|
||||
AST(vec![exst!(IfExpression(bx!(ex!(BoolLiteral(true))),
|
||||
vec![Declaration(Binding { name: rc!(a), constant: true, expr: ex!(NatLiteral(10)) }),
|
||||
vec![decl!(Binding { name: rc!(a), constant: true, expr: ex!(NatLiteral(10)) }),
|
||||
exst!(val!(rc!(b)))],
|
||||
Some(vec![exst!(val!(rc!(c)))])))])
|
||||
);
|
||||
@@ -365,90 +420,87 @@ fn parsing_block_expressions() {
|
||||
}
|
||||
#[test]
|
||||
fn parsing_interfaces() {
|
||||
parse_test!("interface Unglueable { fn unglue(a: Glue); fn mar(): Glue }", AST(vec![
|
||||
Meta::new(Declaration(Interface {
|
||||
name: rc!(Unglueable),
|
||||
signatures: vec![
|
||||
Signature {
|
||||
name: rc!(unglue),
|
||||
operator: false,
|
||||
params: vec![
|
||||
FormalParam { name: rc!(a), anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })), default: None }
|
||||
],
|
||||
type_anno: None
|
||||
},
|
||||
Signature { name: rc!(mar), operator: false, params: vec![], type_anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })) },
|
||||
]
|
||||
}))
|
||||
]));
|
||||
parse_test_wrap_ast!("interface Unglueable { fn unglue(a: Glue); fn mar(): Glue }",
|
||||
decl!(Interface {
|
||||
name: rc!(Unglueable),
|
||||
signatures: vec![
|
||||
Signature {
|
||||
name: rc!(unglue),
|
||||
operator: false,
|
||||
params: vec![
|
||||
FormalParam { name: rc!(a), anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })), default: None }
|
||||
],
|
||||
type_anno: None
|
||||
},
|
||||
Signature { name: rc!(mar), operator: false, params: vec![], type_anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })) },
|
||||
]
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_impls() {
|
||||
parse_test!("impl Heh { fn yolo(); fn swagg(); }", AST(vec![
|
||||
Meta::new(
|
||||
Declaration(Impl {
|
||||
type_name: ty!("Heh"),
|
||||
interface_name: None,
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None }),
|
||||
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
|
||||
] }))]));
|
||||
parse_test_wrap_ast!("impl Heh { fn yolo(); fn swagg(); }",
|
||||
decl!(Impl {
|
||||
type_name: ty!("Heh"),
|
||||
interface_name: None,
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None }),
|
||||
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
|
||||
] }));
|
||||
|
||||
parse_test!("impl Mondai for Lollerino { fn yolo(); fn swagg(); }", AST(vec![
|
||||
Meta::new(Declaration(Impl {
|
||||
type_name: ty!("Lollerino"),
|
||||
interface_name: Some(TypeSingletonName { name: rc!(Mondai), params: vec![] }),
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None}),
|
||||
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
|
||||
] }))]));
|
||||
parse_test_wrap_ast!("impl Mondai for Lollerino { fn yolo(); fn swagg(); }",
|
||||
decl!(Impl {
|
||||
type_name: ty!("Lollerino"),
|
||||
interface_name: Some(TypeSingletonName { name: rc!(Mondai), params: vec![] }),
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None}),
|
||||
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
|
||||
] }));
|
||||
|
||||
parse_test!("impl Hella<T> for (Alpha, Omega) { }", AST(vec![
|
||||
Meta::new(Declaration(Impl {
|
||||
type_name: Tuple(vec![ty!("Alpha"), ty!("Omega")]),
|
||||
interface_name: Some(TypeSingletonName { name: rc!(Hella), params: vec![ty!("T")] }),
|
||||
block: vec![]
|
||||
}))
|
||||
]));
|
||||
parse_test_wrap_ast!("impl Hella<T> for (Alpha, Omega) { }",
|
||||
decl!(Impl {
|
||||
type_name: Tuple(vec![ty!("Alpha"), ty!("Omega")]),
|
||||
interface_name: Some(TypeSingletonName { name: rc!(Hella), params: vec![ty!("T")] }),
|
||||
block: vec![]
|
||||
})
|
||||
);
|
||||
|
||||
parse_test!("impl Option<WTFMate> { fn oi() }", AST(vec![
|
||||
Meta::new(
|
||||
Declaration(Impl {
|
||||
type_name: Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("WTFMate")]}),
|
||||
interface_name: None,
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }),
|
||||
]
|
||||
}))]));
|
||||
parse_test_wrap_ast!("impl Option<WTFMate> { fn oi() }",
|
||||
decl!(Impl {
|
||||
type_name: Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("WTFMate")]}),
|
||||
interface_name: None,
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }),
|
||||
]
|
||||
}));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_type_annotations() {
|
||||
parse_test!("let a = b : Int", AST(vec![
|
||||
Meta::new(
|
||||
Declaration(Binding { name: rc!(a), constant: true, type_anno: None, expr:
|
||||
ex!(m val!("b"), ty!("Int")) }))]));
|
||||
parse_test_wrap_ast!("let a = b : Int",
|
||||
decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr:
|
||||
ex!(val!("b"), ty!("Int")) }));
|
||||
|
||||
parse_test!("a : Int", AST(vec![
|
||||
parse_test_wrap_ast!("a : Int",
|
||||
exst!(val!("a"), ty!("Int"))
|
||||
]));
|
||||
);
|
||||
|
||||
parse_test!("a : Option<Int>", AST(vec![
|
||||
parse_test_wrap_ast!("a : Option<Int>",
|
||||
exst!(val!("a"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Int")] }))
|
||||
]));
|
||||
);
|
||||
|
||||
parse_test!("a : KoreanBBQSpecifier<Kimchi, Option<Bulgogi> >", AST(vec![
|
||||
parse_test_wrap_ast!("a : KoreanBBQSpecifier<Kimchi, Option<Bulgogi> >",
|
||||
exst!(val!("a"), Singleton(TypeSingletonName { name: rc!(KoreanBBQSpecifier), params: vec![
|
||||
ty!("Kimchi"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Bulgogi")] })
|
||||
] }))
|
||||
]));
|
||||
);
|
||||
|
||||
parse_test!("a : (Int, Yolo<a>)", AST(vec![
|
||||
parse_test_wrap_ast!("a : (Int, Yolo<a>)",
|
||||
exst!(val!("a"), Tuple(
|
||||
vec![ty!("Int"), Singleton(TypeSingletonName {
|
||||
name: rc!(Yolo), params: vec![ty!("a")]
|
||||
})]))]));
|
||||
})])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -458,7 +510,7 @@ fn parsing_lambdas() {
|
||||
)
|
||||
}
|
||||
|
||||
parse_test!(r#"\ (x: Int, y) { a;b;c;}"#, AST(vec![
|
||||
parse_test_wrap_ast!(r#"\ (x: Int, y) { a;b;c;}"#,
|
||||
exst!(Lambda {
|
||||
params: vec![
|
||||
FormalParam { name: rc!(x), anno: Some(ty!("Int")), default: None },
|
||||
@@ -467,10 +519,10 @@ fn parsing_lambdas() {
|
||||
type_anno: None,
|
||||
body: vec![exst!(s "a"), exst!(s "b"), exst!(s "c")]
|
||||
})
|
||||
]));
|
||||
);
|
||||
|
||||
parse_test!(r#"\(x){y}(1)"#, AST(vec![
|
||||
exst!(Call { f: bx!(ex!(m
|
||||
parse_test_wrap_ast! { r#"\(x){y}(1)"#,
|
||||
exst!(Call { f: bx!(ex!(
|
||||
Lambda {
|
||||
params: vec![
|
||||
FormalParam { name: rc!(x), anno: None, default: None }
|
||||
@@ -478,7 +530,8 @@ fn parsing_lambdas() {
|
||||
type_anno: None,
|
||||
body: vec![exst!(s "y")] }
|
||||
)),
|
||||
arguments: vec![inv!(ex!(NatLiteral(1))).into()] })]));
|
||||
arguments: vec![inv!(ex!(NatLiteral(1))).into()] })
|
||||
};
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
r#"\(x: Int): String { "q" }"#,
|
||||
@@ -517,54 +570,58 @@ fn single_param_lambda() {
|
||||
fn more_advanced_lambdas() {
|
||||
parse_test! {
|
||||
r#"fn wahoo() { let a = 10; \(x) { x + a } };
|
||||
wahoo()(3) "#, AST(vec![
|
||||
exst!(s r"fn wahoo() { let a = 10; \(x) { x + a } }"),
|
||||
exst! {
|
||||
wahoo()(3) "#,
|
||||
AST {
|
||||
id: ItemIdStore::new_id(),
|
||||
statements: vec![
|
||||
exst!(s r"fn wahoo() { let a = 10; \(x) { x + a } }"),
|
||||
exst! {
|
||||
Call {
|
||||
f: bx!(ex!(m Call { f: bx!(ex!(m val!("wahoo"))), arguments: vec![] })),
|
||||
f: bx!(ex!(Call { f: bx!(ex!(val!("wahoo"))), arguments: vec![] })),
|
||||
arguments: vec![inv!(ex!(NatLiteral(3))).into()],
|
||||
}
|
||||
}
|
||||
])
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_literals() {
|
||||
parse_test! {
|
||||
"[1,2]", AST(vec![
|
||||
exst!(ListLiteral(vec![ex!(m NatLiteral(1)), ex!(m NatLiteral(2))]))])
|
||||
parse_test_wrap_ast! {
|
||||
"[1,2]",
|
||||
exst!(ListLiteral(vec![ex!(NatLiteral(1)), ex!(NatLiteral(2))]))
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn while_expr() {
|
||||
parse_test! {
|
||||
"while { }", AST(vec![
|
||||
exst!(WhileExpression { condition: None, body: vec![] })])
|
||||
parse_test_wrap_ast! {
|
||||
"while { }",
|
||||
exst!(WhileExpression { condition: None, body: vec![] })
|
||||
}
|
||||
|
||||
parse_test! {
|
||||
"while a == b { }", AST(vec![
|
||||
exst!(WhileExpression { condition: Some(bx![ex![m binexp!("==", val!("a"), val!("b"))]]), body: vec![] })])
|
||||
parse_test_wrap_ast! {
|
||||
"while a == b { }",
|
||||
exst!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![] })
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn for_expr() {
|
||||
parse_test! {
|
||||
"for { a <- maybeValue } return 1", AST(vec![
|
||||
parse_test_wrap_ast! {
|
||||
"for { a <- maybeValue } return 1",
|
||||
exst!(ForExpression {
|
||||
enumerators: vec![Enumerator { id: rc!(a), generator: ex!(m val!("maybeValue")) }],
|
||||
body: bx!(MonadicReturn(Meta::new(ex!(s "1"))))
|
||||
})])
|
||||
enumerators: vec![Enumerator { id: rc!(a), generator: ex!(val!("maybeValue")) }],
|
||||
body: bx!(MonadicReturn(ex!(s "1")))
|
||||
})
|
||||
}
|
||||
|
||||
parse_test! {
|
||||
"for n <- someRange { f(n); }", AST(vec![
|
||||
exst!(ForExpression { enumerators: vec![Enumerator { id: rc!(n), generator: ex!(m val!("someRange"))}],
|
||||
parse_test_wrap_ast! {
|
||||
"for n <- someRange { f(n); }",
|
||||
exst!(ForExpression { enumerators: vec![Enumerator { id: rc!(n), generator: ex!(val!("someRange"))}],
|
||||
body: bx!(ForBody::StatementBlock(vec![exst!(s "f(n)")]))
|
||||
})])
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -573,29 +630,41 @@ fn patterns() {
|
||||
parse_test_wrap_ast! {
|
||||
"if x is Some(a) then { 4 } else { 9 }", exst!(
|
||||
IfExpression {
|
||||
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch(Pattern::TupleStruct(rc!(Some), vec![Pattern::Literal(PatternLiteral::VarPattern(rc!(a)))]), vec![exst!(s "4")], Some(vec![exst!(s "9")]))) }
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::TupleStruct(qname!(Some), vec![Pattern::VarOrName(qname!(a))]),
|
||||
then_case: vec![exst!(s "4")],
|
||||
else_case: Some(vec![exst!(s "9")]) })
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if x is Some(a) then 4 else 9", exst!(
|
||||
IfExpression {
|
||||
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch(Pattern::TupleStruct(rc!(Some), vec![Pattern::Literal(PatternLiteral::VarPattern(rc!(a)))]), vec![exst!(s "4")], Some(vec![exst!(s "9")]))) }
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::TupleStruct(qname!(Some), vec![Pattern::VarOrName(qname!(a))]),
|
||||
then_case: vec![exst!(s "4")],
|
||||
else_case: Some(vec![exst!(s "9")]) }
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if x is Something { a, b: x } then { 4 } else { 9 }", exst!(
|
||||
IfExpression {
|
||||
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||
Pattern::Record(rc!(Something), vec![
|
||||
(rc!(a),Pattern::Literal(PatternLiteral::StringPattern(rc!(a)))),
|
||||
(rc!(b),Pattern::Literal(PatternLiteral::VarPattern(rc!(x))))
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Record(qname!(Something), vec![
|
||||
(rc!(a),Pattern::Literal(PatternLiteral::StringPattern(rc!(a)))),
|
||||
(rc!(b),Pattern::VarOrName(qname!(x)))
|
||||
]),
|
||||
vec![exst!(s "4")], Some(vec![exst!(s "9")])))
|
||||
then_case: vec![exst!(s "4")],
|
||||
else_case: Some(vec![exst!(s "9")])
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
@@ -607,12 +676,12 @@ fn pattern_literals() {
|
||||
"if x is -1 then 1 else 2",
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||
Pattern::Literal(PatternLiteral::NumPattern { neg: true, num: NatLiteral(1) }),
|
||||
vec![exst!(NatLiteral(1))],
|
||||
Some(vec![exst!(NatLiteral(2))]),
|
||||
))
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Literal(PatternLiteral::NumPattern { neg: true, num: NatLiteral(1) }),
|
||||
then_case: vec![exst!(NatLiteral(1))],
|
||||
else_case: Some(vec![exst!(NatLiteral(2))]),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
@@ -621,42 +690,139 @@ fn pattern_literals() {
|
||||
"if x is 1 then 1 else 2",
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||
Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1) }),
|
||||
vec![exst!(s "1")],
|
||||
Some(vec![exst!(s "2")]),
|
||||
))
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1) }),
|
||||
then_case: vec![exst!(s "1")],
|
||||
else_case: Some(vec![exst!(s "2")]),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test! {
|
||||
"if x is true then 1 else 2", AST(vec![
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||
Pattern::Literal(PatternLiteral::BoolPattern(true)),
|
||||
vec![exst!(NatLiteral(1))],
|
||||
Some(vec![exst!(NatLiteral(2))]),
|
||||
))
|
||||
}
|
||||
)
|
||||
])
|
||||
parse_test_wrap_ast! {
|
||||
"if x is true then 1 else 2",
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(
|
||||
IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Literal(PatternLiteral::BoolPattern(true)),
|
||||
then_case: vec![exst!(NatLiteral(1))],
|
||||
else_case: Some(vec![exst!(NatLiteral(2))]),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if x is \"gnosticism\" then 1 else 2",
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: bx!(Discriminator::Simple(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch(
|
||||
Pattern::Literal(PatternLiteral::StringPattern(rc!(gnosticism))),
|
||||
vec![exst!(s "1")],
|
||||
Some(vec![exst!(s "2")]),
|
||||
))
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Literal(PatternLiteral::StringPattern(rc!(gnosticism))),
|
||||
then_case: vec![exst!(s "1")],
|
||||
else_case: Some(vec![exst!(s "2")]),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imports() {
|
||||
parse_test_wrap_ast! {
|
||||
"import harbinger::draughts::Norgleheim",
|
||||
import!(ImportSpecifier {
|
||||
id: ItemIdStore::new_id(),
|
||||
path_components: vec![rc!(harbinger), rc!(draughts), rc!(Norgleheim)],
|
||||
imported_names: ImportedNames::LastOfPath
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imports_2() {
|
||||
parse_test_wrap_ast! {
|
||||
"import harbinger::draughts::{Norgleheim, Xraksenlaigar}",
|
||||
import!(ImportSpecifier {
|
||||
id: ItemIdStore::new_id(),
|
||||
path_components: vec![rc!(harbinger), rc!(draughts)],
|
||||
imported_names: ImportedNames::List(vec![
|
||||
rc!(Norgleheim),
|
||||
rc!(Xraksenlaigar)
|
||||
])
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imports_3() {
|
||||
parse_test_wrap_ast! {
|
||||
"import bespouri::{}",
|
||||
import!(ImportSpecifier {
|
||||
id: ItemIdStore::new_id(),
|
||||
path_components: vec![rc!(bespouri)],
|
||||
imported_names: ImportedNames::List(vec![])
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn imports_4() {
|
||||
parse_test_wrap_ast! {
|
||||
"import bespouri::*",
|
||||
import!(ImportSpecifier {
|
||||
id: ItemIdStore::new_id(),
|
||||
path_components: vec![rc!(bespouri)],
|
||||
imported_names: ImportedNames::All
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn if_expr() {
|
||||
parse_test_wrap_ast! {
|
||||
"if x { is 1 then 5, else 20 }",
|
||||
exst! {
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::CondList(
|
||||
vec![
|
||||
ConditionArm {
|
||||
condition: Condition::Pattern(Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1)})),
|
||||
guard: None,
|
||||
body: vec![exst!(s "5")],
|
||||
},
|
||||
ConditionArm {
|
||||
condition: Condition::Else,
|
||||
guard: None,
|
||||
body: vec![exst!(s "20")],
|
||||
},
|
||||
]
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn modules() {
|
||||
parse_test_wrap_ast! {
|
||||
r#"
|
||||
module ephraim {
|
||||
let a = 10
|
||||
fn nah() { 33 }
|
||||
}
|
||||
"#,
|
||||
module!(
|
||||
ModuleSpecifier { name: rc!(ephraim), contents: vec![
|
||||
decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(s "10") }),
|
||||
decl!(FuncDecl(Signature { name: rc!(nah), operator: false, params: vec![], type_anno: None }, vec![exst!(NatLiteral(33))])),
|
||||
] }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,14 @@
let _SCHALA_VERSION = "0.1.0"

type Option<T> = Some(T) | None
type Color = Red | Green | Blue
type Ord = LT | EQ | GT


fn map(input: Option<T>, func: Func): Option<T> {
  if input {
    is Some(x) -> Some(func(x)),
    is None -> None,
    is Option::Some(x) then Option::Some(func(x)),
    is Option::None then Option::None,
  }
}

type Complicated = Sunrise | Metal { black: bool, norwegian: bool } | Fella(String, Int)
@@ -1,8 +1,24 @@
//! # Reduced AST
//! The reduced AST is a minimal AST designed to be built from the full AST after all possible
//! static checks have been done. Consequently, the AST reduction phase does very little error
//! checking itself - any errors should ideally be caught either by an earlier phase, or are
//! runtime errors that the evaluator should handle. That said, because it does do table lookups
//! that can in principle fail [especially at the moment with most static analysis not yet complete],
//! there is an Expr variant `ReductionError` to handle these cases.
//!
//! A design decision to make - should the ReducedAST types contain all information about
//! type/layout necessary for the evaluator to work? If so, then the evaluator should not
//! have access to the symbol table at all and ReducedAST should carry that information. If not,
//! then ReducedAST shouldn't be duplicating information that can be queried at runtime from the
//! symbol table. But I think the former might make sense since ultimately the bytecode will be
//! built from the ReducedAST.
use std::rc::Rc;
use std::str::FromStr;

use crate::ast::*;
use crate::symbol_table::{Symbol, SymbolSpec, SymbolTable};
use crate::builtin::{BinOp, PrefixOp};
use crate::symbol_table::{Symbol, SymbolSpec, SymbolTable, FullyQualifiedSymbolName};
use crate::builtin::Builtin;
use crate::util::deref_optional_box;

#[derive(Debug)]
pub struct ReducedAST(pub Vec<Stmt>);
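A hedged usage sketch of the new reduction entry point defined further down in this diff (reduce becomes a free function that wraps a Reducer borrowing the symbol table); the caller name here is an illustrative assumption, not part of the change.

// Illustrative call site only; `lower_program` is hypothetical.
fn lower_program(ast: &AST, symbols: &SymbolTable) -> ReducedAST {
    // Previously this was roughly a method call, `ast.reduce(symbols)`.
    // After this change, reduction is driven by the Reducer struct internally:
    reduce(ast, symbols)
}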
@@ -26,21 +42,21 @@ pub enum Stmt {
pub enum Expr {
  Unit,
  Lit(Lit),
  Sym(Rc<String>), //a Sym is anything that can be looked up by name at runtime - i.e. a function or variable address
  Tuple(Vec<Expr>),
  Func(Func),
  Val(Rc<String>),
  Constructor {
    type_name: Rc<String>,
    name: Rc<String>,
    tag: usize,
    arity: usize,
    arity: usize, // n.b. arity here is always the value from the symbol table - if it doesn't match what it's being called with, that's an eval error, eval will handle it
  },
  Call {
    f: Box<Expr>,
    args: Vec<Expr>,
  },
  Assign {
    val: Box<Expr>,
    val: Box<Expr>, //TODO this probably can't be a val
    expr: Box<Expr>,
  },
  Conditional {
@@ -53,17 +69,15 @@ pub enum Expr {
    cond: Box<Expr>,
    alternatives: Vec<Alternative>
  },
  UnimplementedSigilValue
  UnimplementedSigilValue,
  ReductionError(String),
}

pub type BoundVars = Vec<Option<Rc<String>>>; //remember that order matters here

#[derive(Debug, Clone)]
pub struct Alternative {
  pub tag: Option<usize>,
  pub subpatterns: Vec<Option<Subpattern>>,
  pub guard: Option<Expr>,
  pub bound_vars: BoundVars,
  pub matchable: Subpattern,
  pub item: Vec<Stmt>,
}
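To make the Alternative refactor concrete, here is a toy, self-contained mirror of the change (all names hypothetical, not the crate's actual types): the flat tag/subpatterns/guard/bound_vars fields collapse into a single nested matchable value, so the wildcard "else" arm of a match is just a default subpattern plus the else-branch statements.

#[derive(Debug, Default)]
struct ToySubpattern {
    tag: Option<usize>,
    guard: Option<String>,
    bound_vars: Vec<Option<String>>,
}

#[derive(Debug)]
struct ToyAlternative {
    matchable: ToySubpattern,
    item: Vec<String>, // stand-in for Vec<Stmt>
}

fn main() {
    // The "else" branch becomes an alternative with an empty matchable:
    // no tag, no guard, no bound variables.
    let else_arm = ToyAlternative {
        matchable: ToySubpattern::default(),
        item: vec!["else_clause".to_string()],
    };
    println!("{:?}", else_arm);
}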

@@ -86,7 +100,7 @@ pub enum Lit {

#[derive(Debug, Clone)]
pub enum Func {
  BuiltIn(Rc<String>),
  BuiltIn(Builtin),
  UserDefined {
    name: Option<Rc<String>>,
    params: Vec<Rc<String>>,
@@ -94,144 +108,272 @@ pub enum Func {
  }
}

impl AST {
pub fn reduce(&self, symbol_table: &SymbolTable) -> ReducedAST {
pub fn reduce(ast: &AST, symbol_table: &SymbolTable) -> ReducedAST {
let mut reducer = Reducer { symbol_table };
reducer.ast(ast)
}

struct Reducer<'a> {
symbol_table: &'a SymbolTable
}

impl<'a> Reducer<'a> {
fn ast(&mut self, ast: &AST) -> ReducedAST {
let mut output = vec![];
for statement in self.0.iter() {
output.push(statement.node().reduce(symbol_table));
for statement in ast.statements.iter() {
output.push(self.statement(statement));
}
ReducedAST(output)
}
}

impl Statement {
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
use crate::ast::Statement::*;
match self {
ExpressionStatement(expr) => Stmt::Expr(expr.node().reduce(symbol_table)),
Declaration(decl) => decl.reduce(symbol_table),
fn statement(&mut self, stmt: &Statement) -> Stmt {
match &stmt.kind {
StatementKind::Expression(expr) => Stmt::Expr(self.expression(&expr)),
StatementKind::Declaration(decl) => self.declaration(&decl),
StatementKind::Import(_) => Stmt::Noop,
StatementKind::Module(modspec) => {
for statement in modspec.contents.iter() {
self.statement(&statement);
}
Stmt::Noop
}
}
}
}

fn reduce_block(block: &Block, symbol_table: &SymbolTable) -> Vec<Stmt> {
block.iter().map(|stmt| stmt.node().reduce(symbol_table)).collect()
}
fn block(&mut self, block: &Block) -> Vec<Stmt> {
block.iter().map(|stmt| self.statement(stmt)).collect()
}
|
||||
|
||||
impl InvocationArgument {
|
||||
fn reduce(&self, symbol_table: &SymbolTable) -> Expr {
|
||||
fn invocation_argument(&mut self, invoc: &InvocationArgument) -> Expr {
|
||||
use crate::ast::InvocationArgument::*;
|
||||
match self {
|
||||
Positional(ex) => ex.reduce(symbol_table),
|
||||
match invoc {
|
||||
Positional(ex) => self.expression(ex),
|
||||
Keyword { .. } => Expr::UnimplementedSigilValue,
|
||||
Ignored => Expr::UnimplementedSigilValue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Expression {
|
||||
fn reduce(&self, symbol_table: &SymbolTable) -> Expr {
|
||||
fn expression(&mut self, expr: &Expression) -> Expr {
|
||||
use crate::ast::ExpressionKind::*;
|
||||
let ref input = self.kind;
|
||||
let symbol_table = self.symbol_table;
|
||||
let ref input = expr.kind;
|
||||
match input {
|
||||
NatLiteral(n) => Expr::Lit(Lit::Nat(*n)),
|
||||
FloatLiteral(f) => Expr::Lit(Lit::Float(*f)),
|
||||
StringLiteral(s) => Expr::Lit(Lit::StringLit(s.clone())),
|
||||
BoolLiteral(b) => Expr::Lit(Lit::Bool(*b)),
|
||||
BinExp(binop, lhs, rhs) => binop.reduce(symbol_table, lhs, rhs),
|
||||
PrefixExp(op, arg) => op.reduce(symbol_table, arg),
|
||||
Value(name) => match symbol_table.lookup_by_name(name) {
|
||||
Some(Symbol { spec: SymbolSpec::DataConstructor { index, type_args, type_name}, .. }) => Expr::Constructor {
|
||||
type_name: type_name.clone(),
|
||||
name: name.clone(),
|
||||
tag: index.clone(),
|
||||
arity: type_args.len(),
|
||||
},
|
||||
_ => Expr::Val(name.clone()),
|
||||
},
|
||||
Call { f, arguments } => reduce_call_expression(f, arguments, symbol_table),
|
||||
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| e.node().reduce(symbol_table)).collect()),
|
||||
IfExpression { discriminator, body } => reduce_if_expression(discriminator, body, symbol_table),
|
||||
Lambda { params, body, .. } => reduce_lambda(params, body, symbol_table),
|
||||
NamedStruct { .. } => Expr::UnimplementedSigilValue,
|
||||
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
|
||||
PrefixExp(op, arg) => self.prefix(op, arg),
|
||||
Value(qualified_name) => self.value(qualified_name),
|
||||
Call { f, arguments } => self.reduce_call_expression(f, arguments),
|
||||
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
|
||||
IfExpression { discriminator, body } => self.reduce_if_expression(deref_optional_box(discriminator), body),
|
||||
Lambda { params, body, .. } => self.reduce_lambda(params, body),
|
||||
NamedStruct { name, fields } => self.reduce_named_struct(name, fields),
|
||||
Index { .. } => Expr::UnimplementedSigilValue,
|
||||
WhileExpression { .. } => Expr::UnimplementedSigilValue,
|
||||
ForExpression { .. } => Expr::UnimplementedSigilValue,
|
||||
ListLiteral { .. } => Expr::UnimplementedSigilValue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn reduce_lambda(params: &Vec<FormalParam>, body: &Block, symbol_table: &SymbolTable) -> Expr {
|
||||
Expr::Func(Func::UserDefined {
|
||||
name: None,
|
||||
params: params.iter().map(|param| param.name.clone()).collect(),
|
||||
body: reduce_block(body, symbol_table),
|
||||
})
|
||||
}
|
||||
fn value(&mut self, qualified_name: &QualifiedName) -> Expr {
|
||||
let symbol_table = self.symbol_table;
|
||||
let ref id = qualified_name.id;
|
||||
let ref sym_name = match symbol_table.get_fqsn_from_id(id) {
|
||||
Some(fqsn) => fqsn,
|
||||
None => return Expr::ReductionError(format!("FQSN lookup for Value {:?} failed", qualified_name)),
|
||||
};
|
||||
|
||||
fn reduce_call_expression(func: &Meta<Expression>, arguments: &Vec<Meta<InvocationArgument>>, symbol_table: &SymbolTable) -> Expr {
|
||||
Expr::Call {
|
||||
f: Box::new(func.node().reduce(symbol_table)),
|
||||
args: arguments.iter().map(|arg| arg.node().reduce(symbol_table)).collect(),
|
||||
//TODO this probably needs to change
|
||||
let FullyQualifiedSymbolName(ref v) = sym_name;
|
||||
let name = v.last().unwrap().name.clone();
|
||||
|
||||
let Symbol { local_name, spec, .. } = match symbol_table.lookup_by_fqsn(&sym_name) {
|
||||
Some(s) => s,
|
||||
//None => return Expr::ReductionError(format!("Symbol {:?} not found", sym_name)),
|
||||
None => return Expr::Sym(name.clone())
|
||||
};
|
||||
|
||||
match spec {
|
||||
SymbolSpec::RecordConstructor { .. } => Expr::ReductionError(format!("AST reducer doesn't expect a RecordConstructor here")),
|
||||
SymbolSpec::DataConstructor { index, type_args, type_name } => Expr::Constructor {
|
||||
type_name: type_name.clone(),
|
||||
name: name.clone(),
|
||||
tag: index.clone(),
|
||||
arity: type_args.len(),
|
||||
},
|
||||
SymbolSpec::Func(_) => Expr::Sym(local_name.clone()),
|
||||
SymbolSpec::Binding => Expr::Sym(local_name.clone()), //TODO not sure if this is right, probably needs to eventually be fqsn
|
||||
SymbolSpec::Type { .. } => Expr::ReductionError("AST reducer doesn't expect a type here".to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody, symbol_table: &SymbolTable) -> Expr {
|
||||
let cond = Box::new(match *discriminator {
|
||||
Discriminator::Simple(ref expr) => expr.reduce(symbol_table),
|
||||
Discriminator::BinOp(ref _expr, ref _binop) => panic!("Can't yet handle binop discriminators")
|
||||
});
|
||||
match *body {
|
||||
IfExpressionBody::SimpleConditional(ref then_clause, ref else_clause) => {
|
||||
let then_clause = reduce_block(then_clause, symbol_table);
|
||||
let else_clause = match else_clause {
|
||||
None => vec![],
|
||||
Some(stmts) => reduce_block(stmts, symbol_table),
|
||||
};
|
||||
Expr::Conditional { cond, then_clause, else_clause }
|
||||
},
|
||||
IfExpressionBody::SimplePatternMatch(ref pat, ref then_clause, ref else_clause) => {
|
||||
let then_clause = reduce_block(then_clause, symbol_table);
|
||||
let else_clause = match else_clause {
|
||||
None => vec![],
|
||||
Some(stmts) => reduce_block(stmts, symbol_table),
|
||||
};
|
||||
fn reduce_lambda(&mut self, params: &Vec<FormalParam>, body: &Block) -> Expr {
|
||||
Expr::Func(Func::UserDefined {
|
||||
name: None,
|
||||
params: params.iter().map(|param| param.name.clone()).collect(),
|
||||
body: self.block(body),
|
||||
})
|
||||
}
|
||||
|
||||
let alternatives = vec![
|
||||
pat.to_alternative(then_clause, symbol_table),
|
||||
Alternative {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
bound_vars: vec![],
|
||||
guard: None,
|
||||
item: else_clause
|
||||
},
|
||||
];
|
||||
fn reduce_named_struct(&mut self, name: &QualifiedName, fields: &Vec<(Rc<String>, Expression)>) -> Expr {
|
||||
let symbol_table = self.symbol_table;
|
||||
let ref sym_name = match symbol_table.get_fqsn_from_id(&name.id) {
|
||||
Some(fqsn) => fqsn,
|
||||
None => return Expr::ReductionError(format!("FQSN lookup for name {:?} failed", name)),
|
||||
};
|
||||
|
||||
Expr::CaseMatch {
|
||||
cond,
|
||||
alternatives,
|
||||
}
|
||||
},
|
||||
IfExpressionBody::GuardList(ref guard_arms) => {
|
||||
let mut alternatives = vec![];
|
||||
for arm in guard_arms {
|
||||
match arm.guard {
|
||||
Guard::Pat(ref p) => {
|
||||
let item = reduce_block(&arm.body, symbol_table);
|
||||
let alt = p.to_alternative(item, symbol_table);
|
||||
alternatives.push(alt);
|
||||
let FullyQualifiedSymbolName(ref v) = sym_name;
|
||||
let ref name = v.last().unwrap().name;
|
||||
let (type_name, index, members_from_table) = match symbol_table.lookup_by_fqsn(&sym_name) {
|
||||
Some(Symbol { spec: SymbolSpec::RecordConstructor { members, type_name, index }, .. }) => (type_name.clone(), index, members),
|
||||
_ => return Expr::ReductionError("Not a record constructor".to_string()),
|
||||
};
|
||||
let arity = members_from_table.len();
|
||||
|
||||
let mut args: Vec<(Rc<String>, Expr)> = fields.iter()
|
||||
.map(|(name, expr)| (name.clone(), self.expression(expr)))
|
||||
.collect();
|
||||
|
||||
args.as_mut_slice()
|
||||
.sort_unstable_by(|(name1, _), (name2, _)| name1.cmp(name2)); //arbitrary - sorting by alphabetical order
|
||||
|
||||
let args = args.into_iter().map(|(_, expr)| expr).collect();
|
||||
|
||||
//TODO make sure this sorting actually works
|
||||
let f = box Expr::Constructor { type_name, name: name.clone(), tag: *index, arity, };
|
||||
Expr::Call { f, args }
|
||||
}
|
||||
|
||||
fn reduce_call_expression(&mut self, func: &Expression, arguments: &Vec<InvocationArgument>) -> Expr {
|
||||
Expr::Call {
|
||||
f: Box::new(self.expression(func)),
|
||||
args: arguments.iter().map(|arg| self.invocation_argument(arg)).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
fn reduce_if_expression(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> Expr {
|
||||
let symbol_table = self.symbol_table;
|
||||
let cond = Box::new(match discriminator {
|
||||
Some(expr) => self.expression(expr),
|
||||
None => return Expr::ReductionError(format!("blank cond if-expr not supported")),
|
||||
});
|
||||
|
||||
match body {
|
||||
IfExpressionBody::SimpleConditional { then_case, else_case } => {
|
||||
let then_clause = self.block(&then_case);
|
||||
let else_clause = match else_case.as_ref() {
|
||||
None => vec![],
|
||||
Some(stmts) => self.block(&stmts),
|
||||
};
|
||||
Expr::Conditional { cond, then_clause, else_clause }
|
||||
},
|
||||
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case } => {
|
||||
let then_clause = self.block(&then_case);
|
||||
let else_clause = match else_case.as_ref() {
|
||||
None => vec![],
|
||||
Some(stmts) => self.block(&stmts),
|
||||
};
|
||||
|
||||
let alternatives = vec![
|
||||
pattern.to_alternative(then_clause, symbol_table),
|
||||
Alternative {
|
||||
matchable: Subpattern {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
bound_vars: vec![],
|
||||
guard: None,
|
||||
},
|
||||
item: else_clause
|
||||
},
|
||||
Guard::HalfExpr(HalfExpr { op: _, expr: _ }) => {
|
||||
return Expr::UnimplementedSigilValue
|
||||
];
|
||||
|
||||
Expr::CaseMatch {
|
||||
cond,
|
||||
alternatives,
|
||||
}
|
||||
},
|
||||
IfExpressionBody::CondList(ref condition_arms) => {
|
||||
let mut alternatives = vec![];
|
||||
for arm in condition_arms {
|
||||
match arm.condition {
|
||||
Condition::Expression(ref _expr) => {
|
||||
return Expr::UnimplementedSigilValue
|
||||
},
|
||||
Condition::Pattern(ref p) => {
|
||||
let item = self.block(&arm.body);
|
||||
let alt = p.to_alternative(item, symbol_table);
|
||||
alternatives.push(alt);
|
||||
},
|
||||
Condition::TruncatedOp(_, _) => {
|
||||
return Expr::UnimplementedSigilValue
|
||||
},
|
||||
Condition::Else => {
|
||||
return Expr::UnimplementedSigilValue
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::CaseMatch { cond, alternatives }
|
||||
}
|
||||
Expr::CaseMatch { cond, alternatives }
|
||||
}
|
||||
}
|
||||
|
||||
fn binop(&mut self, binop: &BinOp, lhs: &Box<Expression>, rhs: &Box<Expression>) -> Expr {
|
||||
let operation = Builtin::from_str(binop.sigil()).ok();
|
||||
match operation {
|
||||
Some(Builtin::Assignment) => Expr::Assign {
|
||||
val: Box::new(self.expression(&*lhs)),
|
||||
expr: Box::new(self.expression(&*rhs)),
|
||||
},
|
||||
Some(op) => {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(op)));
|
||||
Expr::Call { f, args: vec![self.expression(&*lhs), self.expression(&*rhs)] }
|
||||
},
|
||||
None => {
|
||||
//TODO handle a user-defined operation
|
||||
Expr::UnimplementedSigilValue
|
||||
}
|
||||
}
|
||||
}
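The binop reduction above keys off Builtin::from_str(binop.sigil()), special-casing assignment and falling back to UnimplementedSigilValue when the sigil is unknown. A toy, self-contained illustration of that dispatch pattern follows; the ToyBuiltin enum and its sigil set are hypothetical stand-ins, not the crate's actual Builtin.

use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum ToyBuiltin { Assignment, Add, Equality }

impl FromStr for ToyBuiltin {
    type Err = ();
    fn from_str(s: &str) -> Result<Self, ()> {
        Ok(match s {
            "=" => ToyBuiltin::Assignment,
            "+" => ToyBuiltin::Add,
            "==" => ToyBuiltin::Equality,
            _ => return Err(()),
        })
    }
}

fn main() {
    // Known sigils resolve to a builtin; unknown ones yield None, which the
    // reducer maps to an UnimplementedSigilValue-style fallback.
    assert_eq!(ToyBuiltin::from_str("=").ok(), Some(ToyBuiltin::Assignment));
    assert_eq!(ToyBuiltin::from_str("<=>").ok(), None);
    println!("dispatch ok");
}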
|
||||
|
||||
fn prefix(&mut self, prefix: &PrefixOp, arg: &Box<Expression>) -> Expr {
|
||||
match prefix.builtin {
|
||||
Some(op) => {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(op)));
|
||||
Expr::Call { f, args: vec![self.expression(arg)] }
|
||||
},
|
||||
None => { //TODO need this for custom prefix ops
|
||||
Expr::UnimplementedSigilValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn declaration(&mut self, declaration: &Declaration) -> Stmt {
|
||||
use self::Declaration::*;
|
||||
match declaration {
|
||||
Binding {name, constant, expr, .. } => Stmt::Binding { name: name.clone(), constant: *constant, expr: self.expression(expr) },
|
||||
FuncDecl(Signature { name, params, .. }, statements) => Stmt::PreBinding {
|
||||
name: name.clone(),
|
||||
func: Func::UserDefined {
|
||||
name: Some(name.clone()),
|
||||
params: params.iter().map(|param| param.name.clone()).collect(),
|
||||
body: self.block(&statements),
|
||||
}
|
||||
},
|
||||
TypeDecl { .. } => Stmt::Noop,
|
||||
TypeAlias{ .. } => Stmt::Noop,
|
||||
Interface { .. } => Stmt::Noop,
|
||||
Impl { .. } => Stmt::Expr(Expr::UnimplementedSigilValue),
|
||||
_ => Stmt::Expr(Expr::UnimplementedSigilValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* ig var pat
|
||||
* x is SomeBigOldEnum(_, x, Some(t))
|
||||
*/
|
||||
@@ -243,13 +385,26 @@ fn handle_symbol(symbol: Option<&Symbol>, inner_patterns: &Vec<Pattern>, symbol_
|
||||
_ => panic!("Symbol is not a data constructor - this should've been caught in type-checking"),
|
||||
});
|
||||
let bound_vars = inner_patterns.iter().map(|p| match p {
|
||||
Literal(PatternLiteral::VarPattern(var)) => Some(var.clone()),
|
||||
VarOrName(qualified_name) => {
|
||||
let fqsn = symbol_table.get_fqsn_from_id(&qualified_name.id);
|
||||
let symbol_exists = fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)).is_some();
|
||||
if symbol_exists {
|
||||
None
|
||||
} else {
|
||||
let QualifiedName { components, .. } = qualified_name;
|
||||
if components.len() == 1 {
|
||||
Some(components[0].clone())
|
||||
} else {
|
||||
panic!("Bad variable name in pattern");
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => None,
|
||||
}).collect();
|
||||
|
||||
let subpatterns = inner_patterns.iter().map(|p| match p {
|
||||
Ignored => None,
|
||||
Literal(PatternLiteral::VarPattern(_)) => None,
|
||||
VarOrName(_) => None,
|
||||
Literal(other) => Some(other.to_subpattern(symbol_table)),
|
||||
tp @ TuplePattern(_) => Some(tp.to_subpattern(symbol_table)),
|
||||
ts @ TupleStruct(_, _) => Some(ts.to_subpattern(symbol_table)),
|
||||
@@ -278,10 +433,12 @@ impl Pattern {
|
||||
fn to_alternative(&self, item: Vec<Stmt>, symbol_table: &SymbolTable) -> Alternative {
|
||||
let s = self.to_subpattern(symbol_table);
|
||||
Alternative {
|
||||
tag: s.tag,
|
||||
subpatterns: s.subpatterns,
|
||||
bound_vars: s.bound_vars,
|
||||
guard: s.guard,
|
||||
matchable: Subpattern {
|
||||
tag: s.tag,
|
||||
subpatterns: s.subpatterns,
|
||||
bound_vars: s.bound_vars,
|
||||
guard: s.guard,
|
||||
},
|
||||
item
|
||||
}
|
||||
}
|
||||
@@ -289,9 +446,14 @@ impl Pattern {
|
||||
fn to_subpattern(&self, symbol_table: &SymbolTable) -> Subpattern {
|
||||
use self::Pattern::*;
|
||||
match self {
|
||||
TupleStruct(name, inner_patterns) => {
|
||||
let symbol = symbol_table.lookup_by_name(name).expect(&format!("Symbol {} not found", name));
|
||||
handle_symbol(Some(symbol), inner_patterns, symbol_table)
|
||||
TupleStruct(QualifiedName{ components, id }, inner_patterns) => {
|
||||
let fqsn = symbol_table.get_fqsn_from_id(&id);
|
||||
match fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)) {
|
||||
Some(symbol) => handle_symbol(Some(symbol), inner_patterns, symbol_table),
|
||||
None => {
|
||||
panic!("Symbol {:?} not found", components);
|
||||
}
|
||||
}
|
||||
},
|
||||
TuplePattern(inner_patterns) => handle_symbol(None, inner_patterns, symbol_table),
|
||||
Record(_name, _pairs) => {
|
||||
@@ -299,12 +461,33 @@ impl Pattern {
|
||||
},
|
||||
Ignored => Subpattern { tag: None, subpatterns: vec![], guard: None, bound_vars: vec![] },
|
||||
Literal(lit) => lit.to_subpattern(symbol_table),
|
||||
VarOrName(QualifiedName { components, id }) => {
|
||||
// if fqsn is Some, treat this as a symbol pattern. If it's None, treat it
|
||||
// as a variable.
|
||||
let fqsn = symbol_table.get_fqsn_from_id(&id);
|
||||
match fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)) {
|
||||
Some(symbol) => handle_symbol(Some(symbol), &vec![], symbol_table),
|
||||
None => {
|
||||
let name = if components.len() == 1 {
|
||||
components[0].clone()
|
||||
} else {
|
||||
panic!("check this line of code yo");
|
||||
};
|
||||
Subpattern {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard: None,
|
||||
bound_vars: vec![Some(name.clone())],
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PatternLiteral {
|
||||
fn to_subpattern(&self, symbol_table: &SymbolTable) -> Subpattern {
|
||||
fn to_subpattern(&self, _symbol_table: &SymbolTable) -> Subpattern {
|
||||
use self::PatternLiteral::*;
|
||||
match self {
|
||||
NumPattern { neg, num } => {
|
||||
@@ -316,7 +499,7 @@ impl PatternLiteral {
|
||||
_ => panic!("This should never happen")
|
||||
});
|
||||
let guard = Some(Expr::Call {
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Rc::new("==".to_string())))),
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Equality))),
|
||||
args: vec![comparison, Expr::ConditionalTargetSigilValue],
|
||||
});
|
||||
Subpattern {
|
||||
@@ -328,7 +511,7 @@ impl PatternLiteral {
|
||||
},
|
||||
StringPattern(s) => {
|
||||
let guard = Some(Expr::Call {
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Rc::new("==".to_string())))),
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Equality))),
|
||||
args: vec![Expr::Lit(Lit::StringLit(s.clone())), Expr::ConditionalTargetSigilValue]
|
||||
});
|
||||
|
||||
@@ -344,7 +527,7 @@ impl PatternLiteral {
|
||||
Expr::ConditionalTargetSigilValue
|
||||
} else {
|
||||
Expr::Call {
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Rc::new("!".to_string())))),
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::BooleanNot))),
|
||||
args: vec![Expr::ConditionalTargetSigilValue]
|
||||
}
|
||||
});
|
||||
@@ -355,58 +538,7 @@ impl PatternLiteral {
|
||||
bound_vars: vec![],
|
||||
}
|
||||
},
|
||||
VarPattern(var) => match symbol_table.lookup_by_name(var) {
|
||||
Some(symbol) => handle_symbol(Some(symbol), &vec![], symbol_table),
|
||||
None => Subpattern {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard: None,
|
||||
bound_vars: vec![Some(var.clone())],
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Declaration {
|
||||
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
|
||||
use self::Declaration::*;
|
||||
match self {
|
||||
Binding {name, constant, expr, .. } => Stmt::Binding { name: name.clone(), constant: *constant, expr: expr.node().reduce(symbol_table) },
|
||||
FuncDecl(Signature { name, params, .. }, statements) => Stmt::PreBinding {
|
||||
name: name.clone(),
|
||||
func: Func::UserDefined {
|
||||
name: Some(name.clone()),
|
||||
params: params.iter().map(|param| param.name.clone()).collect(),
|
||||
body: reduce_block(&statements, symbol_table),
|
||||
}
|
||||
},
|
||||
TypeDecl { .. } => Stmt::Noop,
|
||||
TypeAlias(_, _) => Stmt::Noop,
|
||||
Interface { .. } => Stmt::Noop,
|
||||
Impl { .. } => Stmt::Expr(Expr::UnimplementedSigilValue),
|
||||
_ => Stmt::Expr(Expr::UnimplementedSigilValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BinOp {
|
||||
fn reduce(&self, symbol_table: &SymbolTable, lhs: &Box<Meta<Expression>>, rhs: &Box<Meta<Expression>>) -> Expr {
|
||||
if **self.sigil() == "=" {
|
||||
Expr::Assign {
|
||||
val: Box::new(lhs.node().reduce(symbol_table)),
|
||||
expr: Box::new(rhs.node().reduce(symbol_table)),
|
||||
}
|
||||
} else {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(self.sigil().clone())));
|
||||
Expr::Call { f, args: vec![lhs.node().reduce(symbol_table), rhs.node().reduce(symbol_table)]}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrefixOp {
|
||||
fn reduce(&self, symbol_table: &SymbolTable, arg: &Box<Meta<Expression>>) -> Expr {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(self.sigil().clone())));
|
||||
Expr::Call { f, args: vec![arg.node().reduce(symbol_table)]}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,17 +10,22 @@ use schala_repl::{ProgrammingLanguageInterface,
|
||||
ComputationRequest, ComputationResponse,
|
||||
LangMetaRequest, LangMetaResponse, GlobalOutputStats,
|
||||
DebugResponse, DebugAsk};
|
||||
use crate::{ast, reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table};
|
||||
use crate::{ast, reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table, source_map};
|
||||
|
||||
pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
|
||||
pub type SourceMapHandle = Rc<RefCell<source_map::SourceMap>>;
|
||||
|
||||
/// All the state necessary to parse and execute a Schala program are stored in this struct.
|
||||
/// `state` represents the execution state for the AST-walking interpreter, the other fields
|
||||
/// should be self-explanatory.
|
||||
pub struct Schala {
|
||||
source_reference: SourceReference,
|
||||
source_map: SourceMapHandle,
|
||||
state: eval::State<'static>,
|
||||
symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
|
||||
symbol_table: SymbolTableHandle,
|
||||
resolver: crate::scope_resolution::ScopeResolver<'static>,
|
||||
type_context: typechecking::TypeContext<'static>,
|
||||
active_parser: Option<parsing::Parser>,
|
||||
active_parser: parsing::Parser,
|
||||
}
|
||||
|
||||
impl Schala {
|
||||
@@ -34,13 +39,17 @@ impl Schala {
|
||||
impl Schala {
|
||||
/// Creates a new Schala environment *without* any prelude.
|
||||
fn new_blank_env() -> Schala {
|
||||
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
|
||||
let source_map = Rc::new(RefCell::new(source_map::SourceMap::new()));
|
||||
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new(source_map.clone())));
|
||||
Schala {
|
||||
//TODO maybe these can be the same structure
|
||||
source_reference: SourceReference::new(),
|
||||
symbol_table: symbols.clone(),
|
||||
state: eval::State::new(symbols),
|
||||
source_map: source_map.clone(),
|
||||
resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
|
||||
state: eval::State::new(),
|
||||
type_context: typechecking::TypeContext::new(),
|
||||
active_parser: None,
|
||||
active_parser: parsing::Parser::new(source_map)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,7 +60,10 @@ impl Schala {
|
||||
let mut s = Schala::new_blank_env();
|
||||
|
||||
let request = ComputationRequest { source: prelude, debug_requests: HashSet::default() };
|
||||
s.run_computation(request);
|
||||
let response = s.run_computation(request);
|
||||
if let Err(msg) = response.main_output {
|
||||
panic!("Error in prelude, panicking: {}", msg);
|
||||
}
|
||||
s
|
||||
}
|
||||
|
||||
@@ -94,54 +106,65 @@ fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArti
|
||||
}
|
||||
|
||||
fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
use crate::parsing::Parser;
|
||||
|
||||
let mut parser = match handle.active_parser.take() {
|
||||
None => Parser::new(input),
|
||||
Some(parser) => parser
|
||||
};
|
||||
use ParsingDebugType::*;
|
||||
|
||||
let ref mut parser = handle.active_parser;
|
||||
parser.add_new_tokens(input);
|
||||
let ast = parser.parse();
|
||||
let trace = parser.format_parse_trace();
|
||||
|
||||
comp.map(|comp| {
|
||||
let debug_info = match comp.parsing.as_ref().unwrap_or(&ParsingDebugType::CompactAST) {
|
||||
ParsingDebugType::CompactAST => format!("{:?}", ast),
|
||||
ParsingDebugType::ExpandedAST => format!("{:#?}", ast),
|
||||
ParsingDebugType::Trace => format!("{}", trace[0]) //TODO fix this
|
||||
let debug_format = comp.parsing.as_ref().unwrap_or(&CompactAST);
|
||||
let debug_info = match debug_format {
|
||||
CompactAST => match ast{
|
||||
Ok(ref ast) => ast.compact_debug(),
|
||||
Err(_) => "Error - see output".to_string(),
|
||||
},
|
||||
ExpandedAST => match ast{
|
||||
Ok(ref ast) => ast.expanded_debug(),
|
||||
Err(_) => "Error - see output".to_string(),
|
||||
},
|
||||
Trace => parser.format_parse_trace(),
|
||||
};
|
||||
comp.add_artifact(debug_info);
|
||||
});
|
||||
ast.map_err(|err| format_parse_error(err, handle))
|
||||
ast.map_err(|err| format_parse_error(err, &handle.source_reference))
|
||||
}
|
||||
|
||||
fn format_parse_error(error: parsing::ParseError, handle: &mut Schala) -> String {
|
||||
let line_num = error.token.line_num;
|
||||
let ch = error.token.char_num;
|
||||
let line_from_program = handle.source_reference.get_line(line_num);
|
||||
fn format_parse_error(error: parsing::ParseError, source_reference: &SourceReference) -> String {
|
||||
let line_num = error.token.location.line_num;
|
||||
let ch = error.token.location.char_num;
|
||||
let line_from_program = source_reference.get_line(line_num);
|
||||
let location_pointer = format!("{}^", " ".repeat(ch));
|
||||
|
||||
let line_num_digits = format!("{}", line_num).chars().count();
|
||||
let space_padding = " ".repeat(line_num_digits);
|
||||
|
||||
let production = match error.production_name {
|
||||
Some(n) => format!("\n(from production \"{}\")", n),
|
||||
None => "".to_string()
|
||||
};
|
||||
|
||||
format!(r#"
|
||||
{error_msg}
|
||||
{error_msg}{production}
|
||||
{space_padding} |
|
||||
{line_num} | {}
|
||||
{space_padding} | {}
|
||||
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num)
|
||||
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num, production=production
|
||||
)
|
||||
}
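For reference, a hypothetical parse error rendered with the new format would look roughly like this (the message, source line, line number, and production name below are invented for illustration, not taken from the diff):

Expected ')', got 'let'
(from production "paren_expr")
  |
4 | fn foo() { (2 + let }
  |                 ^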
|
||||
|
||||
fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
let add = handle.symbol_table.borrow_mut().add_top_level_symbols(&input);
|
||||
match add {
|
||||
Ok(()) => {
|
||||
let debug = handle.symbol_table.borrow().debug_symbol_table();
|
||||
comp.map(|comp| comp.add_artifact(debug));
|
||||
Ok(input)
|
||||
},
|
||||
Err(msg) => Err(msg)
|
||||
}
|
||||
let () = handle.symbol_table.borrow_mut().add_top_level_symbols(&input)?;
|
||||
comp.map(|comp| {
|
||||
let debug = handle.symbol_table.borrow().debug_symbol_table();
|
||||
comp.add_artifact(debug);
|
||||
});
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn scope_resolution(mut input: ast::AST, handle: &mut Schala, _com: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
let () = handle.resolver.resolve(&mut input)?;
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
@@ -159,7 +182,7 @@ fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebu
|
||||
|
||||
fn ast_reducing(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<reduced_ast::ReducedAST, String> {
|
||||
let ref symbol_table = handle.symbol_table.borrow();
|
||||
let output = input.reduce(symbol_table);
|
||||
let output = reduced_ast::reduce(&input, symbol_table);
|
||||
comp.map(|comp| comp.add_artifact(format!("{:?}", output)));
|
||||
Ok(output)
|
||||
}
|
||||
@@ -218,6 +241,7 @@ fn stage_names() -> Vec<&'static str> {
|
||||
"tokenizing",
|
||||
"parsing",
|
||||
"symbol-table",
|
||||
"scope-resolution",
|
||||
"typechecking",
|
||||
"ast-reduction",
|
||||
"ast-walking-evaluation"
|
||||
@@ -244,24 +268,26 @@ impl ProgrammingLanguageInterface for Schala {
|
||||
let stage_names = stage_names();
|
||||
let cur_stage_name = stage_names[n];
|
||||
let ask = token.debug_requests.iter().find(|ask| ask.is_for_stage(cur_stage_name));
|
||||
let mut debug_artifact = ask.and_then(|ask| match ask {
|
||||
DebugAsk::ByStage { token, .. } => token.as_ref(),
|
||||
_ => None
|
||||
}).map(|token| {
|
||||
let parsing = if cur_stage_name != "parsing" {
|
||||
None
|
||||
} else {
|
||||
Some(match &token[..] {
|
||||
|
||||
let parsing = match ask {
|
||||
Some(DebugAsk::ByStage { token, .. }) if cur_stage_name == "parsing" => Some(
|
||||
token.as_ref().map(|token| match &token[..] {
|
||||
"compact" => ParsingDebugType::CompactAST,
|
||||
"expanded" => ParsingDebugType::ExpandedAST,
|
||||
"trace" => ParsingDebugType::Trace,
|
||||
_ => ParsingDebugType::CompactAST,
|
||||
})
|
||||
};
|
||||
PassDebugArtifact { parsing, ..Default::default() }
|
||||
}).unwrap_or(ParsingDebugType::CompactAST)
|
||||
),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let mut debug_artifact = ask.map(|_| PassDebugArtifact {
|
||||
parsing, ..Default::default()
|
||||
});
|
||||
|
||||
let output = func(input, token.schala, debug_artifact.as_mut());
|
||||
|
||||
//TODO I think this is not counting the time since the *previous* stage
|
||||
token.stage_durations.push((cur_stage_name.to_string(), token.sw.elapsed()));
|
||||
if let Some(artifact) = debug_artifact {
|
||||
for value in artifact.artifacts.into_iter() {
|
||||
@@ -283,9 +309,10 @@ impl ProgrammingLanguageInterface for Schala {
|
||||
.and_then(|source| output_wrapper(0, tokenizing, source, &mut tok))
|
||||
.and_then(|tokens| output_wrapper(1, parsing, tokens, &mut tok))
|
||||
.and_then(|ast| output_wrapper(2, symbol_table, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(3, typechecking, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(4, ast_reducing, ast, &mut tok))
|
||||
.and_then(|reduced_ast| output_wrapper(5, eval, reduced_ast, &mut tok));
|
||||
.and_then(|ast| output_wrapper(3, scope_resolution, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(4, typechecking, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(5, ast_reducing, ast, &mut tok))
|
||||
.and_then(|reduced_ast| output_wrapper(6, eval, reduced_ast, &mut tok));
|
||||
|
||||
let total_duration = sw.elapsed();
|
||||
let global_output_stats = GlobalOutputStats {
|
||||
|
||||
schala-lang/language/src/scope_resolution.rs (new file, 119 lines)
@@ -0,0 +1,119 @@
use std::rc::Rc;

use crate::schala::SymbolTableHandle;
use crate::symbol_table::{ScopeSegment, FullyQualifiedSymbolName};
use crate::ast::*;
use crate::util::ScopeStack;

type FQSNPrefix = Vec<ScopeSegment>;

pub struct ScopeResolver<'a> {
  symbol_table_handle: SymbolTableHandle,
  name_scope_stack: ScopeStack<'a, Rc<String>, FQSNPrefix>,
}

impl<'a> ASTVisitor for ScopeResolver<'a> {
  //TODO need to un-insert these - maybe need to rethink visitor
  fn import(&mut self, import_spec: &ImportSpecifier) {
    let ref symbol_table = self.symbol_table_handle.borrow();
    let ImportSpecifier { ref path_components, ref imported_names, .. } = &import_spec;
    match imported_names {
      ImportedNames::All => {
        let prefix = FullyQualifiedSymbolName(path_components.iter().map(|c| ScopeSegment {
          name: c.clone(),
        }).collect());
        let members = symbol_table.lookup_children_of_fqsn(&prefix);
        for member in members.into_iter() {
          let local_name = member.0.last().unwrap().name.clone();
          self.name_scope_stack.insert(local_name.clone(), member.0);
        }
      },
      ImportedNames::LastOfPath => {
        let name = path_components.last().unwrap(); //TODO handle better
        let fqsn_prefix = path_components.iter().map(|c| ScopeSegment {
          name: c.clone(),
        }).collect();
        self.name_scope_stack.insert(name.clone(), fqsn_prefix);
      }
      ImportedNames::List(ref names) => {
        let fqsn_prefix: FQSNPrefix = path_components.iter().map(|c| ScopeSegment {
          name: c.clone(),
        }).collect();
        for name in names.iter() {
          self.name_scope_stack.insert(name.clone(), fqsn_prefix.clone());
        }
      }
    };
  }

  fn qualified_name(&mut self, qualified_name: &QualifiedName) {
    let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
    let fqsn = self.lookup_name_in_scope(&qualified_name);
    let ref id = qualified_name.id;
    symbol_table.map_id_to_fqsn(id, fqsn);
  }

  fn named_struct(&mut self, name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {
    let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
    let ref id = name.id;
    let fqsn = self.lookup_name_in_scope(&name);
    symbol_table.map_id_to_fqsn(id, fqsn);
  }

  fn pattern(&mut self, pat: &Pattern) {
    use Pattern::*;
    match pat {
      Ignored => (),
      TuplePattern(_) => (),
      Literal(_) => (),
      TupleStruct(name, _) => self.qualified_name_in_pattern(name),
      Record(name, _) => self.qualified_name_in_pattern(name),
      VarOrName(name) => self.qualified_name_in_pattern(name),
    };
  }
}

impl<'a> ScopeResolver<'a> {
  pub fn new(symbol_table_handle: SymbolTableHandle) -> ScopeResolver<'static> {
    let name_scope_stack = ScopeStack::new(None);
    ScopeResolver { symbol_table_handle, name_scope_stack }
  }
  pub fn resolve(&mut self, ast: &mut AST) -> Result<(), String> {
    walk_ast(self, ast);
    Ok(())
  }

  fn lookup_name_in_scope(&self, sym_name: &QualifiedName) -> FullyQualifiedSymbolName {
    let QualifiedName { components, .. } = sym_name;
    let first_component = &components[0];
    match self.name_scope_stack.lookup(first_component) {
      None => {
        FullyQualifiedSymbolName(components.iter().map(|name| ScopeSegment { name: name.clone() }).collect())
      },
      Some(fqsn_prefix) => {
        let mut full_name = fqsn_prefix.clone();
        let rest_of_name: FQSNPrefix = components[1..].iter().map(|name| ScopeSegment { name: name.clone() }).collect();
        full_name.extend_from_slice(&rest_of_name);
        FullyQualifiedSymbolName(full_name)
      }
    }
  }

  /// this might be a variable or a pattern. if a variable, set to none
  fn qualified_name_in_pattern(&mut self, qualified_name: &QualifiedName) {
    let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
    let ref id = qualified_name.id;
    let fqsn = self.lookup_name_in_scope(qualified_name);
    if symbol_table.lookup_by_fqsn(&fqsn).is_some() {
      symbol_table.map_id_to_fqsn(&id, fqsn);
    }
  }
}

#[cfg(test)]
mod tests {
  #[test]
  fn basic_scope() {

  }
}
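A rough sketch (not part of the diff) of how this resolver is driven, mirroring the wiring in Schala::new_blank_env and the pass order above; the source string is invented, and quick_ast is the test helper whose new signature appears at the bottom of this diff:

fn resolve_example() -> Result<(), String> {
  use std::cell::RefCell;
  use std::rc::Rc;

  // quick_ast now returns the AST plus the SourceMap built while parsing
  let (mut ast, source_map) = crate::util::quick_ast("let x = 10");
  let source_map = Rc::new(RefCell::new(source_map));
  let symbols = Rc::new(RefCell::new(crate::symbol_table::SymbolTable::new(source_map)));
  // populate the table first, as the symbol-table pass does
  symbols.borrow_mut().add_top_level_symbols(&ast)?;
  let mut resolver = crate::scope_resolution::ScopeResolver::new(symbols.clone());
  // maps each QualifiedName's id to a FullyQualifiedSymbolName in the symbol table
  resolver.resolve(&mut ast)
}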
schala-lang/language/src/source_map.rs (new file, 39 lines)
@@ -0,0 +1,39 @@
use std::collections::HashMap;
use std::fmt;

use crate::ast::ItemId;

pub type LineNumber = usize;

#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Location {
  pub line_num: LineNumber,
  pub char_num: usize,
}

impl fmt::Display for Location {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    write!(f, "{}:{}", self.line_num, self.char_num)
  }
}

pub struct SourceMap {
  map: HashMap<ItemId, Location>
}

impl SourceMap {
  pub fn new() -> SourceMap {
    SourceMap { map: HashMap::new() }
  }

  pub fn add_location(&mut self, id: &ItemId, loc: Location) {
    self.map.insert(id.clone(), loc);
  }

  pub fn lookup(&self, id: &ItemId) -> Option<Location> {
    match self.map.get(id) {
      Some(loc) => Some(loc.clone()),
      None => None
    }
  }
}
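A minimal sketch (not in the diff) of the intended flow; ItemId's constructor is not shown in this diff, so the id is taken as a parameter rather than constructed here:

fn source_map_example(id: &crate::ast::ItemId) {
  let mut map = SourceMap::new();
  // the tokenizer/parser records where each AST item came from...
  map.add_location(id, Location { line_num: 3, char_num: 7 });
  // ...and later passes (e.g. duplicate-name tracking) look the location up by id
  assert_eq!(map.lookup(id), Some(Location { line_num: 3, char_num: 7 }));
}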
@@ -4,71 +4,146 @@ use std::rc::Rc;
|
||||
use std::fmt;
|
||||
use std::fmt::Write;
|
||||
|
||||
use crate::schala::SourceMapHandle;
|
||||
use crate::source_map::{SourceMap, LineNumber};
|
||||
use crate::ast;
|
||||
use crate::ast::{Meta, TypeBody, TypeSingletonName, Signature, Statement};
|
||||
use crate::ast::{ItemId, TypeBody, TypeSingletonName, Signature, Statement, StatementKind, ModuleSpecifier};
|
||||
use crate::typechecking::TypeName;
|
||||
|
||||
type LineNumber = u32;
|
||||
type SymbolTrackTable = HashMap<Rc<String>, LineNumber>;
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Debug)]
|
||||
struct PathToSymbol(Vec<Rc<String>>);
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct ScopeSegment {
|
||||
scope_name: Rc<String>,
|
||||
scope_type: ScopeSegmentKind,
|
||||
#[allow(unused_macros)]
|
||||
macro_rules! fqsn {
|
||||
( $( $name:expr ; $kind:tt),* ) => {
|
||||
{
|
||||
let mut vec = vec![];
|
||||
$(
|
||||
vec.push(crate::symbol_table::ScopeSegment::new(std::rc::Rc::new($name.to_string())));
|
||||
)*
|
||||
FullyQualifiedSymbolName(vec)
|
||||
}
|
||||
};
|
||||
}
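Note that the kind token after each `;` (tr, ty, fn in the tests further down) is matched but currently ignored; only the name contributes to the result. Roughly, the expansion looks like:

// fqsn!("outer"; ty, "inner"; tr) builds approximately:
// FullyQualifiedSymbolName(vec![
//   ScopeSegment::new(Rc::new("outer".to_string())),
//   ScopeSegment::new(Rc::new("inner".to_string())),
// ])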
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum ScopeSegmentKind {
|
||||
Function,
|
||||
//Type,
|
||||
mod symbol_trie;
|
||||
use symbol_trie::SymbolTrie;
|
||||
mod test;
|
||||
|
||||
/// Keeps track of what names were used in a given namespace. Call try_register to add a name to
|
||||
/// the table, or report an error if a name already exists.
|
||||
struct DuplicateNameTrackTable {
|
||||
table: HashMap<Rc<String>, LineNumber>,
|
||||
}
|
||||
|
||||
impl DuplicateNameTrackTable {
|
||||
fn new() -> DuplicateNameTrackTable {
|
||||
DuplicateNameTrackTable { table: HashMap::new() }
|
||||
}
|
||||
|
||||
fn try_register(&mut self, name: &Rc<String>, id: &ItemId, source_map: &SourceMap) -> Result<(), LineNumber> {
|
||||
match self.table.entry(name.clone()) {
|
||||
Entry::Occupied(o) => {
|
||||
let line_number = o.get();
|
||||
Err(*line_number)
|
||||
},
|
||||
Entry::Vacant(v) => {
|
||||
let line_number = if let Some(loc) = source_map.lookup(id) {
|
||||
loc.line_num
|
||||
} else {
|
||||
0
|
||||
};
|
||||
v.insert(line_number);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
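A hedged sketch (not in the diff) of how try_register is meant to be called; it mirrors the inline usage in add_symbols_from_scope below, with the helper name invented for illustration:

fn register_or_report(table: &mut DuplicateNameTrackTable, name: &Rc<String>,
                      id: &ItemId, source_map: &SourceMap) -> Result<(), String> {
  // Ok(()) on first registration; Err(line) carries the line of the earlier definition
  table.try_register(name, id, source_map)
    .map_err(|line| format!("Duplicate definition: {}. It's already defined at {}", name, line))
}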
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)]
|
||||
pub struct FullyQualifiedSymbolName(pub Vec<ScopeSegment>);
|
||||
|
||||
impl fmt::Display for FullyQualifiedSymbolName {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let FullyQualifiedSymbolName(v) = self;
|
||||
for segment in v {
|
||||
write!(f, "::{}", segment)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
|
||||
pub struct ScopeSegment {
|
||||
pub name: Rc<String>, //TODO maybe this could be a &str, for efficiency?
|
||||
}
|
||||
|
||||
impl fmt::Display for ScopeSegment {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let kind = ""; //TODO implement some kind of kind-tracking here
|
||||
write!(f, "{}{}", self.name, kind)
|
||||
}
|
||||
}
|
||||
|
||||
impl ScopeSegment {
|
||||
pub fn new(name: Rc<String>) -> ScopeSegment {
|
||||
ScopeSegment { name }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//cf. p. 150 or so of Language Implementation Patterns
|
||||
pub struct SymbolTable {
|
||||
values: HashMap<PathToSymbol, Symbol>,
|
||||
source_map_handle: SourceMapHandle,
|
||||
symbol_path_to_symbol: HashMap<FullyQualifiedSymbolName, Symbol>,
|
||||
id_to_fqsn: HashMap<ItemId, FullyQualifiedSymbolName>,
|
||||
symbol_trie: SymbolTrie,
|
||||
}
|
||||
|
||||
//TODO add various types of lookups here, maybe multiple hash tables internally?
|
||||
impl SymbolTable {
|
||||
pub fn new() -> SymbolTable {
|
||||
pub fn new(source_map_handle: SourceMapHandle) -> SymbolTable {
|
||||
SymbolTable {
|
||||
values: HashMap::new(),
|
||||
source_map_handle,
|
||||
symbol_path_to_symbol: HashMap::new(),
|
||||
id_to_fqsn: HashMap::new(),
|
||||
symbol_trie: SymbolTrie::new()
|
||||
}
|
||||
}
|
||||
|
||||
fn add_new_symbol(&mut self, name: &Rc<String>, scope_path: &Vec<ScopeSegment>, spec: SymbolSpec) {
|
||||
let mut vec: Vec<Rc<String>> = scope_path.iter().map(|segment| segment.scope_name.clone()).collect();
|
||||
vec.push(name.clone());
|
||||
let symbol_path = PathToSymbol(vec);
|
||||
let symbol = Symbol { name: name.clone(), scopes: scope_path.to_vec(), spec };
|
||||
self.values.insert(symbol_path, symbol);
|
||||
pub fn map_id_to_fqsn(&mut self, id: &ItemId, fqsn: FullyQualifiedSymbolName) {
|
||||
self.id_to_fqsn.insert(id.clone(), fqsn);
|
||||
}
|
||||
|
||||
pub fn lookup_by_name(&self, name: &Rc<String>) -> Option<&Symbol> {
|
||||
self.lookup_by_path(name, &vec![])
|
||||
pub fn get_fqsn_from_id(&self, id: &ItemId) -> Option<FullyQualifiedSymbolName> {
|
||||
self.id_to_fqsn.get(&id).cloned()
|
||||
}
|
||||
|
||||
pub fn lookup_by_path(&self, name: &Rc<String>, path: &Vec<Rc<String>>) -> Option<&Symbol> {
|
||||
let mut vec = path.clone();
|
||||
vec.push(name.clone());
|
||||
let symbol_path = PathToSymbol(vec);
|
||||
self.values.get(&symbol_path)
|
||||
fn add_new_symbol(&mut self, local_name: &Rc<String>, scope_path: &Vec<ScopeSegment>, spec: SymbolSpec) {
|
||||
let mut vec: Vec<ScopeSegment> = scope_path.clone();
|
||||
vec.push(ScopeSegment { name: local_name.clone() });
|
||||
let fully_qualified_name = FullyQualifiedSymbolName(vec);
|
||||
let symbol = Symbol { local_name: local_name.clone(), fully_qualified_name: fully_qualified_name.clone(), spec };
|
||||
self.symbol_trie.insert(&fully_qualified_name);
|
||||
self.symbol_path_to_symbol.insert(fully_qualified_name, symbol);
|
||||
}
|
||||
|
||||
pub fn lookup_by_fqsn(&self, fully_qualified_path: &FullyQualifiedSymbolName) -> Option<&Symbol> {
|
||||
self.symbol_path_to_symbol.get(fully_qualified_path)
|
||||
}
|
||||
|
||||
pub fn lookup_children_of_fqsn(&self, path: &FullyQualifiedSymbolName) -> Vec<FullyQualifiedSymbolName> {
|
||||
self.symbol_trie.get_children(path)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Symbol {
|
||||
pub name: Rc<String>, //TODO does this need to be pub?
|
||||
scopes: Vec<ScopeSegment>,
|
||||
pub local_name: Rc<String>, //TODO does this need to be pub?
|
||||
fully_qualified_name: FullyQualifiedSymbolName,
|
||||
pub spec: SymbolSpec,
|
||||
}
|
||||
|
||||
impl fmt::Display for Symbol {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "<Name: {}, Spec: {}>", self.name, self.spec)
|
||||
write!(f, "<Local name: {}, Spec: {}>", self.local_name, self.spec)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -77,13 +152,18 @@ pub enum SymbolSpec {
|
||||
Func(Vec<TypeName>),
|
||||
DataConstructor {
|
||||
index: usize,
|
||||
type_name: Rc<String>,
|
||||
type_name: TypeName,
|
||||
type_args: Vec<Rc<String>>,
|
||||
},
|
||||
RecordConstructor {
|
||||
fields: HashMap<Rc<String>, Rc<String>>
|
||||
index: usize,
|
||||
members: HashMap<Rc<String>, TypeName>,
|
||||
type_name: TypeName,
|
||||
},
|
||||
Binding,
|
||||
Type {
|
||||
name: TypeName
|
||||
},
|
||||
Binding
|
||||
}
|
||||
|
||||
impl fmt::Display for SymbolSpec {
|
||||
@@ -92,8 +172,9 @@ impl fmt::Display for SymbolSpec {
|
||||
match self {
|
||||
Func(type_names) => write!(f, "Func({:?})", type_names),
|
||||
DataConstructor { index, type_name, type_args } => write!(f, "DataConstructor(idx: {})({:?} -> {})", index, type_args, type_name),
|
||||
RecordConstructor { fields: _fields } => write!(f, "RecordConstructor( <fields> )"),
|
||||
RecordConstructor { type_name, index, ..} => write!(f, "RecordConstructor(idx: {})(<members> -> {})", index, type_name),
|
||||
Binding => write!(f, "Binding"),
|
||||
Type { name } => write!(f, "Type <{}>", name),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -104,65 +185,67 @@ impl SymbolTable {
|
||||
|
||||
pub fn add_top_level_symbols(&mut self, ast: &ast::AST) -> Result<(), String> {
|
||||
let mut scope_name_stack = Vec::new();
|
||||
self.add_symbols_from_scope(&ast.0, &mut scope_name_stack)
|
||||
self.add_symbols_from_scope(&ast.statements, &mut scope_name_stack)
|
||||
}
|
||||
|
||||
fn add_symbols_from_scope<'a>(&'a mut self, statements: &Vec<Meta<Statement>>, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
fn add_symbols_from_scope<'a>(&'a mut self, statements: &Vec<Statement>, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
use self::ast::Declaration::*;
|
||||
|
||||
fn insert_and_check_duplicate_symbol(table: &mut SymbolTrackTable, name: &Rc<String>) -> Result<(), String> {
|
||||
match table.entry(name.clone()) {
|
||||
Entry::Occupied(o) => {
|
||||
let line_number = o.get(); //TODO make this actually work
|
||||
Err(format!("Duplicate definition: {}. It's already defined at {}", name, line_number))
|
||||
let mut seen_identifiers = DuplicateNameTrackTable::new();
|
||||
let mut seen_modules = DuplicateNameTrackTable::new();
|
||||
|
||||
for statement in statements.iter() {
|
||||
match statement {
|
||||
Statement { kind: StatementKind::Declaration(decl), id } => {
|
||||
match decl {
|
||||
FuncSig(ref signature) => {
|
||||
seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
|
||||
self.add_function_signature(signature, scope_name_stack)?
|
||||
}
|
||||
FuncDecl(ref signature, ref body) => {
|
||||
seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
|
||||
self.add_function_signature(signature, scope_name_stack)?;
|
||||
scope_name_stack.push(ScopeSegment{
|
||||
name: signature.name.clone(),
|
||||
});
|
||||
let output = self.add_symbols_from_scope(body, scope_name_stack);
|
||||
scope_name_stack.pop();
|
||||
output?
|
||||
},
|
||||
TypeDecl { name, body, mutable } => {
|
||||
seen_identifiers.try_register(&name.name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate type definition: {}. It's already defined at {}", name.name, line))?;
|
||||
self.add_type_decl(name, body, mutable, scope_name_stack)?
|
||||
},
|
||||
Binding { name, .. } => {
|
||||
seen_identifiers.try_register(&name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate variable definition: {}. It's already defined at {}", name, line))?;
|
||||
self.add_new_symbol(name, scope_name_stack, SymbolSpec::Binding);
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
},
|
||||
Entry::Vacant(v) => {
|
||||
let line_number = 0; //TODO should work
|
||||
v.insert(line_number);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut seen_identifiers: SymbolTrackTable = HashMap::new();
|
||||
|
||||
for meta in statements.iter() {
|
||||
let statement = meta.node();
|
||||
if let Statement::Declaration(decl) = statement {
|
||||
match decl {
|
||||
FuncSig(ref signature) => {
|
||||
insert_and_check_duplicate_symbol(&mut seen_identifiers, &signature.name)?;
|
||||
self.add_function_signature(signature, scope_name_stack)?
|
||||
}
|
||||
FuncDecl(ref signature, ref body) => {
|
||||
insert_and_check_duplicate_symbol(&mut seen_identifiers, &signature.name)?;
|
||||
self.add_function_signature(signature, scope_name_stack)?;
|
||||
scope_name_stack.push(ScopeSegment{
|
||||
scope_name: signature.name.clone(),
|
||||
scope_type: ScopeSegmentKind::Function,
|
||||
});
|
||||
let output = self.add_symbols_from_scope(body, scope_name_stack);
|
||||
let _ = scope_name_stack.pop();
|
||||
output?
|
||||
},
|
||||
TypeDecl { name, body, mutable } => {
|
||||
insert_and_check_duplicate_symbol(&mut seen_identifiers, &name.name)?;
|
||||
self.add_type_decl(name, body, mutable, scope_name_stack)?
|
||||
},
|
||||
Binding { name, .. } => {
|
||||
insert_and_check_duplicate_symbol(&mut seen_identifiers, name)?;
|
||||
self.add_new_symbol(name, scope_name_stack, SymbolSpec::Binding);
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
Statement { kind: StatementKind::Module(ModuleSpecifier { name, contents}), id } => {
|
||||
seen_modules.try_register(&name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate module definition: {}. It's already defined at {}", name, line))?;
|
||||
scope_name_stack.push(ScopeSegment { name: name.clone() });
|
||||
let output = self.add_symbols_from_scope(contents, scope_name_stack);
|
||||
scope_name_stack.pop();
|
||||
output?
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
pub fn debug_symbol_table(&self) -> String {
|
||||
let mut output = format!("Symbol table\n");
|
||||
for (name, sym) in &self.values {
|
||||
write!(output, "{:?} -> {}\n", name, sym).unwrap();
|
||||
let mut sorted_symbols: Vec<(&FullyQualifiedSymbolName, &Symbol)> = self.symbol_path_to_symbol.iter().collect();
|
||||
sorted_symbols.sort_by(|(fqsn, _), (other_fqsn, _)| fqsn.cmp(other_fqsn));
|
||||
for (name, sym) in sorted_symbols.iter() {
|
||||
write!(output, "{} -> {}\n", name, sym).unwrap();
|
||||
}
|
||||
output
|
||||
}
|
||||
@@ -177,45 +260,68 @@ impl SymbolTable {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
//TODO handle type mutability
|
||||
fn add_type_decl(&mut self, type_name: &TypeSingletonName, body: &TypeBody, _mutable: &bool, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
use crate::ast::{TypeIdentifier, Variant};
|
||||
let TypeBody(variants) = body;
|
||||
let TypeSingletonName { name, .. } = type_name;
|
||||
//scope_name_stack.push(name.clone()); //TODO adding this makes variants scoped under their
|
||||
//type name and breaks a lot of things - don't add it until importing names works
|
||||
let ref type_name = type_name.name;
|
||||
|
||||
|
||||
let type_spec = SymbolSpec::Type {
|
||||
name: type_name.clone(),
|
||||
};
|
||||
self.add_new_symbol(type_name, &scope_name_stack, type_spec);
|
||||
|
||||
scope_name_stack.push(ScopeSegment{
|
||||
name: type_name.clone(),
|
||||
});
|
||||
//TODO figure out why _params isn't being used here
|
||||
for (index, var) in variants.iter().enumerate() {
|
||||
match var {
|
||||
Variant::UnitStruct(variant_name) => {
|
||||
let spec = SymbolSpec::DataConstructor {
|
||||
index,
|
||||
type_name: name.clone(),
|
||||
type_name: type_name.clone(),
|
||||
type_args: vec![],
|
||||
};
|
||||
self.add_new_symbol(variant_name, scope_name_stack, spec);
|
||||
},
|
||||
Variant::TupleStruct(variant_name, tuple_members) => {
|
||||
//TODO fix the notion of a tuple type
|
||||
let type_args = tuple_members.iter().map(|type_name| match type_name {
|
||||
TypeIdentifier::Singleton(TypeSingletonName { name, ..}) => name.clone(),
|
||||
TypeIdentifier::Tuple(_) => unimplemented!(),
|
||||
}).collect();
|
||||
let spec = SymbolSpec::DataConstructor {
|
||||
index,
|
||||
type_name: name.clone(),
|
||||
type_name: type_name.clone(),
|
||||
type_args
|
||||
};
|
||||
self.add_new_symbol(variant_name, scope_name_stack, spec);
|
||||
},
|
||||
//TODO if there is only one variant, and it is a record, it doesn't need to have an
|
||||
//explicit name
|
||||
Variant::Record { name, members: _members } => {
|
||||
let fields = HashMap::new();
|
||||
let spec = SymbolSpec::RecordConstructor { fields };
|
||||
Variant::Record { name, members: defined_members } => {
|
||||
let mut members = HashMap::new();
|
||||
let mut duplicate_member_definitions = Vec::new();
|
||||
for (member_name, member_type) in defined_members {
|
||||
match members.entry(member_name.clone()) {
|
||||
Entry::Occupied(_) => duplicate_member_definitions.push(member_name.clone()),
|
||||
Entry::Vacant(v) => {
|
||||
v.insert(match member_type {
|
||||
TypeIdentifier::Singleton(TypeSingletonName { name, ..}) => name.clone(),
|
||||
TypeIdentifier::Tuple(_) => unimplemented!(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if duplicate_member_definitions.len() != 0 {
|
||||
return Err(format!("Duplicate member(s) in definition of type {}: {:?}", type_name, duplicate_member_definitions));
|
||||
}
|
||||
let spec = SymbolSpec::RecordConstructor { index, type_name: type_name.clone(), members };
|
||||
self.add_new_symbol(name, scope_name_stack, spec);
|
||||
},
|
||||
}
|
||||
}
|
||||
//scope_name_stack.pop();
|
||||
scope_name_stack.pop();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -235,157 +341,3 @@ impl LocalTypeContext {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod symbol_table_tests {
|
||||
use super::*;
|
||||
use crate::util::quick_ast;
|
||||
|
||||
macro_rules! values_in_table {
|
||||
//TODO multiple values
|
||||
($source:expr, $single_value:expr) => {
|
||||
{
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast($source);
|
||||
symbol_table.add_top_level_symbols(&ast).unwrap();
|
||||
match symbol_table.lookup_by_name($single_value) {
|
||||
Some(_spec) => (),
|
||||
None => panic!(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_symbol_table() {
|
||||
values_in_table! { "let a = 10; fn b() { 20 }", &rc!(b) };
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicates() {
|
||||
let source = r#"
|
||||
fn a() { 1 }
|
||||
fn b() { 2 }
|
||||
fn a() { 3 }
|
||||
"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
|
||||
assert!(output.contains("Duplicate"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicates_2() {
|
||||
let source = r#"
|
||||
let a = 20;
|
||||
let q = 39;
|
||||
let a = 30;
|
||||
"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
|
||||
assert!(output.contains("Duplicate"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicates_3() {
|
||||
let source = r#"
|
||||
fn a() {
|
||||
let a = 20
|
||||
let b = 40
|
||||
a + b
|
||||
}
|
||||
|
||||
fn q() {
|
||||
let x = 30
|
||||
let x = 33
|
||||
}
|
||||
"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
|
||||
assert!(output.contains("Duplicate"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_falsely_detect_duplicates() {
|
||||
let source = r#"
|
||||
let a = 20;
|
||||
fn some_func() {
|
||||
let a = 40;
|
||||
77
|
||||
}
|
||||
let q = 39;
|
||||
"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
symbol_table.add_top_level_symbols(&ast).unwrap();
|
||||
assert!(symbol_table.lookup_by_path(&rc!(a), &vec![]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(a), &vec![rc!(some_func)]).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
x + inner_func(x)
|
||||
}"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
symbol_table.add_top_level_symbols(&ast).unwrap();
|
||||
assert!(symbol_table.lookup_by_path(&rc!(outer_func), &vec![]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(inner_func), &vec![rc!(outer_func)]).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes_2() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
|
||||
fn second_inner_func() {
|
||||
fn another_inner_func() {
|
||||
}
|
||||
}
|
||||
|
||||
inner_func(x)
|
||||
}"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
symbol_table.add_top_level_symbols(&ast).unwrap();
|
||||
println!("{}", symbol_table.debug_symbol_table());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(outer_func), &vec![]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(inner_func), &vec![rc!(outer_func)]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(second_inner_func), &vec![rc!(outer_func)]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(another_inner_func), &vec![rc!(outer_func), rc!(second_inner_func)]).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes_3() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
|
||||
fn second_inner_func() {
|
||||
fn another_inner_func() {
|
||||
}
|
||||
fn another_inner_func() {
|
||||
}
|
||||
}
|
||||
|
||||
inner_func(x)
|
||||
}"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
|
||||
assert!(output.contains("Duplicate"))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
schala-lang/language/src/symbol_table/symbol_trie.rs (new file, 51 lines)
@@ -0,0 +1,51 @@
use radix_trie::{Trie, TrieCommon, TrieKey};
use super::FullyQualifiedSymbolName;
use std::hash::{Hasher, Hash};
use std::collections::hash_map::DefaultHasher;

#[derive(Debug)]
pub struct SymbolTrie(Trie<FullyQualifiedSymbolName, ()>);

impl TrieKey for FullyQualifiedSymbolName {
  fn encode_bytes(&self) -> Vec<u8> {
    let mut hasher = DefaultHasher::new();
    let mut output = vec![];
    let FullyQualifiedSymbolName(scopes) = self;
    for segment in scopes.iter() {
      segment.name.as_bytes().hash(&mut hasher);
      output.extend_from_slice(&hasher.finish().to_be_bytes());
    }
    output
  }
}

impl SymbolTrie {
  pub fn new() -> SymbolTrie {
    SymbolTrie(Trie::new())
  }

  pub fn insert(&mut self, fqsn: &FullyQualifiedSymbolName) {
    self.0.insert(fqsn.clone(), ());
  }

  pub fn get_children(&self, fqsn: &FullyQualifiedSymbolName) -> Vec<FullyQualifiedSymbolName> {
    let subtrie = match self.0.subtrie(fqsn) {
      Some(s) => s,
      None => return vec![]
    };
    let output: Vec<FullyQualifiedSymbolName> = subtrie.keys().filter(|cur_key| **cur_key != *fqsn).map(|fqsn| fqsn.clone()).collect();
    output
  }
}

#[test]
fn test_trie_insertion() {
  let mut trie = SymbolTrie::new();

  trie.insert(&fqsn!("unrelated"; ty, "thing"; tr));
  trie.insert(&fqsn!("outer"; ty, "inner"; tr));
  trie.insert(&fqsn!("outer"; ty, "inner"; ty, "still_inner"; tr));

  let children = trie.get_children(&fqsn!("outer"; ty, "inner"; tr));
  assert_eq!(children.len(), 1);
}
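Why get_children can use subtrie(): encode_bytes appends one 8-byte hash per path segment, so an FQSN's key is a byte-prefix of the key of every FQSN nested under it, and of nothing else. A small sketch in the style of the test above (not part of the diff):

#[test]
fn trie_key_prefix_property() {
  let parent = fqsn!("outer"; ty, "inner"; tr);
  let child = fqsn!("outer"; ty, "inner"; ty, "still_inner"; tr);
  assert!(child.encode_bytes().starts_with(&parent.encode_bytes()));
}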
schala-lang/language/src/symbol_table/test.rs (new file, 193 lines)
@@ -0,0 +1,193 @@
|
||||
#![cfg(test)]
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
use super::*;
|
||||
use crate::util::quick_ast;
|
||||
|
||||
fn add_symbols_from_source(src: &str) -> (SymbolTable, Result<(), String>) {
|
||||
let (ast, source_map) = quick_ast(src);
|
||||
let source_map = Rc::new(RefCell::new(source_map));
|
||||
let mut symbol_table = SymbolTable::new(source_map);
|
||||
let result = symbol_table.add_top_level_symbols(&ast);
|
||||
(symbol_table, result)
|
||||
}
|
||||
|
||||
macro_rules! values_in_table {
|
||||
($source:expr, $single_value:expr) => {
|
||||
values_in_table!($source => $single_value);
|
||||
};
|
||||
($source:expr => $( $value:expr ),* ) => {
|
||||
{
|
||||
let (symbol_table, _) = add_symbols_from_source($source);
|
||||
$(
|
||||
match symbol_table.lookup_by_fqsn($value) {
|
||||
Some(_spec) => (),
|
||||
None => panic!(),
|
||||
};
|
||||
)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_symbol_table() {
|
||||
values_in_table! { "let a = 10; fn b() { 20 }", &fqsn!("b"; tr) };
|
||||
values_in_table! { "type Option<T> = Some(T) | None" =>
|
||||
&fqsn!("Option"; tr),
|
||||
&fqsn!("Option"; ty, "Some"; tr),
|
||||
&fqsn!("Option"; ty, "None"; tr) };
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_function_definition_duplicates() {
|
||||
let source = r#"
|
||||
fn a() { 1 }
|
||||
fn b() { 2 }
|
||||
fn a() { 3 }
|
||||
"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
assert!(output.unwrap_err().contains("Duplicate function definition: a"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_variable_definition_duplicates() {
|
||||
let source = r#"
|
||||
let x = 9
|
||||
let a = 20
|
||||
let q = 39
|
||||
let a = 30
|
||||
"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
let output = output.unwrap_err();
|
||||
assert!(output.contains("Duplicate variable definition: a"));
|
||||
assert!(output.contains("already defined at 2"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_variable_definition_duplicates_in_function() {
|
||||
let source = r#"
|
||||
fn a() {
|
||||
let a = 20
|
||||
let b = 40
|
||||
a + b
|
||||
}
|
||||
|
||||
fn q() {
|
||||
let a = 29
|
||||
let x = 30
|
||||
let x = 33
|
||||
}
|
||||
"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
assert!(output.unwrap_err().contains("Duplicate variable definition: x"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_falsely_detect_duplicates() {
|
||||
let source = r#"
|
||||
let a = 20;
|
||||
fn some_func() {
|
||||
let a = 40;
|
||||
77
|
||||
}
|
||||
let q = 39;
|
||||
"#;
|
||||
let (symbol_table, _) = add_symbols_from_source(source);
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!["a"; tr]).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!["some_func"; fn, "a";tr]).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
x + inner_func(x)
|
||||
}"#;
|
||||
let (symbol_table, _) = add_symbols_from_source(source);
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes_2() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
|
||||
fn second_inner_func() {
|
||||
fn another_inner_func() {
|
||||
}
|
||||
}
|
||||
|
||||
inner_func(x)
|
||||
}"#;
|
||||
let (symbol_table, _) = add_symbols_from_source(source);
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; tr)).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; fn, "another_inner_func"; tr)).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes_3() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
|
||||
fn second_inner_func() {
|
||||
fn another_inner_func() {
|
||||
}
|
||||
fn another_inner_func() {
|
||||
}
|
||||
}
|
||||
|
||||
inner_func(x)
|
||||
}"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
assert!(output.unwrap_err().contains("Duplicate"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn modules() {
|
||||
let source = r#"
|
||||
module stuff {
|
||||
fn item() {
|
||||
}
|
||||
}
|
||||
|
||||
fn item()
|
||||
"#;
|
||||
values_in_table! { source =>
|
||||
&fqsn!("item"; tr),
|
||||
&fqsn!("stuff"; tr, "item"; tr)
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duplicate_modules() {
|
||||
let source = r#"
|
||||
module q {
|
||||
fn foo() { 4 }
|
||||
}
|
||||
|
||||
module a {
|
||||
fn foo() { 334 }
|
||||
}
|
||||
|
||||
module a {
|
||||
fn foo() { 256.1 }
|
||||
}
|
||||
"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
let output = output.unwrap_err();
|
||||
assert!(output.contains("Duplicate module"));
|
||||
assert!(output.contains("already defined at 5"));
|
||||
}
|
||||
@@ -4,6 +4,8 @@ use std::rc::Rc;
|
||||
use std::iter::{Iterator, Peekable};
|
||||
use std::fmt;
|
||||
|
||||
use crate::source_map::Location;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum TokenKind {
|
||||
Newline, Semicolon,
|
||||
@@ -19,7 +21,10 @@ pub enum TokenKind {
|
||||
|
||||
Operator(Rc<String>),
|
||||
DigitGroup(Rc<String>), HexLiteral(Rc<String>), BinNumberSigil,
|
||||
StrLiteral(Rc<String>),
|
||||
StrLiteral {
|
||||
s: Rc<String>,
|
||||
prefix: Option<Rc<String>>
|
||||
},
|
||||
Identifier(Rc<String>),
|
||||
Keyword(Kw),
|
||||
|
||||
@@ -35,7 +40,7 @@ impl fmt::Display for TokenKind {
|
||||
&Operator(ref s) => write!(f, "Operator({})", **s),
|
||||
&DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
|
||||
&HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
|
||||
&StrLiteral(ref s) => write!(f, "StrLiteral({})", s),
|
||||
&StrLiteral {ref s, .. } => write!(f, "StrLiteral({})", s),
|
||||
&Identifier(ref s) => write!(f, "Identifier({})", s),
|
||||
&Error(ref s) => write!(f, "Error({})", s),
|
||||
other => write!(f, "{:?}", other),
|
||||
@@ -55,7 +60,7 @@ pub enum Kw {
|
||||
Alias, Type, SelfType, SelfIdent,
|
||||
Interface, Impl,
|
||||
True, False,
|
||||
Module
|
||||
Module, Import
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
@@ -82,14 +87,14 @@ lazy_static! {
|
||||
"true" => Kw::True,
|
||||
"false" => Kw::False,
|
||||
"module" => Kw::Module,
|
||||
"import" => Kw::Import,
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Token {
|
||||
pub kind: TokenKind,
|
||||
pub line_num: usize,
|
||||
pub char_num: usize
|
||||
pub location: Location,
|
||||
}
|
||||
|
||||
impl Token {
|
||||
@@ -100,7 +105,7 @@ impl Token {
|
||||
}
|
||||
}
|
||||
pub fn to_string_with_metadata(&self) -> String {
|
||||
format!("{}(L:{},c:{})", self.kind, self.line_num, self.char_num)
|
||||
format!("{}({})", self.kind, self.location)
|
||||
}
|
||||
|
||||
pub fn get_kind(&self) -> TokenKind {
|
||||
@@ -161,14 +166,15 @@ pub fn tokenize(input: &str) -> Vec<Token> {
|
||||
'(' => LParen, ')' => RParen,
|
||||
'{' => LCurlyBrace, '}' => RCurlyBrace,
|
||||
'[' => LSquareBracket, ']' => RSquareBracket,
|
||||
'"' => handle_quote(&mut input),
|
||||
'"' => handle_quote(&mut input, None),
|
||||
'\\' => Backslash,
|
||||
c if c.is_digit(10) => handle_digit(c, &mut input),
|
||||
c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input),
|
||||
c if is_operator(&c) => handle_operator(c, &mut input),
|
||||
unknown => Error(format!("Unexpected character: {}", unknown)),
|
||||
};
|
||||
tokens.push(Token { kind: cur_tok_kind, line_num, char_num });
|
||||
let location = Location { line_num, char_num };
|
||||
tokens.push(Token { kind: cur_tok_kind, location });
|
||||
}
|
||||
tokens
|
||||
}
|
||||
@@ -188,7 +194,7 @@ fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) ->
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
||||
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>, quote_prefix: Option<&str>) -> TokenKind {
|
||||
let mut buf = String::new();
|
||||
loop {
|
||||
match input.next().map(|(_, _, c)| { c }) {
|
||||
@@ -210,7 +216,7 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind
|
||||
None => return TokenKind::Error(format!("Unclosed string")),
|
||||
}
|
||||
}
|
||||
TokenKind::StrLiteral(Rc::new(buf))
|
||||
TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
|
||||
}
|
||||
|
||||
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
||||
@@ -222,6 +228,10 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>
|
||||
|
||||
loop {
|
||||
match input.peek().map(|&(_, _, c)| { c }) {
|
||||
Some(c) if c == '"' => {
|
||||
input.next();
|
||||
return handle_quote(input, Some(&buf));
|
||||
},
|
||||
Some(c) if c.is_alphanumeric() || c == '_' => {
|
||||
input.next();
|
||||
buf.push(c);
|
||||
@@ -322,4 +332,13 @@ mod schala_tokenizer_tests {
|
||||
let token_kinds: Vec<TokenKind> = tokenize("1 `plus` 2").into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_literals() {
|
||||
let token_kinds: Vec<TokenKind> = tokenize(r#""some string""#).into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);
|
||||
|
||||
let token_kinds: Vec<TokenKind> = tokenize(r#"b"some bytestring""#).into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some bytestring".to_string()), prefix: Some(Rc::new("b".to_string())) }]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ use ena::unify::{UnifyKey, InPlaceUnificationTable, UnificationTable, EqUnifyVal
|
||||
|
||||
use crate::ast::*;
|
||||
use crate::util::ScopeStack;
|
||||
use crate::builtin::{PrefixOp, BinOp};
|
||||
use crate::util::deref_optional_box;
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
@@ -14,6 +14,7 @@ pub struct TypeData {
|
||||
}
|
||||
|
||||
impl TypeData {
|
||||
#[allow(dead_code)]
|
||||
pub fn new() -> TypeData {
|
||||
TypeData { ty: None }
|
||||
}
|
||||
@@ -38,6 +39,7 @@ impl TypeError {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)] // avoids warning from Compound
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum Type {
|
||||
Const(TypeConst),
|
||||
@@ -264,16 +266,18 @@ impl<'a> TypeContext<'a> {
|
||||
/// the AST to ReducedAST
|
||||
pub fn typecheck(&mut self, ast: &AST) -> Result<Type, TypeError> {
|
||||
let mut returned_type = Type::Const(TypeConst::Unit);
|
||||
for statement in ast.0.iter() {
|
||||
returned_type = self.statement(statement.node())?;
|
||||
for statement in ast.statements.iter() {
|
||||
returned_type = self.statement(statement)?;
|
||||
}
|
||||
Ok(returned_type)
|
||||
}
|
||||
|
||||
fn statement(&mut self, statement: &Statement) -> InferResult<Type> {
|
||||
match statement {
|
||||
Statement::ExpressionStatement(e) => self.expr(e.node()),
|
||||
Statement::Declaration(decl) => self.decl(decl),
|
||||
match &statement.kind {
|
||||
StatementKind::Expression(e) => self.expr(e),
|
||||
StatementKind::Declaration(decl) => self.decl(&decl),
|
||||
StatementKind::Import(_) => Ok(ty!(Unit)),
|
||||
StatementKind::Module(_) => Ok(ty!(Unit)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -281,7 +285,7 @@ impl<'a> TypeContext<'a> {
|
||||
use self::Declaration::*;
|
||||
match decl {
|
||||
Binding { name, expr, .. } => {
|
||||
let ty = self.expr(expr.node())?;
|
||||
let ty = self.expr(expr)?;
|
||||
self.variable_map.insert(name.clone(), ty);
|
||||
},
|
||||
_ => (),
|
||||
@@ -299,12 +303,12 @@ impl<'a> TypeContext<'a> {
|
||||
|
||||
fn expr(&mut self, expr: &Expression) -> InferResult<Type> {
|
||||
match expr {
|
||||
Expression { kind, type_anno: Some(anno) } => {
|
||||
Expression { kind, type_anno: Some(anno), .. } => {
|
||||
let t1 = self.expr_type(kind)?;
|
||||
let t2 = self.get_type_from_name(anno)?;
|
||||
self.unify(t2, t1)
|
||||
},
|
||||
Expression { kind, type_anno: None } => self.expr_type(kind)
|
||||
Expression { kind, type_anno: None, .. } => self.expr_type(kind)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -315,20 +319,20 @@ impl<'a> TypeContext<'a> {
|
||||
BoolLiteral(_) => ty!(Bool),
|
||||
FloatLiteral(_) => ty!(Float),
|
||||
StringLiteral(_) => ty!(StringT),
|
||||
PrefixExp(op, expr) => self.prefix(op, expr.node())?,
|
||||
BinExp(op, lhs, rhs) => self.binexp(op, lhs.node(), rhs.node())?,
|
||||
IfExpression { discriminator, body } => self.if_expr(discriminator, body)?,
|
||||
PrefixExp(op, expr) => self.prefix(op, expr)?,
|
||||
BinExp(op, lhs, rhs) => self.binexp(op, lhs, rhs)?,
|
||||
IfExpression { discriminator, body } => self.if_expr(deref_optional_box(discriminator), &**body)?,
|
||||
Value(val) => self.handle_value(val)?,
|
||||
Call { box ref f, arguments } => self.call(f.node(), arguments)?,
|
||||
Call { box ref f, arguments } => self.call(f, arguments)?,
|
||||
Lambda { params, type_anno, body } => self.lambda(params, type_anno, body)?,
|
||||
_ => ty!(Unit),
|
||||
})
|
||||
}
|
||||
|
||||
fn prefix(&mut self, op: &PrefixOp, expr: &Expression) -> InferResult<Type> {
|
||||
let tf = match op.get_type() {
|
||||
Ok(ty) => ty,
|
||||
Err(e) => return TypeError::new(e)
|
||||
let tf = match op.builtin.map(|b| b.get_type()) {
|
||||
Some(ty) => ty,
|
||||
None => return TypeError::new("no type found")
|
||||
};
|
||||
|
||||
let tx = self.expr(expr)?;
|
||||
@@ -336,9 +340,9 @@ impl<'a> TypeContext<'a> {
|
||||
}
|
||||
|
||||
fn binexp(&mut self, op: &BinOp, lhs: &Expression, rhs: &Expression) -> InferResult<Type> {
|
||||
let tf = match op.get_type() {
|
||||
Ok(ty) => ty,
|
||||
Err(e) => return TypeError::new(e),
|
||||
let tf = match op.builtin.map(|b| b.get_type()) {
|
||||
Some(ty) => ty,
|
||||
None => return TypeError::new("no type found"),
|
||||
};
|
||||
|
||||
let t_lhs = self.expr(lhs)?;
|
||||
@@ -347,10 +351,10 @@ impl<'a> TypeContext<'a> {
|
||||
self.handle_apply(tf, vec![t_lhs, t_rhs])
|
||||
}
|
||||
|
||||
    fn if_expr(&mut self, discriminator: &Discriminator, body: &IfExpressionBody) -> InferResult<Type> {
        use self::Discriminator::*; use self::IfExpressionBody::*;
    fn if_expr(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> InferResult<Type> {
        use self::IfExpressionBody::*;
        match (discriminator, body) {
            (Simple(expr), SimpleConditional(then_clause, else_clause)) => self.handle_simple_if(expr, then_clause, else_clause),
            (Some(expr), SimpleConditional{ then_case, else_case }) => self.handle_simple_if(expr, then_case, else_case),
            _ => TypeError::new(format!("Complex conditionals not supported"))
        }
    }
@@ -384,9 +388,9 @@ impl<'a> TypeContext<'a> {
        Ok(ty!(argument_types, ret_type))
    }

    fn call(&mut self, f: &Expression, args: &Vec<Meta<InvocationArgument>>) -> InferResult<Type> {
    fn call(&mut self, f: &Expression, args: &Vec<InvocationArgument>) -> InferResult<Type> {
        let tf = self.expr(f)?;
        let arg_types: InferResult<Vec<Type>> = args.iter().map(|ex| self.invoc(ex.node())).collect();
        let arg_types: InferResult<Vec<Type>> = args.iter().map(|ex| self.invoc(ex)).collect();
        let arg_types = arg_types?;
        self.handle_apply(tf, arg_types)
    }
@@ -406,17 +410,18 @@ impl<'a> TypeContext<'a> {

    fn block(&mut self, block: &Block) -> InferResult<Type> {
        let mut output = ty!(Unit);
        for s in block.iter() {
            let statement = s.node();
        for statement in block.iter() {
            output = self.statement(statement)?;
        }
        Ok(output)
    }

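As the rewritten `block` above shows, a block's type is simply the type of its last statement, with `Unit` for an empty block. A toy version with a stand-in statement representation and typing, just to make the rule concrete:

```rust
// Each iteration overwrites `output`, so the final statement's type wins.
#[derive(Debug, PartialEq, Clone)]
enum Type { Unit, Nat }

fn statement_type(s: &str) -> Type {
    if s.parse::<u64>().is_ok() { Type::Nat } else { Type::Unit }
}

fn block_type(block: &[&str]) -> Type {
    let mut output = Type::Unit;
    for statement in block {
        output = statement_type(statement);
    }
    output
}

fn main() {
    assert_eq!(block_type(&[]), Type::Unit);
    assert_eq!(block_type(&["let x = 1", "1"]), Type::Nat);
}
```
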
    fn handle_value(&mut self, val: &Rc<String>) -> InferResult<Type> {
        match self.variable_map.lookup(val) {
    fn handle_value(&mut self, val: &QualifiedName) -> InferResult<Type> {
        let QualifiedName { components: vec, .. } = val;
        let var = &vec[0];
        match self.variable_map.lookup(var) {
            Some(ty) => Ok(ty.clone()),
            None => TypeError::new(format!("Couldn't find variable: {}", val))
            None => TypeError::new(format!("Couldn't find variable: {}", &var)),
        }
    }

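The new `handle_value` above resolves a value by the first component of its `QualifiedName` only. A small sketch of that behavior with a stand-in `QualifiedName` (the crate's component type is richer than `String`, so treat this as illustrative):

```rust
// Only the first component of a multi-component name is consulted for the variable lookup.
struct QualifiedName { components: Vec<String> }

fn lookup_key(name: &QualifiedName) -> Option<&String> {
    name.components.first()
}

fn main() {
    let name = QualifiedName { components: vec!["x".into(), "inner".into()] };
    assert_eq!(lookup_key(&name).map(String::as_str), Some("x"));
}
```
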
@@ -457,7 +462,7 @@ mod typechecking_tests {
    macro_rules! assert_type_in_fresh_context {
        ($string:expr, $type:expr) => {
            let mut tc = TypeContext::new();
            let ref ast = crate::util::quick_ast($string);
            let (ref ast, _) = crate::util::quick_ast($string);
            let ty = tc.typecheck(ast).unwrap();
            assert_eq!(ty, $type)
        }
@@ -468,13 +473,16 @@ mod typechecking_tests {
        assert_type_in_fresh_context!("1", ty!(Nat));
        assert_type_in_fresh_context!(r#""drugs""#, ty!(StringT));
        assert_type_in_fresh_context!("true", ty!(Bool));
        assert_type_in_fresh_context!("-1", ty!(Int));
    }

    #[test]
    fn operators() {
        //TODO fix these with new operator regime
        /*
        assert_type_in_fresh_context!("-1", ty!(Int));
        assert_type_in_fresh_context!("1 + 2", ty!(Nat));
        assert_type_in_fresh_context!("-2", ty!(Int));
        assert_type_in_fresh_context!("!true", ty!(Bool));
        */
    }
}

@@ -1,6 +1,11 @@
use std::collections::HashMap;
use std::hash::Hash;
use std::cmp::Eq;
use std::ops::Deref;

pub fn deref_optional_box<T>(x: &Option<Box<T>>) -> Option<&T> {
    x.as_ref().map(|b: &Box<T>| Deref::deref(b))
}

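`deref_optional_box` is what lets the `IfExpression` arm earlier in this diff pass an `Option<Box<Expression>>` field to the new `Option<&Expression>`-taking `if_expr`. A standalone usage sketch, with an `i32` payload standing in for the crate's `Expression` type:

```rust
use std::ops::Deref;

// Same helper as in the diff, shown with a self-contained call site.
pub fn deref_optional_box<T>(x: &Option<Box<T>>) -> Option<&T> {
    x.as_ref().map(|b: &Box<T>| Deref::deref(b))
}

fn main() {
    // An AST field like `discriminator: Option<Box<Expression>>` can be handed to a
    // function expecting `Option<&Expression>` without moving or cloning the box.
    let discriminator: Option<Box<i32>> = Some(Box::new(1));
    let borrowed: Option<&i32> = deref_optional_box(&discriminator);
    assert_eq!(borrowed, Some(&1));
}
```
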
#[derive(Default, Debug)]
pub struct ScopeStack<'a, T: 'a, V: 'a> where T: Hash + Eq {
@@ -43,10 +48,18 @@ impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {

/// this is intended for use in tests, and does no error-handling whatsoever
#[allow(dead_code)]
pub fn quick_ast(input: &str) -> crate::ast::AST {
pub fn quick_ast(input: &str) -> (crate::ast::AST, crate::source_map::SourceMap) {
    use std::cell::RefCell;
    use std::rc::Rc;

    let source_map = crate::source_map::SourceMap::new();
    let source_map_handle = Rc::new(RefCell::new(source_map));
    let tokens = crate::tokenizing::tokenize(input);
    let mut parser = crate::parsing::Parser::new(tokens);
    parser.parse().unwrap()
    let mut parser = crate::parsing::Parser::new(source_map_handle.clone());
    parser.add_new_tokens(tokens);
    let output = parser.parse();
    std::mem::drop(parser);
    (output.unwrap(), Rc::try_unwrap(source_map_handle).map_err(|_| ()).unwrap().into_inner())
}

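The interesting part of the new `quick_ast` is how it gets the `SourceMap` back out of the shared handle: the parser holds an `Rc` clone, so the map can only be unwrapped by value after the parser is dropped. A self-contained sketch of that pattern (the `SourceMap` here is a stand-in, not the crate's type):

```rust
use std::cell::RefCell;
use std::rc::Rc;

// Handle-recovery pattern: share via Rc<RefCell<..>>, drop the other holder, then
// Rc::try_unwrap + into_inner to take the value back.
#[derive(Debug, Default)]
struct SourceMap { entries: usize }

fn main() {
    let handle = Rc::new(RefCell::new(SourceMap::default()));
    let parser_handle = handle.clone();       // what Parser::new(source_map_handle.clone()) holds
    parser_handle.borrow_mut().entries += 1;  // parsing would record spans here
    std::mem::drop(parser_handle);            // mirrors std::mem::drop(parser)
    let map = Rc::try_unwrap(handle).map_err(|_| ()).unwrap().into_inner();
    assert_eq!(map.entries, 1);
}
```
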
#[allow(unused_macros)]

@@ -45,7 +45,7 @@ pub fn start_repl(langs: Vec<Box<dyn ProgrammingLanguageInterface>>) {
            let mut repl = repl::Repl::new(langs);
            repl.run_repl();
        }
        [_, ref filename, _..] => {
        [_, ref filename, ..] => {
            run_noninteractive(filename, langs);
        }
    };

@@ -49,8 +49,8 @@ impl CommandTree {
    }
    pub fn get_help(&self) -> &str {
        match self {
            CommandTree::Terminal { help_msg, ..} => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
            CommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
            CommandTree::Terminal { help_msg, ..} => help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
            CommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
            CommandTree::Top(_) => ""
        }
    }

@@ -48,7 +48,7 @@ impl DirectiveAction {
            },
            ShowImmediate => {
                let cur_state = repl.get_cur_language_state();
                let stage_name = match arguments.get(1) {
                let stage_name = match arguments.get(0) {
                    Some(s) => s.to_string(),
                    None => return Some(format!("Must specify a thing to debug")),
                };

@@ -13,7 +13,12 @@ pub fn help(repl: &mut Repl, arguments: &[&str]) -> InterpreterDirectiveOutput {
        None => format!("Directive `{}` not found", commands.last().unwrap()),
        Some(dir) => {
            let mut buf = String::new();
            writeln!(buf, "`{}` - {}", dir.get_cmd(), dir.get_help()).unwrap();
            let cmd = dir.get_cmd();
            let children = dir.get_children();
            writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
            for sub in children.iter() {
                writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help()).unwrap();
            }
            buf
        }
    })

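To see what the reworked `help` formatting above produces, here is a small standalone rendering; the directive names and help strings are made up for illustration:

```rust
use std::fmt::Write;

fn main() {
    let mut buf = String::new();
    let cmd = "debug";
    let help = "Show debugging information";
    // Top-level line, then one tab-indented line per child directive.
    writeln!(buf, "`{}` - {}", cmd, help).unwrap();
    for (sub_cmd, sub_help) in [("ast", "Print the AST"), ("tokens", "Print the token stream")] {
        writeln!(buf, "\t`{} {}` - {}", cmd, sub_cmd, sub_help).unwrap();
    }
    print!("{}", buf);
    // `debug` - Show debugging information
    //     `debug ast` - Print the AST
    //     `debug tokens` - Print the token stream
}
```
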
@@ -27,6 +27,12 @@ pub struct Repl {
    options: ReplOptions,
}

#[derive(Clone)]
enum PromptStyle {
    Normal,
    Multiline
}

impl Repl {
    pub fn new(initial_states: Vec<Box<dyn ProgrammingLanguageInterface>>) -> Repl {
        use linefeed::Interface;
@@ -62,37 +68,73 @@ impl Repl {

    fn handle_repl_loop(&mut self) {
        use linefeed::ReadResult::*;
        let sigil = self.interpreter_directive_sigil;

        loop {
            self.update_line_reader();
            match self.line_reader.read_line() {
                Err(e) => {
                    println!("readline IO Error: {}", e);
                    break;
                },
                Ok(Eof) | Ok(Signal(_)) => break,
                Ok(Input(ref input)) => {
                    self.line_reader.add_history_unique(input.to_string());
                    match input.chars().nth(0) {
                        Some(ch) if ch == self.interpreter_directive_sigil => match self.handle_interpreter_directive(input) {
                            Some(directive_output) => println!("<> {}", directive_output),
                            None => (),
        'main: loop {
            macro_rules! match_or_break {
                ($line:expr) => {
                    match $line {
                        Err(e) => {
                            println!("readline IO Error: {}", e);
                            break 'main;
                        },
                        _ => {
                            for repl_response in self.handle_input(input) {
                                println!("{}", repl_response);
                            }
                        }
                        Ok(Eof) | Ok(Signal(_)) => break 'main,
                        Ok(Input(ref input)) => input,
                    }
                }
            }
            self.update_line_reader();
            let line = self.line_reader.read_line();
            let input: &str = match_or_break!(line);

            self.line_reader.add_history_unique(input.to_string());
            let mut chars = input.chars().peekable();
            let repl_responses = match chars.nth(0) {
                Some(ch) if ch == sigil => {
                    if chars.peek() == Some(&'{') {
                        let mut buf = String::new();
                        buf.push_str(input.get(2..).unwrap());
                        'multiline: loop {
                            self.set_prompt(PromptStyle::Multiline);
                            let new_line = self.line_reader.read_line();
                            let new_input = match_or_break!(new_line);
                            if new_input.starts_with(":}") {
                                break 'multiline;
                            } else {
                                buf.push_str(new_input);
                                buf.push_str("\n");
                            }
                        }
                        self.handle_input(&buf)
                    } else {
                        match self.handle_interpreter_directive(input) {
                            Some(directive_output) => println!("<> {}", directive_output),
                            None => (),
                        }
                        continue
                    }
                },
                _ => self.handle_input(input)
            };

            for repl_response in repl_responses.iter() {
                println!("{}", repl_response);
            }
        }
    }

    fn update_line_reader(&mut self) {
        let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, self.get_directives());
        self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
        let prompt_str = format!(">> ");
        self.set_prompt(PromptStyle::Normal);
    }

    fn set_prompt(&mut self, prompt_style: PromptStyle) {
        let prompt_str = match prompt_style {
            PromptStyle::Normal => ">> ".to_string(),
            PromptStyle::Multiline => ">| ".to_string(),
        };

        self.line_reader.set_prompt(&prompt_str).unwrap();
    }
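
Putting the loop together: a line starting with the directive sigil followed by `{` switches the prompt to `>| ` and buffers input until a line starting with `:}`. The sketch below reproduces just the buffering step; the `:` sigil and the canned input lines are assumptions for illustration, and the real loop reads successive lines from linefeed rather than a slice:

```rust
// Accumulate a multiline block: skip the two opener characters, then append lines
// (with newlines) until the ":}" terminator is seen.
fn collect_multiline(first_line: &str, rest: &[&str]) -> Option<String> {
    let mut chars = first_line.chars().peekable();
    if chars.next() != Some(':') || chars.peek() != Some(&'{') {
        return None; // not a multiline opener
    }
    let mut buf = String::new();
    buf.push_str(first_line.get(2..).unwrap_or(""));
    for line in rest {
        if line.starts_with(":}") {
            break;
        }
        buf.push_str(line);
        buf.push('\n');
    }
    Some(buf)
}

fn main() {
    let body = collect_multiline(":{", &["fn id(x) {", "  x", "}", ":}"]).unwrap();
    assert_eq!(body, "fn id(x) {\n  x\n}\n");
}
```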