Compare commits
355 Commits
COMPILER_B
...
antiquated
Author | SHA1 | Date | |
---|---|---|---|
|
df2d882538 | ||
|
ab7d6af5f3 | ||
|
012c89b5c4 | ||
|
a6e1a6c36c | ||
|
52b2426f30 | ||
|
e7576b26e9 | ||
|
d9d6f7dc16 | ||
|
1daf23b129 | ||
|
c1b5fa392a | ||
|
4c99be700f | ||
|
2247d9b58e | ||
|
dcec8be307 | ||
|
934a390f2d | ||
|
8e8be1b449 | ||
|
5bfd79669e | ||
|
d913443e97 | ||
|
b5ec8116a2 | ||
|
b778428e98 | ||
|
354dd7d8c1 | ||
|
71911f0ab5 | ||
|
d6c5cd100b | ||
|
88dfa87e85 | ||
|
89a93d59c7 | ||
|
0eccceabd9 | ||
|
9d9331f4b0 | ||
|
1470e7fbdd | ||
|
d3866a1908 | ||
|
0a471ed71c | ||
|
9c0f60b6ce | ||
|
fc463d3807 | ||
|
7221d2cb11 | ||
|
032fe5fed9 | ||
|
bf55e6e82a | ||
|
9adae9c262 | ||
|
91985df449 | ||
|
4b11a6622a | ||
|
884c8e515f | ||
|
d2b5deb802 | ||
|
2ba0fb4869 | ||
|
a9afb6d24e | ||
|
8d5858d3d2 | ||
|
1a48e9b43a | ||
|
1797136156 | ||
|
3e422291f4 | ||
|
bba433c808 | ||
|
194cb2202a | ||
|
5a38ff8f41 | ||
|
de13e69769 | ||
|
8f3c982131 | ||
|
e5b6f2bc2f | ||
|
b760ec7eca | ||
|
94db2ea17f | ||
|
02ead69a44 | ||
|
c1ef0ee506 | ||
|
9a13848f80 | ||
|
1b6a7021e7 | ||
|
2c139df6dd | ||
|
7c3e924194 | ||
|
a41d808da3 | ||
|
eeec85c2b1 | ||
|
ec5bf12a65 | ||
|
bb26d9e674 | ||
|
219f5a183a | ||
|
69d857e94d | ||
|
8365690860 | ||
|
7ae7eaa07b | ||
|
88d2571401 | ||
|
721a499384 | ||
|
ec51659452 | ||
|
44cebec818 | ||
|
7e2b95593f | ||
|
58a1782162 | ||
|
6454cc5ad1 | ||
|
d5cd0dada7 | ||
|
65c745fb30 | ||
|
33573bf268 | ||
|
12a7fe3e3e | ||
|
7f3b4a727f | ||
|
7a8ab3d571 | ||
|
b7b4e75f01 | ||
|
7a9e43bf8e | ||
|
a666ac985b | ||
|
37e85c417e | ||
|
fc088923c0 | ||
|
8ace37c5cf | ||
|
c1e6bc8c4c | ||
|
d37be75478 | ||
|
f1ffeb155a | ||
|
222e0aad08 | ||
|
77030091bb | ||
|
b4b1a0cf63 | ||
|
a2d5f380a8 | ||
|
1cdaaee9a6 | ||
|
e6a9811ee5 | ||
|
ff1d4ef7bb | ||
|
f4029fe31a | ||
|
d38bb2278c | ||
|
96393604c3 | ||
|
29207876ae | ||
|
b0795f2dd4 | ||
|
aec3fd070e | ||
|
da4990107c | ||
|
94ee3e1897 | ||
|
05e1555a9b | ||
|
4b0aced11f | ||
|
205ab7179d | ||
|
abab667c43 | ||
|
869de8c033 | ||
|
ba8fb86e3f | ||
|
a00125d4a5 | ||
|
a93fc48ee8 | ||
|
8fe7fca88c | ||
|
6cd5a9353c | ||
|
671ce54dd3 | ||
|
c67adc3a38 | ||
|
13353f8801 | ||
|
10ea99e95c | ||
|
fa736f2dd4 | ||
|
b7f796322b | ||
|
f9349edf77 | ||
|
c5f7616303 | ||
|
5af42d0828 | ||
|
92c6d7f311 | ||
|
e618498881 | ||
|
a31735da88 | ||
|
96d12f3659 | ||
|
c3d36ab320 | ||
|
7bd6072dae | ||
|
08a4800175 | ||
|
8d7f8f555f | ||
|
fbb0269623 | ||
|
8c48f63a2d | ||
|
54b33282ef | ||
|
d46f40bc0f | ||
|
02fc76c8fc | ||
|
87141fcca3 | ||
|
4ec2585d25 | ||
|
8aa306746a | ||
|
4f3ef5c850 | ||
|
76b1e9c0dc | ||
|
6a318257d6 | ||
|
8e19b7c39d | ||
|
54eb8252a9 | ||
|
6cbe562241 | ||
|
88b39b5561 | ||
|
359f274f33 | ||
|
4c1ee0a34e | ||
|
8a9c63eccf | ||
|
c66f67e469 | ||
|
a13ad0edaa | ||
|
8336211a4b | ||
|
45c72f97a2 | ||
|
eb6a7e95a9 | ||
|
383eb7bb62 | ||
|
8b5e965f16 | ||
|
3e16070602 | ||
|
63ef1451d9 | ||
|
e40782739d | ||
|
9de1b4ea33 | ||
|
0464d959ec | ||
|
e2f39dd7b9 | ||
|
d4b00b008b | ||
|
ec92e14fcf | ||
|
0bf0b3e2e8 | ||
|
acc99fa0ef | ||
|
8798650024 | ||
|
4c6a93302d | ||
|
a3f2539993 | ||
|
d9f53abeb2 | ||
|
f28f4eab78 | ||
|
f4d3282090 | ||
|
7289504ab7 | ||
|
cd1bb91555 | ||
|
76f7524fdb | ||
|
d084deac80 | ||
|
87024b79ba | ||
|
803a836887 | ||
|
663e99df23 | ||
|
81bfe22974 | ||
|
8db9176fce | ||
|
de741448e0 | ||
|
f0e4b50c99 | ||
|
1c6545fb74 | ||
|
46fe03b43f | ||
|
ea494bb328 | ||
|
53112c9f9d | ||
|
e7308485df | ||
|
08d66f0a43 | ||
|
6e7bd1ccb8 | ||
|
68506571a8 | ||
|
8111f69640 | ||
|
304df5c50e | ||
|
6b9ca92e00 | ||
|
209b6bba48 | ||
|
cec0f35fc3 | ||
|
30fbc9a721 | ||
|
b2d9622feb | ||
|
5d04a020dc | ||
|
765a0bec58 | ||
|
40be5a8a33 | ||
|
6a7c88cd02 | ||
|
08590430e4 | ||
|
4dd39fe085 | ||
|
0f40a7de5d | ||
|
d65233240a | ||
|
a3463f5519 | ||
|
0c6d2be95a | ||
|
264fc2ae58 | ||
|
e4592ddfb2 | ||
|
8896b1a7a7 | ||
|
851fd9885f | ||
|
3402cfe326 | ||
|
f71d3707c6 | ||
|
0808bcbc87 | ||
|
47ff6b3cb5 | ||
|
e6061becc0 | ||
|
c8af776b15 | ||
|
b9767d0d7d | ||
|
df173a0096 | ||
|
9e799c23ba | ||
|
e52d0bf515 | ||
|
a03f570266 | ||
|
48e2d9a683 | ||
|
e40b8ece3b | ||
|
899a4df55e | ||
|
284d7ce383 | ||
|
6162d05b60 | ||
|
77cdfc229f | ||
|
856e74cb5e | ||
|
59956903f2 | ||
|
0a2f06f598 | ||
|
ec8ae05018 | ||
|
e4af5beb1c | ||
|
a1d6661a6b | ||
|
cac61ba093 | ||
|
e18ddbded9 | ||
|
b00df64f55 | ||
|
d6fcc65392 | ||
|
b5141e27d6 | ||
|
97117827c6 | ||
|
fb0bf29826 | ||
|
8ceaa734d2 | ||
|
df41da84b4 | ||
|
9ec1e00afa | ||
|
630420b114 | ||
|
856a0808de | ||
|
96595d8fb6 | ||
|
009095f771 | ||
|
5b4bb6606e | ||
|
7c5a08664a | ||
|
81859306b3 | ||
|
b365a3fec7 | ||
|
37ce12b6d8 | ||
|
4193971303 | ||
|
7282a38a08 | ||
|
16164c2235 | ||
|
f2c9cf20cb | ||
|
c9cfc467b0 | ||
|
3383921c6b | ||
|
7a7e4ec0f2 | ||
|
bd698629ff | ||
|
82de5c6e27 | ||
|
9540dc70f2 | ||
|
ba09919aa1 | ||
|
d8f6c41f04 | ||
|
e68331fe0a | ||
|
e947569100 | ||
|
92a695e523 | ||
|
b342213826 | ||
|
b4f765167b | ||
|
2256f25482 | ||
|
4ddcbc89ad | ||
|
fb31687dea | ||
|
93d0a2cd7d | ||
|
9b5c3629c0 | ||
|
b5484e67ee | ||
|
248af74ec0 | ||
|
3b5ebf92b4 | ||
|
4a366fda30 | ||
|
f625b80d0c | ||
|
5eb743a8b5 | ||
|
75935db9e6 | ||
|
2f669b77fd | ||
|
0d488b250d | ||
|
60ddac9774 | ||
|
ae6a79077f | ||
|
36f06b38de | ||
|
c9c65b050c | ||
|
91a7abf4cd | ||
|
0c6c4ef47e | ||
|
355ed3c749 | ||
|
c0a3a03045 | ||
|
f8c2e57b37 | ||
|
49a50deb04 | ||
|
052a2feb23 | ||
|
a9b8fdcad6 | ||
|
15a08aa8f7 | ||
|
9640a5b05b | ||
|
d3378c3210 | ||
|
7a0134014b | ||
|
3c4d31c963 | ||
|
736aa8aad2 | ||
|
40f759eea8 | ||
|
d1d3a70339 | ||
|
3060afd752 | ||
|
8b724cf0ff | ||
|
769ef448e8 | ||
|
f5328fac9d | ||
|
1e9a15d01e | ||
|
2609dd404a | ||
|
845461e2b3 | ||
|
9d89440a6d | ||
|
db6c9bb162 | ||
|
c697c929a4 | ||
|
5bba900a3d | ||
|
2fe4109296 | ||
|
25bffa339c | ||
|
6d84675ff8 | ||
|
f8924cf65f | ||
|
ed6360247d | ||
|
eeb4e743d2 | ||
|
3bb323667d | ||
|
69304de998 | ||
|
fd3a641c71 | ||
|
be8633fedb | ||
|
ec55e2e8f0 | ||
|
3ed5f1d16c | ||
|
63360e5617 | ||
|
90ede076cc | ||
|
0cb0145cc5 | ||
|
075e323239 | ||
|
fcbf2d959b | ||
|
421a33c42c | ||
|
61e2acc338 | ||
|
2d72f560ed | ||
|
6ac0628265 | ||
|
5dcfce46cc | ||
|
76c2257c7e | ||
|
3cbe80e933 | ||
|
0f7e568341 | ||
|
75b1f9cce5 | ||
|
c3131a6d5e | ||
|
d01d280452 | ||
|
d578aa0fc7 | ||
|
7e0acb7d87 | ||
|
7b7e20859f | ||
|
d3ebcc9654 | ||
|
2c64bb6c34 | ||
|
cd4045b8e7 | ||
|
6012e8cf9d | ||
|
ec6f4b510e | ||
|
c92e88900c | ||
|
c9a4c83fce | ||
|
77bf42be6c | ||
|
a0955e07dc |
978
Cargo.lock
generated
978
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@ -2,11 +2,14 @@
|
|||||||
name = "schala"
|
name = "schala"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
authors = ["greg <greg.shuflin@protonmail.com>"]
|
authors = ["greg <greg.shuflin@protonmail.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
resolver = "2"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
getopts = "0.2.21"
|
||||||
|
|
||||||
schala-repl = { path = "schala-repl" }
|
schala-repl = { path = "schala-repl" }
|
||||||
schala-lang = { path = "schala-lang/language" }
|
schala-lang = { path = "schala-lang" }
|
||||||
# maaru-lang = { path = "maaru" }
|
# maaru-lang = { path = "maaru" }
|
||||||
# rukka-lang = { path = "rukka" }
|
# rukka-lang = { path = "rukka" }
|
||||||
# robo-lang = { path = "robo" }
|
# robo-lang = { path = "robo" }
|
||||||
|
85
README.md
85
README.md
@ -1,22 +1,44 @@
|
|||||||
# Schala - a programming language meta-interpreter
|
# Schala - a programming language meta-interpreter
|
||||||
|
|
||||||
Schala is a Rust framework written to make it easy to create and experiment
|
Schala is a Rust framework written to make it easy to create and experiment
|
||||||
with multipl toy programming languages. It provides a cross-language REPL and
|
with multiple toy programming languages. It provides a cross-language REPL and
|
||||||
provisions for tokenizing text, parsing tokens, evaluating an abstract syntax
|
provisions for tokenizing text, parsing tokens, evaluating an abstract syntax
|
||||||
tree, and other tasks that are common to all programming languages, as well as sharing state
|
tree, and other tasks that are common to all programming languages, as well as
|
||||||
between multiple programming languages.
|
sharing state between multiple programming languages.
|
||||||
|
|
||||||
Schala is implemented as a Rust library `schala-repl`, which provides a
|
Schala is implemented as a Rust library `schala-repl`, which provides a `Repl`
|
||||||
function `start_repl`, meant to be used as entry point into a common REPL or
|
data structure that takes in a value implementing the
|
||||||
non-interactive environment. Clients are expected to invoke `start_repl` with a
|
`ProgrammingLanguageInterface` trait. Individual programming language
|
||||||
vector of programming languages. Individual programming language
|
implementations are Rust types that implement `ProgrammingLanguageInterface`
|
||||||
implementations are Rust types that implement the
|
and store whatever persistent state is relevant to that language.
|
||||||
`ProgrammingLanguageInterface` trait and store whatever persistent state is
|
|
||||||
relevant to that language.
|
|
||||||
|
|
||||||
Run schala with: `cargo run`. This will drop you into a REPL environment. Type
|
|
||||||
`:help` for more information, or type in text in any supported programming
|
## Running
|
||||||
language (currently only schala-lang) to evaluate it in the REPL.
|
|
||||||
|
Run schala with the normal `cargo run`. This will drop you into a REPL
|
||||||
|
environment. Type `:help` for more information, or type in text in any
|
||||||
|
supported programming language (currently only `schala-lang`) to evaluate it in
|
||||||
|
the REPL.
|
||||||
|
|
||||||
|
### Examples
|
||||||
|
|
||||||
|
Try running the following `schala-lang` code example in the REPL:
|
||||||
|
|
||||||
|
```
|
||||||
|
>> 1 + 1
|
||||||
|
(Total time)=> 736.368µs
|
||||||
|
=> 2
|
||||||
|
>> fn foo(x) { x + 10 }
|
||||||
|
(Total time)=> 772.496µs
|
||||||
|
=>
|
||||||
|
>> foo(0)
|
||||||
|
(Total time)=> 593.591µs
|
||||||
|
=> 10
|
||||||
|
>> 5 + foo(1)
|
||||||
|
(Total time)=> 1.119916ms
|
||||||
|
=> 16
|
||||||
|
>>
|
||||||
|
```
|
||||||
|
|
||||||
## History
|
## History
|
||||||
|
|
||||||
@ -61,35 +83,28 @@ of learning how to write a programming language.
|
|||||||
|
|
||||||
### General
|
### General
|
||||||
|
|
||||||
http://thume.ca/2019/04/18/writing-a-compiler-in-rust/
|
* http://thume.ca/2019/04/18/writing-a-compiler-in-rust/
|
||||||
|
|
||||||
### Type-checking
|
### Type-checking
|
||||||
https://skillsmatter.com/skillscasts/10868-inside-the-rust-compiler
|
* https://skillsmatter.com/skillscasts/10868-inside-the-rust-compiler
|
||||||
https://www.youtube.com/watch?v=il3gD7XMdmA
|
* https://www.youtube.com/watch?v=il3gD7XMdmA
|
||||||
http://dev.stephendiehl.com/fun/006_hindley_milner.html
|
* http://dev.stephendiehl.com/fun/006_hindley_milner.html
|
||||||
https://rust-lang-nursery.github.io/rustc-guide/type-inference.html
|
* https://rust-lang-nursery.github.io/rustc-guide/type-inference.html
|
||||||
|
* https://eli.thegreenplace.net/2018/unification/
|
||||||
https://eli.thegreenplace.net/2018/unification/
|
* https://eli.thegreenplace.net/2018/type-inference/
|
||||||
https://eli.thegreenplace.net/2018/type-inference/
|
* http://smallcultfollowing.com/babysteps/blog/2017/03/25/unification-in-chalk-part-1/
|
||||||
http://smallcultfollowing.com/babysteps/blog/2017/03/25/unification-in-chalk-part-1/
|
* http://reasonableapproximation.net/2019/05/05/hindley-milner.html
|
||||||
http://reasonableapproximation.net/2019/05/05/hindley-milner.html
|
|
||||||
https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html
|
https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html
|
||||||
|
|
||||||
### Evaluation
|
### Evaluation
|
||||||
*Understanding Computation*, Tom Stuart, O'Reilly 2013
|
|
||||||
|
|
||||||
*Basics of Compiler Design*, Torben Mogensen
|
* _Understanding Computation_, Tom Stuart, O'Reilly 2013
|
||||||
|
* _Basics of Compiler Design_, Torben Mogensen
|
||||||
|
|
||||||
### Parsing
|
### Parsing
|
||||||
http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
|
* http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
|
||||||
https://soc.github.io/languages/unified-condition-syntax
|
* https://soc.github.io/languages/unified-condition-syntax
|
||||||
|
* [Crafting Interpreters](http://www.craftinginterpreters.com/)
|
||||||
[Crafting Interpreters](http://www.craftinginterpreters.com/)
|
|
||||||
|
|
||||||
### LLVM
|
### LLVM
|
||||||
http://blog.ulysse.io/2016/07/03/llvm-getting-started.html
|
* http://blog.ulysse.io/2016/07/03/llvm-getting-started.html
|
||||||
|
|
||||||
###Rust resources
|
|
||||||
https://thefullsnack.com/en/rust-for-the-web.html
|
|
||||||
|
|
||||||
https://rocket.rs/guide/getting-started/
|
|
||||||
|
151
TODO.md
151
TODO.md
@ -1,4 +1,32 @@
|
|||||||
# Plan of attack
|
# Immediate TODOs / General Code Cleanup
|
||||||
|
|
||||||
|
## Parsing
|
||||||
|
|
||||||
|
* cf. https://siraben.dev/2022/03/22/tree-sitter-linter.html write a tree-sitter parser for Schala
|
||||||
|
|
||||||
|
* Create a macro system, perhaps c.f. Crystal's?
|
||||||
|
* Macro system should be able to implement:
|
||||||
|
* printf-style variadic arguments
|
||||||
|
* something like the Rust/Haskell `Derive` construct
|
||||||
|
* doing useful things with all variants of an enum
|
||||||
|
* (e.g. what https://matklad.github.io//2022/03/26/self-modifying-code.html tries to solve)
|
||||||
|
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
* Make an automatic (macro-based?) system for numbering compiler errors, this should be every type of error
|
||||||
|
|
||||||
|
## Symbols
|
||||||
|
|
||||||
|
* Add some good printf-debugging impls for SymbolTable-related items
|
||||||
|
|
||||||
|
* the symbol table should probably *only* be for global definitions (maybe rename it to reflect this?)
|
||||||
|
* dealing with variable lookup w/in functions/closures should probably happen in AST -> ReducedAST
|
||||||
|
* b/c that's where we go from a string name to a canonical ID (for e.g. 2nd param in 3rd enclosing scope)
|
||||||
|
|
||||||
|
* In fact to prove this works, the symbol table shoudl _parallelize_ the process of checking subscopes for local items
|
||||||
|
|
||||||
|
* Old notes on a plan of attack:
|
||||||
|
|
||||||
1. modify visitor so it can handle scopes
|
1. modify visitor so it can handle scopes
|
||||||
-this is needed both to handle import scope correctly
|
-this is needed both to handle import scope correctly
|
||||||
@ -6,66 +34,59 @@
|
|||||||
|
|
||||||
2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away
|
2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away
|
||||||
|
|
||||||
# TODO items
|
## Typechecking
|
||||||
|
|
||||||
-use 'let' sigil in patterns for variables :
|
* make a type to represent types rather than relying on string comparisons
|
||||||
|
* look at https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html
|
||||||
|
|
||||||
|
## General code cleanup
|
||||||
|
* standardize on an error type that isn't String
|
||||||
|
* implement a visitor pattern for the use of scope_resolver
|
||||||
|
* maybe implement this twice: 1) the value-returning, no-default one in the haoyi blogpost,
|
||||||
|
* look at
|
||||||
|
* https://gitlab.haskell.org/ghc/ghc/wikis/pattern-synonyms
|
||||||
|
* the non-value-returning, default one like in rustc (cf. https://github.com/rust-unofficial/patterns/blob/master/patterns/visitor.md)
|
||||||
|
|
||||||
|
|
||||||
|
# Longer-term Ideas
|
||||||
|
|
||||||
|
## Language Syntax
|
||||||
|
|
||||||
|
* the `type` declaration should have some kind of GADT-like syntax
|
||||||
|
* syntactic sugar for typestates? (cf. https://rustype.github.io/notes/notes/rust-typestate-series/rust-typestate-part-1.html )
|
||||||
|
* use `let` sigil to indicate a variable in a pattern explicitly:
|
||||||
|
|
||||||
```
|
```
|
||||||
q is MyStruct(let a, Chrono::Trigga) then {
|
q is MyStruct(let a, Chrono::Trigga) then {
|
||||||
|
// a is in scope here
|
||||||
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
-idea: what if there was something like React jsx syntas built in? i.e. a way to automatically transform some kind of markup
|
* if you have a pattern-match where one variant has a variable and the other
|
||||||
into a function call, cf. `<h1 prop="arg">` -> h1(prop=arg)
|
lacks it instead of treating this as a type error, promote the bound variable
|
||||||
|
to an option type
|
||||||
|
|
||||||
## General code cleanup
|
* what if there was something like React jsx syntas built in? i.e. a way to
|
||||||
- I think I can restructure the parser to get rid of most instances of expect!, at least at the beginning of a rule
|
automatically transform some kind of markup into a function call, cf. `<h1
|
||||||
DONE -experiment with storing metadata via ItemIds on AST nodes (cf. https://rust-lang.github.io/rustc-guide/hir.html, https://github.com/rust-lang/rust/blob/master/src/librustc/hir/mod.rs )
|
prop="arg">` -> h1(prop=arg)
|
||||||
-implement and test open/use statements
|
|
||||||
-implement field access
|
|
||||||
- standardize on an error type that isn't String
|
|
||||||
-implement a visitor pattern for the use of scope_resolver
|
|
||||||
- maybe implement this twice: 1) the value-returning, no-default one in the haoyi blogpost,
|
|
||||||
-look at https://gitlab.haskell.org/ghc/ghc/wikis/pattern-synonyms
|
|
||||||
2) the non-value-returning, default one like in rustc (cf. https://github.com/rust-unofficial/patterns/blob/master/patterns/visitor.md)
|
|
||||||
|
|
||||||
-parser error - should report subset of AST parsed *so far*
|
* implement and test open/use statements
|
||||||
- what if you used python 'def' syntax to define a function? what error message makes sense here?
|
|
||||||
|
|
||||||
## Reduction
|
* Include extensible scala-style `html"string ${var}"` string interpolations
|
||||||
- make a good type for actual language builtins to avoid string comparisons
|
|
||||||
|
|
||||||
## Typechecking
|
* A neat idea for pattern matching optimization would be if you could match on
|
||||||
|
one of several things in a list
|
||||||
- make a type to represent types rather than relying on string comparisons
|
|
||||||
|
|
||||||
- look at https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html
|
|
||||||
|
|
||||||
- cf. the notation mentioned in the cardelli paper, the debug information for the `typechecking` pass should
|
|
||||||
print the generated type variable for every subexpression in an expression
|
|
||||||
|
|
||||||
- think about idris-related ideas of multiple implementations of a type for an interface (+ vs * impl for monoids, for preorder/inorder/postorder for Foldable)
|
|
||||||
|
|
||||||
-should have an Idris-like `cast To From` function
|
|
||||||
|
|
||||||
## Schala-lang syntax
|
|
||||||
|
|
||||||
-idea: the `type` declaration should have some kind of GADT-like syntax
|
|
||||||
|
|
||||||
- Idea: if you have a pattern-match where one variant has a variable and the other lacks it
|
|
||||||
instead of treating this as a type error, promote the bound variable to an option type
|
|
||||||
|
|
||||||
- Include extensible scala-style html"string ${var}" string interpolations
|
|
||||||
|
|
||||||
- A neat idea for pattern matching optimization would be if you could match on one of several things in a list
|
|
||||||
ex:
|
ex:
|
||||||
```if x {
|
```
|
||||||
|
if x {
|
||||||
is (comp, LHSPat, RHSPat) if comp in ["==, "<"] -> ...
|
is (comp, LHSPat, RHSPat) if comp in ["==, "<"] -> ...
|
||||||
}```
|
}
|
||||||
|
```
|
||||||
|
|
||||||
- Schala should have both currying *and* default arguments!
|
* Schala should have both currying *and* default arguments!
|
||||||
```fn a(b: Int, c:Int, d:Int = 1) -> Int
|
```
|
||||||
|
fn a(b: Int, c:Int, d:Int = 1) -> Int
|
||||||
a(1,2) : Int
|
a(1,2) : Int
|
||||||
a(1,2,d=2): Int
|
a(1,2,d=2): Int
|
||||||
a(_,1,3) : Int -> Int
|
a(_,1,3) : Int -> Int
|
||||||
@ -73,35 +94,49 @@ ex:
|
|||||||
a(_,_,_) : Int -> Int -> Int -> Int
|
a(_,_,_) : Int -> Int -> Int -> Int
|
||||||
```
|
```
|
||||||
|
|
||||||
- scoped types - be able to define a quick enum type scoped to a function or other type for
|
* scoped types - be able to define a quick enum type scoped to a function or other type for
|
||||||
something, that only is meant to be used as a quick bespoke interface between
|
something, that only is meant to be used as a quick bespoke interface between
|
||||||
two other things
|
two other things
|
||||||
|
|
||||||
|
|
||||||
ex.
|
ex.
|
||||||
```type enum {
|
```
|
||||||
|
type enum {
|
||||||
type enum MySubVariant {
|
type enum MySubVariant {
|
||||||
SubVariant1, SubVariant2, etc.
|
SubVariant1, SubVariant2, etc.
|
||||||
}
|
}
|
||||||
Variant1(MySubVariant),
|
Variant1(MySubVariant),
|
||||||
Variant2(...),
|
Variant2(...),
|
||||||
}```
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
* inclusive/exclusive range syntax like .. vs ..=
|
||||||
|
|
||||||
|
* Nameable patterns/ pattern synonyms cf. https://gitlab.haskell.org/ghc/ghc/-/wikis/pattern-synonyms
|
||||||
|
|
||||||
|
|
||||||
|
## Typechecking
|
||||||
|
|
||||||
|
* cf. the notation mentioned in the cardelli paper, the debug information for the `typechecking` pass should
|
||||||
|
* print the generated type variable for every subexpression in an expression
|
||||||
|
* think about idris-related ideas of multiple implementations of a type for an interface (+ vs * impl for monoids, for preorder/inorder/postorder for Foldable)
|
||||||
|
* should have an Idris-like `cast To From` function
|
||||||
|
|
||||||
|
* something like the swift `Never` type ( https://nshipster.com/never/ ) in the stdlib
|
||||||
|
|
||||||
- inclusive/exclusive range syntax like .. vs ..=
|
|
||||||
|
|
||||||
## Compilation
|
## Compilation
|
||||||
-look into Inkwell for rust LLVM bindings
|
* look into Inkwell for rust LLVM bindings
|
||||||
|
* https://cranelift.readthedocs.io/en/latest/?badge=latest<Paste>
|
||||||
|
|
||||||
-https://cranelift.readthedocs.io/en/latest/?badge=latest<Paste>
|
* look at https://gluon-lang.org/doc/nightly/book/embedding-api.html
|
||||||
|
|
||||||
|
|
||||||
## Other links of note
|
# Syntax Playground
|
||||||
|
|
||||||
- https://nshipster.com/never/
|
|
||||||
-consult http://gluon-lang.org/book/embedding-api.html
|
|
||||||
|
|
||||||
|
|
||||||
## Trying if-syntax again
|
## Trying if-syntax again
|
||||||
|
|
||||||
|
```
|
||||||
//simple if expr
|
//simple if expr
|
||||||
if x == 10 then "a" else "z"
|
if x == 10 then "a" else "z"
|
||||||
|
|
||||||
@ -132,7 +167,7 @@ if x {
|
|||||||
is Person(_, age) if age > 13 then "barmitzvah'd"
|
is Person(_, age) if age > 13 then "barmitzvah'd"
|
||||||
else "foo"
|
else "foo"
|
||||||
}
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
## (OLD) Playing around with conditional syntax ideas
|
## (OLD) Playing around with conditional syntax ideas
|
||||||
|
2
rust-toolchain.toml
Normal file
2
rust-toolchain.toml
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
[toolchain]
|
||||||
|
channel = "nightly"
|
8
rustfmt.toml
Normal file
8
rustfmt.toml
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
|
||||||
|
max_width = 110
|
||||||
|
use_small_heuristics = "max"
|
||||||
|
imports_indent = "block"
|
||||||
|
imports_granularity = "crate"
|
||||||
|
group_imports = "stdexternalcrate"
|
||||||
|
match_arm_blocks = false
|
||||||
|
where_single_line = true
|
27
schala-lang/Cargo.toml
Normal file
27
schala-lang/Cargo.toml
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
[package]
|
||||||
|
name = "schala-lang"
|
||||||
|
version = "0.1.0"
|
||||||
|
authors = ["greg <greg.shuflin@protonmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
itertools = "0.10"
|
||||||
|
take_mut = "0.2.2"
|
||||||
|
failure = "0.1.5"
|
||||||
|
ena = "0.11.0"
|
||||||
|
stopwatch = "0.0.7"
|
||||||
|
derivative = "2.2.0"
|
||||||
|
colored = "1.8"
|
||||||
|
radix_trie = "0.1.5"
|
||||||
|
assert_matches = "1.5"
|
||||||
|
#peg = "0.7.0"
|
||||||
|
peg = "0.8.1"
|
||||||
|
nom = "7.1.0"
|
||||||
|
nom_locate = "4.0.0"
|
||||||
|
|
||||||
|
|
||||||
|
schala-repl = { path = "../schala-repl" }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
test-case = "1.2.0"
|
||||||
|
pretty_assertions = "1.0.0"
|
@ -1,12 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "schala-lang-codegen"
|
|
||||||
version = "0.1.0"
|
|
||||||
authors = ["greg <greg.shuflin@protonmail.com>"]
|
|
||||||
edition = "2018"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
proc-macro = true
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
syn = { version = "0.15.12", features = ["full", "extra-traits", "fold"] }
|
|
||||||
quote = "0.6.8"
|
|
@ -1,54 +0,0 @@
|
|||||||
#![feature(box_patterns)]
|
|
||||||
#![recursion_limit="128"]
|
|
||||||
extern crate proc_macro;
|
|
||||||
#[macro_use]
|
|
||||||
extern crate quote;
|
|
||||||
#[macro_use]
|
|
||||||
extern crate syn;
|
|
||||||
|
|
||||||
use self::proc_macro::TokenStream;
|
|
||||||
use self::syn::fold::Fold;
|
|
||||||
|
|
||||||
struct RecursiveDescentFn {
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Fold for RecursiveDescentFn {
|
|
||||||
fn fold_item_fn(&mut self, mut i: syn::ItemFn) -> syn::ItemFn {
|
|
||||||
let box block = i.block;
|
|
||||||
let ref ident = i.ident;
|
|
||||||
|
|
||||||
let new_block: syn::Block = parse_quote! {
|
|
||||||
{
|
|
||||||
let next_token_before_parse = self.token_handler.peek();
|
|
||||||
let record = ParseRecord {
|
|
||||||
production_name: stringify!(#ident).to_string(),
|
|
||||||
next_token: format!("{}", next_token_before_parse.to_string_with_metadata()),
|
|
||||||
level: self.parse_level,
|
|
||||||
};
|
|
||||||
self.parse_level += 1;
|
|
||||||
self.parse_record.push(record);
|
|
||||||
let result = { #block };
|
|
||||||
|
|
||||||
if self.parse_level != 0 {
|
|
||||||
self.parse_level -= 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
result.map_err(|mut parse_error: ParseError| {
|
|
||||||
parse_error.production_name = Some(stringify!(#ident).to_string());
|
|
||||||
parse_error
|
|
||||||
})
|
|
||||||
}
|
|
||||||
};
|
|
||||||
i.block = Box::new(new_block);
|
|
||||||
i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[proc_macro_attribute]
|
|
||||||
pub fn recursive_descent_method(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
|
||||||
|
|
||||||
let input: syn::ItemFn = parse_macro_input!(item as syn::ItemFn);
|
|
||||||
let mut folder = RecursiveDescentFn {};
|
|
||||||
let output = folder.fold_item_fn(input);
|
|
||||||
TokenStream::from(quote!(#output))
|
|
||||||
}
|
|
@ -1,20 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "schala-lang"
|
|
||||||
version = "0.1.0"
|
|
||||||
authors = ["greg <greg.shuflin@protonmail.com>"]
|
|
||||||
edition = "2018"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
itertools = "0.8.0"
|
|
||||||
take_mut = "0.2.2"
|
|
||||||
maplit = "1.0.1"
|
|
||||||
lazy_static = "1.3.0"
|
|
||||||
failure = "0.1.5"
|
|
||||||
ena = "0.11.0"
|
|
||||||
stopwatch = "0.0.7"
|
|
||||||
derivative = "1.0.3"
|
|
||||||
colored = "1.8"
|
|
||||||
radix_trie = "0.1.5"
|
|
||||||
|
|
||||||
schala-lang-codegen = { path = "../codegen" }
|
|
||||||
schala-repl = { path = "../../schala-repl" }
|
|
@ -1,307 +0,0 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use crate::derivative::Derivative;
|
|
||||||
|
|
||||||
mod walker;
|
|
||||||
mod visitor;
|
|
||||||
mod visitor_test;
|
|
||||||
mod operators;
|
|
||||||
pub use operators::*;
|
|
||||||
pub use visitor::ASTVisitor;
|
|
||||||
pub use walker::walk_ast;
|
|
||||||
|
|
||||||
/// An abstract identifier for an AST node
|
|
||||||
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
|
|
||||||
pub struct ItemId {
|
|
||||||
idx: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ItemId {
|
|
||||||
fn new(n: u32) -> ItemId {
|
|
||||||
ItemId { idx: n }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct ItemIdStore {
|
|
||||||
last_idx: u32
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ItemIdStore {
|
|
||||||
pub fn new() -> ItemIdStore {
|
|
||||||
ItemIdStore { last_idx: 0 }
|
|
||||||
}
|
|
||||||
/// Always returns an ItemId with internal value zero
|
|
||||||
#[cfg(test)]
|
|
||||||
pub fn new_id() -> ItemId {
|
|
||||||
ItemId { idx: 0 }
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This limits the size of the AST to 2^32 tree elements
|
|
||||||
pub fn fresh(&mut self) -> ItemId {
|
|
||||||
let idx = self.last_idx;
|
|
||||||
self.last_idx += 1;
|
|
||||||
ItemId::new(idx)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Derivative, Debug)]
|
|
||||||
#[derivative(PartialEq)]
|
|
||||||
pub struct AST {
|
|
||||||
#[derivative(PartialEq="ignore")]
|
|
||||||
pub id: ItemId,
|
|
||||||
pub statements: Vec<Statement>
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Derivative, Debug, Clone)]
|
|
||||||
#[derivative(PartialEq)]
|
|
||||||
pub struct Statement {
|
|
||||||
#[derivative(PartialEq="ignore")]
|
|
||||||
pub id: ItemId,
|
|
||||||
pub kind: StatementKind,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum StatementKind {
|
|
||||||
Expression(Expression),
|
|
||||||
Declaration(Declaration),
|
|
||||||
Import(ImportSpecifier),
|
|
||||||
Module(ModuleSpecifier),
|
|
||||||
}
|
|
||||||
|
|
||||||
pub type Block = Vec<Statement>;
|
|
||||||
pub type ParamName = Rc<String>;
|
|
||||||
|
|
||||||
#[derive(Debug, Derivative, Clone)]
|
|
||||||
#[derivative(PartialEq)]
|
|
||||||
pub struct QualifiedName {
|
|
||||||
#[derivative(PartialEq="ignore")]
|
|
||||||
pub id: ItemId,
|
|
||||||
pub components: Vec<Rc<String>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct FormalParam {
|
|
||||||
pub name: ParamName,
|
|
||||||
pub default: Option<Expression>,
|
|
||||||
pub anno: Option<TypeIdentifier>
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum Declaration {
|
|
||||||
FuncSig(Signature),
|
|
||||||
FuncDecl(Signature, Block),
|
|
||||||
TypeDecl {
|
|
||||||
name: TypeSingletonName,
|
|
||||||
body: TypeBody,
|
|
||||||
mutable: bool
|
|
||||||
},
|
|
||||||
//TODO this needs to be more sophisticated
|
|
||||||
TypeAlias {
|
|
||||||
alias: Rc<String>,
|
|
||||||
original: Rc<String>,
|
|
||||||
},
|
|
||||||
Binding {
|
|
||||||
name: Rc<String>,
|
|
||||||
constant: bool,
|
|
||||||
type_anno: Option<TypeIdentifier>,
|
|
||||||
expr: Expression,
|
|
||||||
},
|
|
||||||
Impl {
|
|
||||||
type_name: TypeIdentifier,
|
|
||||||
interface_name: Option<TypeSingletonName>,
|
|
||||||
block: Vec<Declaration>,
|
|
||||||
},
|
|
||||||
Interface {
|
|
||||||
name: Rc<String>,
|
|
||||||
signatures: Vec<Signature>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct Signature {
|
|
||||||
pub name: Rc<String>,
|
|
||||||
pub operator: bool,
|
|
||||||
pub params: Vec<FormalParam>,
|
|
||||||
pub type_anno: Option<TypeIdentifier>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct TypeBody(pub Vec<Variant>);
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum Variant {
|
|
||||||
UnitStruct(Rc<String>),
|
|
||||||
TupleStruct(Rc<String>, Vec<TypeIdentifier>),
|
|
||||||
Record {
|
|
||||||
name: Rc<String>,
|
|
||||||
members: Vec<(Rc<String>, TypeIdentifier)>,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Derivative, Clone)]
|
|
||||||
#[derivative(PartialEq)]
|
|
||||||
pub struct Expression {
|
|
||||||
#[derivative(PartialEq="ignore")]
|
|
||||||
pub id: ItemId,
|
|
||||||
pub kind: ExpressionKind,
|
|
||||||
pub type_anno: Option<TypeIdentifier>
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Expression {
|
|
||||||
pub fn new(id: ItemId, kind: ExpressionKind) -> Expression {
|
|
||||||
Expression { id, kind, type_anno: None }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_anno(id: ItemId, kind: ExpressionKind, type_anno: TypeIdentifier) -> Expression {
|
|
||||||
Expression { id, kind, type_anno: Some(type_anno) }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum TypeIdentifier {
|
|
||||||
Tuple(Vec<TypeIdentifier>),
|
|
||||||
Singleton(TypeSingletonName)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct TypeSingletonName {
|
|
||||||
pub name: Rc<String>,
|
|
||||||
pub params: Vec<TypeIdentifier>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum ExpressionKind {
|
|
||||||
NatLiteral(u64),
|
|
||||||
FloatLiteral(f64),
|
|
||||||
StringLiteral(Rc<String>),
|
|
||||||
BoolLiteral(bool),
|
|
||||||
BinExp(BinOp, Box<Expression>, Box<Expression>),
|
|
||||||
PrefixExp(PrefixOp, Box<Expression>),
|
|
||||||
TupleLiteral(Vec<Expression>),
|
|
||||||
Value(QualifiedName),
|
|
||||||
NamedStruct {
|
|
||||||
name: QualifiedName,
|
|
||||||
fields: Vec<(Rc<String>, Expression)>,
|
|
||||||
},
|
|
||||||
Call {
|
|
||||||
f: Box<Expression>,
|
|
||||||
arguments: Vec<InvocationArgument>,
|
|
||||||
},
|
|
||||||
Index {
|
|
||||||
indexee: Box<Expression>,
|
|
||||||
indexers: Vec<Expression>,
|
|
||||||
},
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Option<Box<Expression>>,
|
|
||||||
body: Box<IfExpressionBody>,
|
|
||||||
},
|
|
||||||
WhileExpression {
|
|
||||||
condition: Option<Box<Expression>>,
|
|
||||||
body: Block,
|
|
||||||
},
|
|
||||||
ForExpression {
|
|
||||||
enumerators: Vec<Enumerator>,
|
|
||||||
body: Box<ForBody>,
|
|
||||||
},
|
|
||||||
Lambda {
|
|
||||||
params: Vec<FormalParam>,
|
|
||||||
type_anno: Option<TypeIdentifier>,
|
|
||||||
body: Block,
|
|
||||||
},
|
|
||||||
ListLiteral(Vec<Expression>),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum InvocationArgument {
|
|
||||||
Positional(Expression),
|
|
||||||
Keyword {
|
|
||||||
name: Rc<String>,
|
|
||||||
expr: Expression,
|
|
||||||
},
|
|
||||||
Ignored
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum IfExpressionBody {
|
|
||||||
SimpleConditional {
|
|
||||||
then_case: Block,
|
|
||||||
else_case: Option<Block>
|
|
||||||
},
|
|
||||||
SimplePatternMatch {
|
|
||||||
pattern: Pattern,
|
|
||||||
then_case: Block,
|
|
||||||
else_case: Option<Block>
|
|
||||||
},
|
|
||||||
CondList(Vec<ConditionArm>)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct ConditionArm {
|
|
||||||
pub condition: Condition,
|
|
||||||
pub guard: Option<Expression>,
|
|
||||||
pub body: Block,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum Condition {
|
|
||||||
Pattern(Pattern),
|
|
||||||
TruncatedOp(BinOp, Expression),
|
|
||||||
Expression(Expression),
|
|
||||||
Else,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum Pattern {
|
|
||||||
Ignored,
|
|
||||||
TuplePattern(Vec<Pattern>),
|
|
||||||
Literal(PatternLiteral),
|
|
||||||
TupleStruct(QualifiedName, Vec<Pattern>),
|
|
||||||
Record(QualifiedName, Vec<(Rc<String>, Pattern)>),
|
|
||||||
VarOrName(QualifiedName),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum PatternLiteral {
|
|
||||||
NumPattern {
|
|
||||||
neg: bool,
|
|
||||||
num: ExpressionKind,
|
|
||||||
},
|
|
||||||
StringPattern(Rc<String>),
|
|
||||||
BoolPattern(bool),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct Enumerator {
|
|
||||||
pub id: Rc<String>,
|
|
||||||
pub generator: Expression,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum ForBody {
|
|
||||||
MonadicReturn(Expression),
|
|
||||||
StatementBlock(Block),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Derivative, Clone)]
|
|
||||||
#[derivative(PartialEq)]
|
|
||||||
pub struct ImportSpecifier {
|
|
||||||
#[derivative(PartialEq="ignore")]
|
|
||||||
pub id: ItemId,
|
|
||||||
pub path_components: Vec<Rc<String>>,
|
|
||||||
pub imported_names: ImportedNames
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum ImportedNames {
|
|
||||||
All,
|
|
||||||
LastOfPath,
|
|
||||||
List(Vec<Rc<String>>)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct ModuleSpecifier {
|
|
||||||
pub name: Rc<String>,
|
|
||||||
pub contents: Vec<Statement>,
|
|
||||||
}
|
|
||||||
|
|
@ -1,108 +0,0 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use crate::tokenizing::TokenKind;
|
|
||||||
use crate::builtin::Builtin;
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct PrefixOp {
|
|
||||||
sigil: Rc<String>,
|
|
||||||
pub builtin: Option<Builtin>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrefixOp {
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub fn sigil(&self) -> &Rc<String> {
|
|
||||||
&self.sigil
|
|
||||||
}
|
|
||||||
pub fn is_prefix(op: &str) -> bool {
|
|
||||||
match op {
|
|
||||||
"+" => true,
|
|
||||||
"-" => true,
|
|
||||||
"!" => true,
|
|
||||||
_ => false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for PrefixOp {
|
|
||||||
type Err = ();
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
use Builtin::*;
|
|
||||||
|
|
||||||
let builtin = match s {
|
|
||||||
"+" => Ok(Increment),
|
|
||||||
"-" => Ok(Negate),
|
|
||||||
"!" => Ok(BooleanNot),
|
|
||||||
_ => Err(())
|
|
||||||
};
|
|
||||||
|
|
||||||
builtin.map(|builtin| PrefixOp { sigil: Rc::new(s.to_string()), builtin: Some(builtin) })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub struct BinOp {
|
|
||||||
sigil: Rc<String>,
|
|
||||||
pub builtin: Option<Builtin>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BinOp {
|
|
||||||
pub fn from_sigil(sigil: &str) -> BinOp {
|
|
||||||
let builtin = Builtin::from_str(sigil).ok();
|
|
||||||
BinOp { sigil: Rc::new(sigil.to_string()), builtin }
|
|
||||||
}
|
|
||||||
pub fn sigil(&self) -> &Rc<String> {
|
|
||||||
&self.sigil
|
|
||||||
}
|
|
||||||
pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
|
|
||||||
let s = token_kind_to_sigil(tok)?;
|
|
||||||
Some(BinOp::from_sigil(s))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn min_precedence() -> i32 {
|
|
||||||
i32::min_value()
|
|
||||||
}
|
|
||||||
pub fn get_precedence_from_token(op_tok: &TokenKind) -> Option<i32> {
|
|
||||||
let s = token_kind_to_sigil(op_tok)?;
|
|
||||||
Some(binop_precedences(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn token_kind_to_sigil<'a>(tok: &'a TokenKind) -> Option<&'a str> {
|
|
||||||
use self::TokenKind::*;
|
|
||||||
Some(match tok {
|
|
||||||
Operator(op) => op.as_str(),
|
|
||||||
Period => ".",
|
|
||||||
Pipe => "|",
|
|
||||||
Slash => "/",
|
|
||||||
LAngleBracket => "<",
|
|
||||||
RAngleBracket => ">",
|
|
||||||
Equals => "=",
|
|
||||||
_ => return None
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn binop_precedences(s: &str) -> i32 {
|
|
||||||
let default = 10_000_000;
|
|
||||||
match s {
|
|
||||||
"+" => 10,
|
|
||||||
"-" => 10,
|
|
||||||
"*" => 20,
|
|
||||||
"/" => 20,
|
|
||||||
"%" => 20,
|
|
||||||
"++" => 30,
|
|
||||||
"^" => 30,
|
|
||||||
"&" => 20,
|
|
||||||
"|" => 20,
|
|
||||||
">" => 20,
|
|
||||||
">=" => 20,
|
|
||||||
"<" => 20,
|
|
||||||
"<=" => 20,
|
|
||||||
"==" => 40,
|
|
||||||
"=" => 10,
|
|
||||||
"<=>" => 30,
|
|
||||||
_ => default,
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,55 +0,0 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
use crate::ast::*;
|
|
||||||
|
|
||||||
//TODO maybe these functions should take closures that return a KeepRecursing | StopHere type,
|
|
||||||
//or a tuple of (T, <that type>)
|
|
||||||
|
|
||||||
pub trait ASTVisitor<BlockHandler=()>: Sized {
|
|
||||||
type BlockHandler: BlockVisitor;
|
|
||||||
fn ast(&mut self, _ast: &AST) {}
|
|
||||||
fn block(&mut self) -> Self::BlockHandler { Self::BlockHandler::new() }
|
|
||||||
fn block_finished(&mut self, handler: Self::BlockHandler) {}
|
|
||||||
fn statement(&mut self, _statement: &Statement) {}
|
|
||||||
fn declaration(&mut self, _declaration: &Declaration) {}
|
|
||||||
fn signature(&mut self, _signature: &Signature) {}
|
|
||||||
fn type_declaration(&mut self, _name: &TypeSingletonName, _body: &TypeBody, _mutable: bool) {}
|
|
||||||
fn type_alias(&mut self, _alias: &Rc<String>, _original: &Rc<String>) {}
|
|
||||||
fn binding(&mut self, _name: &Rc<String>, _constant: bool, _type_anno: Option<&TypeIdentifier>, _expr: &Expression) {}
|
|
||||||
fn implemention(&mut self, _type_name: &TypeIdentifier, _interface_name: Option<&TypeSingletonName>, _block: &Vec<Declaration>) {}
|
|
||||||
fn interface(&mut self, _name: &Rc<String>, _signatures: &Vec<Signature>) {}
|
|
||||||
fn expression(&mut self, _expression: &Expression) {}
|
|
||||||
fn expression_kind(&mut self, _kind: &ExpressionKind) {}
|
|
||||||
fn type_annotation(&mut self, _type_anno: Option<&TypeIdentifier>) {}
|
|
||||||
fn named_struct(&mut self, _name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {}
|
|
||||||
fn call(&mut self, _f: &Expression, _arguments: &Vec<InvocationArgument>) {}
|
|
||||||
fn index(&mut self, _indexee: &Expression, _indexers: &Vec<Expression>) {}
|
|
||||||
fn if_expression(&mut self, _discrim: Option<&Expression>, _body: &IfExpressionBody) {}
|
|
||||||
fn condition_arm(&mut self, _arm: &ConditionArm) {}
|
|
||||||
fn while_expression(&mut self, _condition: Option<&Expression>, _body: &Block) {}
|
|
||||||
fn for_expression(&mut self, _enumerators: &Vec<Enumerator>, _body: &ForBody) {}
|
|
||||||
fn lambda(&mut self, _params: &Vec<FormalParam>, _type_anno: Option<&TypeIdentifier>, _body: &Block) {}
|
|
||||||
fn invocation_argument(&mut self, _arg: &InvocationArgument) {}
|
|
||||||
fn formal_param(&mut self, _param: &FormalParam) {}
|
|
||||||
fn import(&mut self, _import: &ImportSpecifier) {}
|
|
||||||
fn module(&mut self, _module: &ModuleSpecifier) {}
|
|
||||||
fn qualified_name(&mut self, _name: &QualifiedName) {}
|
|
||||||
fn nat_literal(&mut self, _n: u64) {}
|
|
||||||
fn float_literal(&mut self, _f: f64) {}
|
|
||||||
fn string_literal(&mut self, _s: &Rc<String>) {}
|
|
||||||
fn bool_literal(&mut self, _b: bool) {}
|
|
||||||
fn binexp(&mut self, _op: &BinOp, _lhs: &Expression, _rhs: &Expression) {}
|
|
||||||
fn prefix_exp(&mut self, _op: &PrefixOp, _arg: &Expression) {}
|
|
||||||
fn pattern(&mut self, _pat: &Pattern) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait BlockVisitor {
|
|
||||||
fn new() -> Self;
|
|
||||||
fn pre_block(&mut self) {}
|
|
||||||
fn post_block(&mut self) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BlockVisitor for () {
|
|
||||||
fn new() -> () { () }
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
@ -1,41 +0,0 @@
|
|||||||
#![cfg(test)]
|
|
||||||
|
|
||||||
use crate::ast::visitor::ASTVisitor;
|
|
||||||
use crate::ast::walker;
|
|
||||||
use crate::util::quick_ast;
|
|
||||||
|
|
||||||
struct Tester {
|
|
||||||
count: u64,
|
|
||||||
float_count: u64
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ASTVisitor for Tester {
|
|
||||||
fn nat_literal(&mut self, _n: u64) {
|
|
||||||
self.count += 1;
|
|
||||||
}
|
|
||||||
fn float_literal(&mut self, _f: f64) {
|
|
||||||
self.float_count += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn foo() {
|
|
||||||
let mut tester = Tester { count: 0, float_count: 0 };
|
|
||||||
let (ast, _) = quick_ast(r#"
|
|
||||||
import gragh
|
|
||||||
|
|
||||||
let a = 20 + 84
|
|
||||||
let b = 28 + 1 + 2 + 2.0
|
|
||||||
fn heh() {
|
|
||||||
let m = 9
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
"#);
|
|
||||||
|
|
||||||
walker::walk_ast(&mut tester, &ast);
|
|
||||||
|
|
||||||
assert_eq!(tester.count, 6);
|
|
||||||
assert_eq!(tester.float_count, 1);
|
|
||||||
}
|
|
@ -1,270 +0,0 @@
|
|||||||
#![allow(dead_code)]
|
|
||||||
use std::rc::Rc;
|
|
||||||
use crate::ast::*;
|
|
||||||
use crate::ast::visitor::{ASTVisitor, BlockVisitor};
|
|
||||||
use crate::util::deref_optional_box;
|
|
||||||
|
|
||||||
pub fn walk_ast<V: ASTVisitor>(v: &mut V, ast: &AST) {
|
|
||||||
v.ast(ast);
|
|
||||||
walk_block(v, &ast.statements);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn walk_block<V: ASTVisitor>(v: &mut V, block: &Vec<Statement>) {
|
|
||||||
let mut block_handler = v.block();
|
|
||||||
block_handler.pre_block();
|
|
||||||
for s in block {
|
|
||||||
v.statement(s);
|
|
||||||
statement(v, s);
|
|
||||||
}
|
|
||||||
block_handler.post_block();
|
|
||||||
v.block_finished(block_handler);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn statement<V: ASTVisitor>(v: &mut V, statement: &Statement) {
|
|
||||||
use StatementKind::*;
|
|
||||||
match statement.kind {
|
|
||||||
Expression(ref expr) => {
|
|
||||||
v.expression(expr);
|
|
||||||
expression(v, expr);
|
|
||||||
},
|
|
||||||
Declaration(ref decl) => {
|
|
||||||
v.declaration(decl);
|
|
||||||
declaration(v, decl);
|
|
||||||
},
|
|
||||||
Import(ref import_spec) => v.import(import_spec),
|
|
||||||
Module(ref module_spec) => {
|
|
||||||
v.module(module_spec);
|
|
||||||
walk_block(v, &module_spec.contents);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn declaration<V: ASTVisitor>(v: &mut V, decl: &Declaration) {
|
|
||||||
use Declaration::*;
|
|
||||||
match decl {
|
|
||||||
FuncSig(sig) => {
|
|
||||||
v.signature(&sig);
|
|
||||||
signature(v, &sig);
|
|
||||||
},
|
|
||||||
FuncDecl(sig, block) => {
|
|
||||||
v.signature(&sig);
|
|
||||||
walk_block(v, block);
|
|
||||||
},
|
|
||||||
TypeDecl { name, body, mutable } => v.type_declaration(name, body, *mutable),
|
|
||||||
TypeAlias { alias, original} => v.type_alias(alias, original),
|
|
||||||
Binding { name, constant, type_anno, expr } => {
|
|
||||||
v.binding(name, *constant, type_anno.as_ref(), expr);
|
|
||||||
v.type_annotation(type_anno.as_ref());
|
|
||||||
v.expression(&expr);
|
|
||||||
expression(v, &expr);
|
|
||||||
},
|
|
||||||
Impl { type_name, interface_name, block } => {
|
|
||||||
v.implemention(type_name, interface_name.as_ref(), block);
|
|
||||||
}
|
|
||||||
Interface { name, signatures } => v.interface(name, signatures),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature<V: ASTVisitor>(v: &mut V, signature: &Signature) {
|
|
||||||
for p in signature.params.iter() {
|
|
||||||
v.formal_param(p);
|
|
||||||
}
|
|
||||||
v.type_annotation(signature.type_anno.as_ref());
|
|
||||||
for p in signature.params.iter() {
|
|
||||||
formal_param(v, p);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expression<V: ASTVisitor>(v: &mut V, expression: &Expression) {
|
|
||||||
v.expression_kind(&expression.kind);
|
|
||||||
v.type_annotation(expression.type_anno.as_ref());
|
|
||||||
expression_kind(v, &expression.kind);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
fn call<V: ASTVisitor>(v: &mut V, f: &Expression, args: &Vec<InvocationArgument>) {
|
|
||||||
v.expression(f);
|
|
||||||
expression(v, f);
|
|
||||||
for arg in args.iter() {
|
|
||||||
v.invocation_argument(arg);
|
|
||||||
invocation_argument(v, arg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn invocation_argument<V: ASTVisitor>(v: &mut V, arg: &InvocationArgument) {
|
|
||||||
use InvocationArgument::*;
|
|
||||||
match arg {
|
|
||||||
Positional(expr) => {
|
|
||||||
v.expression(expr);
|
|
||||||
expression(v, expr);
|
|
||||||
},
|
|
||||||
Keyword { expr, .. } => {
|
|
||||||
v.expression(expr);
|
|
||||||
expression(v, expr);
|
|
||||||
},
|
|
||||||
Ignored => (),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn index<V: ASTVisitor>(v: &mut V, indexee: &Expression, indexers: &Vec<Expression>) {
|
|
||||||
v.expression(indexee);
|
|
||||||
for i in indexers.iter() {
|
|
||||||
v.expression(i);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn named_struct<V: ASTVisitor>(v: &mut V, n: &QualifiedName, fields: &Vec<(Rc<String>, Expression)>) {
|
|
||||||
v.qualified_name(n);
|
|
||||||
for (_, expr) in fields.iter() {
|
|
||||||
v.expression(expr);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lambda<V: ASTVisitor>(v: &mut V, params: &Vec<FormalParam>, type_anno: Option<&TypeIdentifier>, body: &Block) {
|
|
||||||
for param in params {
|
|
||||||
v.formal_param(param);
|
|
||||||
formal_param(v, param);
|
|
||||||
}
|
|
||||||
v.type_annotation(type_anno);
|
|
||||||
walk_block(v, body);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn formal_param<V: ASTVisitor>(v: &mut V, param: &FormalParam) {
|
|
||||||
param.default.as_ref().map(|p| {
|
|
||||||
v.expression(p);
|
|
||||||
expression(v, p);
|
|
||||||
});
|
|
||||||
v.type_annotation(param.anno.as_ref());
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expression_kind<V: ASTVisitor>(v: &mut V, expression_kind: &ExpressionKind) {
|
|
||||||
use ExpressionKind::*;
|
|
||||||
match expression_kind {
|
|
||||||
NatLiteral(n) => v.nat_literal(*n),
|
|
||||||
FloatLiteral(f) => v.float_literal(*f),
|
|
||||||
StringLiteral(s) => v.string_literal(s),
|
|
||||||
BoolLiteral(b) => v.bool_literal(*b),
|
|
||||||
BinExp(op, lhs, rhs) => {
|
|
||||||
v.binexp(op, lhs, rhs);
|
|
||||||
expression(v, lhs);
|
|
||||||
expression(v, rhs);
|
|
||||||
},
|
|
||||||
PrefixExp(op, arg) => {
|
|
||||||
v.prefix_exp(op, arg);
|
|
||||||
expression(v, arg);
|
|
||||||
}
|
|
||||||
TupleLiteral(exprs) => {
|
|
||||||
for expr in exprs {
|
|
||||||
v.expression(expr);
|
|
||||||
expression(v, expr);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Value(name) => v.qualified_name(name),
|
|
||||||
NamedStruct { name, fields } => {
|
|
||||||
v.named_struct(name, fields);
|
|
||||||
named_struct(v, name, fields);
|
|
||||||
}
|
|
||||||
Call { f, arguments } => {
|
|
||||||
v.call(f, arguments);
|
|
||||||
call(v, f, arguments);
|
|
||||||
},
|
|
||||||
Index { indexee, indexers } => {
|
|
||||||
v.index(indexee, indexers);
|
|
||||||
index(v, indexee, indexers);
|
|
||||||
},
|
|
||||||
IfExpression { discriminator, body } => {
|
|
||||||
v.if_expression(deref_optional_box(discriminator), body);
|
|
||||||
discriminator.as_ref().map(|d| expression(v, d));
|
|
||||||
if_expression_body(v, body);
|
|
||||||
},
|
|
||||||
WhileExpression { condition, body } => v.while_expression(deref_optional_box(condition), body),
|
|
||||||
ForExpression { enumerators, body } => v.for_expression(enumerators, body),
|
|
||||||
Lambda { params , type_anno, body } => {
|
|
||||||
v.lambda(params, type_anno.as_ref(), body);
|
|
||||||
lambda(v, params, type_anno.as_ref(), body);
|
|
||||||
},
|
|
||||||
ListLiteral(exprs) => {
|
|
||||||
for expr in exprs {
|
|
||||||
v.expression(expr);
|
|
||||||
expression(v, expr);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn if_expression_body<V: ASTVisitor>(v: &mut V, body: &IfExpressionBody) {
|
|
||||||
use IfExpressionBody::*;
|
|
||||||
match body {
|
|
||||||
SimpleConditional { then_case, else_case } => {
|
|
||||||
walk_block(v, then_case);
|
|
||||||
else_case.as_ref().map(|block| walk_block(v, block));
|
|
||||||
},
|
|
||||||
SimplePatternMatch { pattern, then_case, else_case } => {
|
|
||||||
v.pattern(pattern);
|
|
||||||
walk_pattern(v, pattern);
|
|
||||||
walk_block(v, then_case);
|
|
||||||
else_case.as_ref().map(|block| walk_block(v, block));
|
|
||||||
},
|
|
||||||
CondList(arms) => {
|
|
||||||
for arm in arms {
|
|
||||||
v.condition_arm(arm);
|
|
||||||
condition_arm(v, arm);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn condition_arm<V: ASTVisitor>(v: &mut V, arm: &ConditionArm) {
|
|
||||||
use Condition::*;
|
|
||||||
v.condition_arm(arm);
|
|
||||||
match arm.condition {
|
|
||||||
Pattern(ref pat) => {
|
|
||||||
v.pattern(pat);
|
|
||||||
walk_pattern(v, pat);
|
|
||||||
},
|
|
||||||
TruncatedOp(ref _binop, ref expr) => {
|
|
||||||
v.expression(expr);
|
|
||||||
expression(v, expr);
|
|
||||||
},
|
|
||||||
Expression(ref expr) => {
|
|
||||||
v.expression(expr);
|
|
||||||
expression(v, expr);
|
|
||||||
},
|
|
||||||
_ => ()
|
|
||||||
}
|
|
||||||
arm.guard.as_ref().map(|guard| {
|
|
||||||
v.expression(guard);
|
|
||||||
expression(v, guard);
|
|
||||||
});
|
|
||||||
walk_block(v, &arm.body);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn walk_pattern<V: ASTVisitor>(v: &mut V, pat: &Pattern) {
|
|
||||||
use Pattern::*;
|
|
||||||
match pat {
|
|
||||||
TuplePattern(patterns) => {
|
|
||||||
for pat in patterns {
|
|
||||||
v.pattern(pat);
|
|
||||||
walk_pattern(v, pat);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
TupleStruct(qualified_name, patterns) => {
|
|
||||||
v.qualified_name(qualified_name);
|
|
||||||
for pat in patterns {
|
|
||||||
v.pattern(pat);
|
|
||||||
walk_pattern(v, pat);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Record(qualified_name, name_and_patterns) => {
|
|
||||||
v.qualified_name(qualified_name);
|
|
||||||
for (_, pat) in name_and_patterns {
|
|
||||||
v.pattern(pat);
|
|
||||||
walk_pattern(v, pat);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
VarOrName(qualified_name) => {
|
|
||||||
v.qualified_name(qualified_name);
|
|
||||||
},
|
|
||||||
_ => ()
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,102 +0,0 @@
|
|||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use crate::typechecking::{TypeConst, Type};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
|
||||||
pub enum Builtin {
|
|
||||||
Add,
|
|
||||||
Increment,
|
|
||||||
Subtract,
|
|
||||||
Negate,
|
|
||||||
Multiply,
|
|
||||||
Divide,
|
|
||||||
Quotient,
|
|
||||||
Modulo,
|
|
||||||
Exponentiation,
|
|
||||||
BitwiseAnd,
|
|
||||||
BitwiseOr,
|
|
||||||
BooleanAnd,
|
|
||||||
BooleanOr,
|
|
||||||
BooleanNot,
|
|
||||||
Equality,
|
|
||||||
LessThan,
|
|
||||||
LessThanOrEqual,
|
|
||||||
GreaterThan,
|
|
||||||
GreaterThanOrEqual,
|
|
||||||
Comparison,
|
|
||||||
FieldAccess,
|
|
||||||
IOPrint,
|
|
||||||
IOPrintLn,
|
|
||||||
IOGetLine,
|
|
||||||
Assignment,
|
|
||||||
Concatenate,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Builtin {
|
|
||||||
pub fn get_type(&self) -> Type {
|
|
||||||
use Builtin::*;
|
|
||||||
match self {
|
|
||||||
Add => ty!(Nat -> Nat -> Nat),
|
|
||||||
Subtract => ty!(Nat -> Nat -> Nat),
|
|
||||||
Multiply => ty!(Nat -> Nat -> Nat),
|
|
||||||
Divide => ty!(Nat -> Nat -> Float),
|
|
||||||
Quotient => ty!(Nat -> Nat -> Nat),
|
|
||||||
Modulo => ty!(Nat -> Nat -> Nat),
|
|
||||||
Exponentiation => ty!(Nat -> Nat -> Nat),
|
|
||||||
BitwiseAnd => ty!(Nat -> Nat -> Nat),
|
|
||||||
BitwiseOr => ty!(Nat -> Nat -> Nat),
|
|
||||||
BooleanAnd => ty!(Bool -> Bool -> Bool),
|
|
||||||
BooleanOr => ty!(Bool -> Bool -> Bool),
|
|
||||||
BooleanNot => ty!(Bool -> Bool),
|
|
||||||
Equality => ty!(Nat -> Nat -> Bool),
|
|
||||||
LessThan => ty!(Nat -> Nat -> Bool),
|
|
||||||
LessThanOrEqual => ty!(Nat -> Nat -> Bool),
|
|
||||||
GreaterThan => ty!(Nat -> Nat -> Bool),
|
|
||||||
GreaterThanOrEqual => ty!(Nat -> Nat -> Bool),
|
|
||||||
Comparison => ty!(Nat -> Nat -> Ordering),
|
|
||||||
FieldAccess => ty!(Unit),
|
|
||||||
IOPrint => ty!(Unit),
|
|
||||||
IOPrintLn => ty!(Unit) ,
|
|
||||||
IOGetLine => ty!(StringT),
|
|
||||||
Assignment => ty!(Unit),
|
|
||||||
Concatenate => ty!(StringT -> StringT -> StringT),
|
|
||||||
Increment => ty!(Nat -> Int),
|
|
||||||
Negate => ty!(Nat -> Int)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for Builtin {
|
|
||||||
type Err = ();
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
use Builtin::*;
|
|
||||||
Ok(match s {
|
|
||||||
"+" => Add,
|
|
||||||
"-" => Subtract,
|
|
||||||
"*" => Multiply,
|
|
||||||
"/" => Divide,
|
|
||||||
"quot" => Quotient,
|
|
||||||
"%" => Modulo,
|
|
||||||
"++" => Concatenate,
|
|
||||||
"^" => Exponentiation,
|
|
||||||
"&" => BitwiseAnd,
|
|
||||||
"&&" => BooleanAnd,
|
|
||||||
"|" => BitwiseOr,
|
|
||||||
"||" => BooleanOr,
|
|
||||||
"!" => BooleanNot,
|
|
||||||
">" => GreaterThan,
|
|
||||||
">=" => GreaterThanOrEqual,
|
|
||||||
"<" => LessThan,
|
|
||||||
"<=" => LessThanOrEqual,
|
|
||||||
"==" => Equality,
|
|
||||||
"=" => Assignment,
|
|
||||||
"<=>" => Comparison,
|
|
||||||
"." => FieldAccess,
|
|
||||||
"print" => IOPrint,
|
|
||||||
"println" => IOPrintLn,
|
|
||||||
"getline" => IOGetLine,
|
|
||||||
_ => return Err(())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,10 +0,0 @@
use crate::ast::*;

impl AST {
  pub fn compact_debug(&self) -> String {
    format!("{:?}", self)
  }
  pub fn expanded_debug(&self) -> String {
    format!("{:#?}", self)
  }
}
@@ -1,455 +0,0 @@
use std::rc::Rc;
|
|
||||||
use std::fmt::Write;
|
|
||||||
use std::io;
|
|
||||||
|
|
||||||
use itertools::Itertools;
|
|
||||||
|
|
||||||
use crate::schala::SymbolTableHandle;
|
|
||||||
use crate::util::ScopeStack;
|
|
||||||
use crate::reduced_ast::{BoundVars, ReducedAST, Stmt, Expr, Lit, Func, Alternative, Subpattern};
|
|
||||||
use crate::symbol_table::{SymbolSpec, Symbol, SymbolTable, FullyQualifiedSymbolName};
|
|
||||||
use crate::builtin::Builtin;
|
|
||||||
|
|
||||||
mod test;
|
|
||||||
|
|
||||||
pub struct State<'a> {
|
|
||||||
values: ScopeStack<'a, Rc<String>, ValueEntry>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> State<'a> {
|
|
||||||
pub fn new() -> State<'a> {
|
|
||||||
let values = ScopeStack::new(Some(format!("global")));
|
|
||||||
State { values }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn debug_print(&self) -> String {
|
|
||||||
format!("Values: {:?}", self.values)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_frame(&'a self, items: &'a Vec<Node>, bound_vars: &BoundVars) -> State<'a> {
|
|
||||||
let mut inner_state = State {
|
|
||||||
values: self.values.new_scope(None),
|
|
||||||
};
|
|
||||||
for (bound_var, val) in bound_vars.iter().zip(items.iter()) {
|
|
||||||
if let Some(bv) = bound_var.as_ref() {
|
|
||||||
inner_state.values.insert(bv.clone(), ValueEntry::Binding { constant: true, val: val.clone() });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
inner_state
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
enum Node {
|
|
||||||
Expr(Expr),
|
|
||||||
PrimObject {
|
|
||||||
name: Rc<String>,
|
|
||||||
tag: usize,
|
|
||||||
items: Vec<Node>,
|
|
||||||
},
|
|
||||||
PrimTuple {
|
|
||||||
items: Vec<Node>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn paren_wrapped_vec(terms: impl Iterator<Item=String>) -> String {
|
|
||||||
let mut buf = String::new();
|
|
||||||
write!(buf, "(").unwrap();
|
|
||||||
for term in terms.map(|e| Some(e)).intersperse(None) {
|
|
||||||
match term {
|
|
||||||
Some(e) => write!(buf, "{}", e).unwrap(),
|
|
||||||
None => write!(buf, ", ").unwrap(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
write!(buf, ")").unwrap();
|
|
||||||
buf
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
impl Node {
|
|
||||||
fn to_repl(&self) -> String {
|
|
||||||
match self {
|
|
||||||
Node::Expr(e) => e.to_repl(),
|
|
||||||
Node::PrimObject { name, items, .. } if items.len() == 0 => format!("{}", name),
|
|
||||||
Node::PrimObject { name, items, .. } => format!("{}{}", name, paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
|
|
||||||
Node::PrimTuple { items } => format!("{}", paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fn is_true(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Node::Expr(Expr::Lit(crate::reduced_ast::Lit::Bool(true))) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum ValueEntry {
|
|
||||||
Binding {
|
|
||||||
constant: bool,
|
|
||||||
val: /*FullyEvaluatedExpr*/ Node, //TODO make this use a subtype to represent fully evaluatedness
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type EvalResult<T> = Result<T, String>;
|
|
||||||
|
|
||||||
impl Expr {
|
|
||||||
fn to_node(self) -> Node {
|
|
||||||
Node::Expr(self)
|
|
||||||
}
|
|
||||||
fn to_repl(&self) -> String {
|
|
||||||
use self::Lit::*;
|
|
||||||
use self::Func::*;
|
|
||||||
|
|
||||||
match self {
|
|
||||||
Expr::Lit(ref l) => match l {
|
|
||||||
Nat(n) => format!("{}", n),
|
|
||||||
Int(i) => format!("{}", i),
|
|
||||||
Float(f) => format!("{}", f),
|
|
||||||
Bool(b) => format!("{}", b),
|
|
||||||
StringLit(s) => format!("\"{}\"", s),
|
|
||||||
},
|
|
||||||
Expr::Func(f) => match f {
|
|
||||||
BuiltIn(builtin) => format!("<built-in function '{:?}'>", builtin),
|
|
||||||
UserDefined { name: None, .. } => format!("<function>"),
|
|
||||||
UserDefined { name: Some(name), .. } => format!("<function '{}'>", name),
|
|
||||||
},
|
|
||||||
Expr::Constructor { type_name, arity, .. } => {
|
|
||||||
format!("<constructor for `{}` arity {}>", type_name, arity)
|
|
||||||
},
|
|
||||||
Expr::Tuple(exprs) => paren_wrapped_vec(exprs.iter().map(|x| x.to_repl())),
|
|
||||||
_ => format!("{:?}", self),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn replace_conditional_target_sigil(self, replacement: &Expr) -> Expr {
|
|
||||||
use self::Expr::*;
|
|
||||||
|
|
||||||
match self {
|
|
||||||
ConditionalTargetSigilValue => replacement.clone(),
|
|
||||||
Unit | Lit(_) | Func(_) | Sym(_) | Constructor { .. } |
|
|
||||||
CaseMatch { .. } | UnimplementedSigilValue | ReductionError(_) => self,
|
|
||||||
Tuple(exprs) => Tuple(exprs.into_iter().map(|e| e.replace_conditional_target_sigil(replacement)).collect()),
|
|
||||||
Call { f, args } => {
|
|
||||||
let new_args = args.into_iter().map(|e| e.replace_conditional_target_sigil(replacement)).collect();
|
|
||||||
Call { f, args: new_args }
|
|
||||||
},
|
|
||||||
Conditional { .. } => panic!("Dunno if I need this, but if so implement"),
|
|
||||||
Assign { .. } => panic!("I'm pretty sure I don't need this"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> State<'a> {
|
|
||||||
pub fn evaluate(&mut self, ast: ReducedAST, repl: bool) -> Vec<Result<String, String>> {
|
|
||||||
let mut acc = vec![];
|
|
||||||
|
|
||||||
// handle prebindings
|
|
||||||
for statement in ast.0.iter() {
|
|
||||||
self.prebinding(statement);
|
|
||||||
}
|
|
||||||
|
|
||||||
for statement in ast.0 {
|
|
||||||
match self.statement(statement) {
|
|
||||||
Ok(Some(ref output)) if repl => {
|
|
||||||
acc.push(Ok(output.to_repl()))
|
|
||||||
},
|
|
||||||
Ok(_) => (),
|
|
||||||
Err(error) => {
|
|
||||||
acc.push(Err(format!("Runtime error: {}", error)));
|
|
||||||
return acc;
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
acc
|
|
||||||
}
|
|
||||||
|
|
||||||
fn prebinding(&mut self, stmt: &Stmt) {
|
|
||||||
match stmt {
|
|
||||||
Stmt::PreBinding { name, func } => {
|
|
||||||
let v_entry = ValueEntry::Binding { constant: true, val: Node::Expr(Expr::Func(func.clone())) };
|
|
||||||
self.values.insert(name.clone(), v_entry);
|
|
||||||
},
|
|
||||||
Stmt::Expr(_expr) => {
|
|
||||||
//TODO have this support things like nested function defs
|
|
||||||
|
|
||||||
},
|
|
||||||
_ => ()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn statement(&mut self, stmt: Stmt) -> EvalResult<Option<Node>> {
|
|
||||||
match stmt {
|
|
||||||
Stmt::Binding { name, constant, expr } => {
|
|
||||||
let val = self.expression(Node::Expr(expr))?;
|
|
||||||
self.values.insert(name.clone(), ValueEntry::Binding { constant, val });
|
|
||||||
Ok(None)
|
|
||||||
},
|
|
||||||
Stmt::Expr(expr) => Ok(Some(self.expression(expr.to_node())?)),
|
|
||||||
Stmt::PreBinding {..} | Stmt::Noop => Ok(None),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn block(&mut self, stmts: Vec<Stmt>) -> EvalResult<Node> {
|
|
||||||
let mut ret = None;
|
|
||||||
for stmt in stmts {
|
|
||||||
ret = self.statement(stmt)?;
|
|
||||||
}
|
|
||||||
Ok(ret.unwrap_or(Node::Expr(Expr::Unit)))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expression(&mut self, node: Node) -> EvalResult<Node> {
|
|
||||||
use self::Expr::*;
|
|
||||||
match node {
|
|
||||||
t @ Node::PrimTuple { .. } => Ok(t),
|
|
||||||
obj @ Node::PrimObject { .. } => Ok(obj),
|
|
||||||
Node::Expr(expr) => match expr {
|
|
||||||
literal @ Lit(_) => Ok(Node::Expr(literal)),
|
|
||||||
Call { box f, args } => self.call_expression(f, args),
|
|
||||||
Sym(name) => Ok(match self.values.lookup(&name) {
|
|
||||||
Some(ValueEntry::Binding { val, .. }) => val.clone(),
|
|
||||||
None => return Err(format!("Could not look up symbol {}", name))
|
|
||||||
}),
|
|
||||||
Constructor { arity, ref name, tag, .. } if arity == 0 => Ok(Node::PrimObject { name: name.clone(), tag, items: vec![] }),
|
|
||||||
constructor @ Constructor { .. } => Ok(Node::Expr(constructor)),
|
|
||||||
func @ Func(_) => Ok(Node::Expr(func)),
|
|
||||||
Tuple(exprs) => {
|
|
||||||
let nodes = exprs.into_iter().map(|expr| self.expression(Node::Expr(expr))).collect::<Result<Vec<Node>,_>>()?;
|
|
||||||
Ok(Node::PrimTuple { items: nodes })
|
|
||||||
},
|
|
||||||
Conditional { box cond, then_clause, else_clause } => self.conditional(cond, then_clause, else_clause),
|
|
||||||
Assign { box val, box expr } => self.assign_expression(val, expr),
|
|
||||||
Unit => Ok(Node::Expr(Unit)),
|
|
||||||
CaseMatch { box cond, alternatives } => self.case_match_expression(cond, alternatives),
|
|
||||||
ConditionalTargetSigilValue => Ok(Node::Expr(ConditionalTargetSigilValue)),
|
|
||||||
UnimplementedSigilValue => Err(format!("Sigil value eval not implemented")),
|
|
||||||
ReductionError(err) => Err(format!("Reduction error: {}", err)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn call_expression(&mut self, f: Expr, args: Vec<Expr>) -> EvalResult<Node> {
|
|
||||||
use self::Expr::*;
|
|
||||||
match self.expression(Node::Expr(f))? {
|
|
||||||
Node::Expr(Constructor { type_name, name, tag, arity }) => self.apply_data_constructor(type_name, name, tag, arity, args),
|
|
||||||
Node::Expr(Func(f)) => self.apply_function(f, args),
|
|
||||||
other => return Err(format!("Tried to call {:?} which is not a function or data constructor", other)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn apply_data_constructor(&mut self, _type_name: Rc<String>, name: Rc<String>, tag: usize, arity: usize, args: Vec<Expr>) -> EvalResult<Node> {
|
|
||||||
if arity != args.len() {
|
|
||||||
return Err(format!("Data constructor {} requires {} arg(s)", name, arity));
|
|
||||||
}
|
|
||||||
|
|
||||||
let evaled_args = args.into_iter().map(|expr| self.expression(Node::Expr(expr))).collect::<Result<Vec<Node>,_>>()?;
|
|
||||||
//let evaled_args = vec![];
|
|
||||||
Ok(Node::PrimObject {
|
|
||||||
name: name.clone(),
|
|
||||||
items: evaled_args,
|
|
||||||
tag
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn apply_function(&mut self, f: Func, args: Vec<Expr>) -> EvalResult<Node> {
|
|
||||||
match f {
|
|
||||||
Func::BuiltIn(builtin) => Ok(self.apply_builtin(builtin, args)?),
|
|
||||||
Func::UserDefined { params, body, name } => {
|
|
||||||
|
|
||||||
if params.len() != args.len() {
|
|
||||||
return Err(format!("calling a {}-argument function with {} args", params.len(), args.len()))
|
|
||||||
}
|
|
||||||
let mut func_state = State {
|
|
||||||
values: self.values.new_scope(name.map(|n| format!("{}", n))),
|
|
||||||
};
|
|
||||||
for (param, val) in params.into_iter().zip(args.into_iter()) {
|
|
||||||
let val = func_state.expression(Node::Expr(val))?;
|
|
||||||
func_state.values.insert(param, ValueEntry::Binding { constant: true, val });
|
|
||||||
}
|
|
||||||
// TODO figure out function return semantics
|
|
||||||
func_state.block(body)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn apply_builtin(&mut self, builtin: Builtin, args: Vec<Expr>) -> EvalResult<Node> {
|
|
||||||
use self::Expr::*;
|
|
||||||
use self::Lit::*;
|
|
||||||
use Builtin::*;
|
|
||||||
|
|
||||||
let evaled_args: Result<Vec<Node>, String> = args.into_iter().map(|arg| self.expression(arg.to_node()))
|
|
||||||
.collect();
|
|
||||||
let evaled_args = evaled_args?;
|
|
||||||
|
|
||||||
Ok(match (builtin, evaled_args.as_slice()) {
|
|
||||||
(FieldAccess, &[Node::PrimObject { .. }]) => {
|
|
||||||
//TODO implement field access
|
|
||||||
unimplemented!()
|
|
||||||
},
|
|
||||||
(binop, &[Node::Expr(ref lhs), Node::Expr(ref rhs)]) => match (binop, lhs, rhs) {
|
|
||||||
/* binops */
|
|
||||||
(Add, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l + r)),
|
|
||||||
(Concatenate, Lit(StringLit(ref s1)), Lit(StringLit(ref s2))) => Lit(StringLit(Rc::new(format!("{}{}", s1, s2)))),
|
|
||||||
(Subtract, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l - r)),
|
|
||||||
(Multiply, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l * r)),
|
|
||||||
(Divide, Lit(Nat(l)), Lit(Nat(r))) => Lit(Float((*l as f64)/ (*r as f64))),
|
|
||||||
(Quotient, Lit(Nat(l)), Lit(Nat(r))) => if *r == 0 {
|
|
||||||
return Err(format!("divide by zero"));
|
|
||||||
} else {
|
|
||||||
Lit(Nat(l / r))
|
|
||||||
},
|
|
||||||
(Modulo, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l % r)),
|
|
||||||
// note: `^` on Rust integers is bitwise XOR, so use integer pow for the exponentiation builtin
(Exponentiation, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l.pow(*r as u32))),
|
|
||||||
(BitwiseAnd, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l & r)),
|
|
||||||
(BitwiseOr, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l | r)),
|
|
||||||
|
|
||||||
/* comparisons */
|
|
||||||
(Equality, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l == r)),
|
|
||||||
(Equality, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l == r)),
|
|
||||||
(Equality, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l == r)),
|
|
||||||
(Equality, Lit(Bool(l)), Lit(Bool(r))) => Lit(Bool(l == r)),
|
|
||||||
(Equality, Lit(StringLit(ref l)), Lit(StringLit(ref r))) => Lit(Bool(l == r)),
|
|
||||||
|
|
||||||
(LessThan, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l < r)),
|
|
||||||
(LessThan, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l < r)),
|
|
||||||
(LessThan, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l < r)),
|
|
||||||
|
|
||||||
(LessThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l <= r)),
|
|
||||||
(LessThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l <= r)),
|
|
||||||
(LessThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l <= r)),
|
|
||||||
|
|
||||||
(GreaterThan, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l > r)),
|
|
||||||
(GreaterThan, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l > r)),
|
|
||||||
(GreaterThan, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l > r)),
|
|
||||||
|
|
||||||
(GreaterThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l >= r)),
|
|
||||||
(GreaterThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l >= r)),
|
|
||||||
(GreaterThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l >= r)),
|
|
||||||
_ => return Err("No valid binop".to_string())
|
|
||||||
}.to_node(),
|
|
||||||
(prefix, &[Node::Expr(ref arg)]) => match (prefix, arg) {
|
|
||||||
(BooleanNot, Lit(Bool(true))) => Lit(Bool(false)),
|
|
||||||
(BooleanNot, Lit(Bool(false))) => Lit(Bool(true)),
|
|
||||||
(Negate, Lit(Nat(n))) => Lit(Int(-1*(*n as i64))),
|
|
||||||
(Negate, Lit(Int(n))) => Lit(Int(-1*(*n as i64))),
|
|
||||||
(Increment, Lit(Int(n))) => Lit(Int(*n)),
|
|
||||||
(Increment, Lit(Nat(n))) => Lit(Nat(*n)),
|
|
||||||
_ => return Err("No valid prefix op".to_string())
|
|
||||||
}.to_node(),
|
|
||||||
|
|
||||||
/* builtin functions */
|
|
||||||
(IOPrint, &[ref anything]) => {
|
|
||||||
print!("{}", anything.to_repl());
|
|
||||||
Expr::Unit.to_node()
|
|
||||||
},
|
|
||||||
(IOPrintLn, &[ref anything]) => {
|
|
||||||
println!("{}", anything.to_repl());
|
|
||||||
Expr::Unit.to_node()
|
|
||||||
},
|
|
||||||
(IOGetLine, &[]) => {
|
|
||||||
let mut buf = String::new();
|
|
||||||
io::stdin().read_line(&mut buf).expect("Error reading line in 'getline'");
|
|
||||||
Lit(StringLit(Rc::new(buf.trim().to_string()))).to_node()
|
|
||||||
},
|
|
||||||
(x, args) => return Err(format!("bad or unimplemented builtin {:?} | {:?}", x, args)),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn conditional(&mut self, cond: Expr, then_clause: Vec<Stmt>, else_clause: Vec<Stmt>) -> EvalResult<Node> {
|
|
||||||
let cond = self.expression(Node::Expr(cond))?;
|
|
||||||
Ok(match cond {
|
|
||||||
Node::Expr(Expr::Lit(Lit::Bool(true))) => self.block(then_clause)?,
|
|
||||||
Node::Expr(Expr::Lit(Lit::Bool(false))) => self.block(else_clause)?,
|
|
||||||
_ => return Err(format!("Conditional with non-boolean condition"))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn assign_expression(&mut self, val: Expr, expr: Expr) -> EvalResult<Node> {
|
|
||||||
let name = match val {
|
|
||||||
Expr::Sym(name) => name,
|
|
||||||
_ => return Err(format!("Trying to assign to a non-value")),
|
|
||||||
};
|
|
||||||
|
|
||||||
let constant = match self.values.lookup(&name) {
|
|
||||||
None => return Err(format!("Constant {} is undefined", name)),
|
|
||||||
Some(ValueEntry::Binding { constant, .. }) => constant.clone(),
|
|
||||||
};
|
|
||||||
if constant {
|
|
||||||
return Err(format!("trying to update {}, a non-mutable binding", name));
|
|
||||||
}
|
|
||||||
let val = self.expression(Node::Expr(expr))?;
|
|
||||||
self.values.insert(name.clone(), ValueEntry::Binding { constant: false, val });
|
|
||||||
Ok(Node::Expr(Expr::Unit))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn guard_passes(&mut self, guard: &Option<Expr>, cond: &Node) -> EvalResult<bool> {
|
|
||||||
if let Some(ref guard_expr) = guard {
|
|
||||||
let guard_expr = match cond {
|
|
||||||
Node::Expr(ref e) => guard_expr.clone().replace_conditional_target_sigil(e),
|
|
||||||
_ => guard_expr.clone()
|
|
||||||
};
|
|
||||||
Ok(self.expression(guard_expr.to_node())?.is_true())
|
|
||||||
} else {
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>) -> EvalResult<Node> {
|
|
||||||
|
|
||||||
//TODO need to handle recursive subpatterns
|
|
||||||
let all_subpatterns_pass = |state: &mut State, subpatterns: &Vec<Option<Subpattern>>, items: &Vec<Node>| -> EvalResult<bool> {
|
|
||||||
|
|
||||||
if subpatterns.len() == 0 {
|
|
||||||
return Ok(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
if items.len() != subpatterns.len() {
|
|
||||||
return Err(format!("Subpattern length isn't correct items {} subpatterns {}", items.len(), subpatterns.len()));
|
|
||||||
}
|
|
||||||
|
|
||||||
for (maybe_subp, cond) in subpatterns.iter().zip(items.iter()) {
|
|
||||||
if let Some(subp) = maybe_subp {
|
|
||||||
if !state.guard_passes(&subp.guard, &cond)? {
|
|
||||||
return Ok(false)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
};
|
|
||||||
|
|
||||||
let cond = self.expression(Node::Expr(cond))?;
|
|
||||||
for alt in alternatives {
|
|
||||||
// no matter what type of condition we have, ignore alternative if the guard evaluates false
|
|
||||||
if !self.guard_passes(&alt.matchable.guard, &cond)? {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
match cond {
|
|
||||||
Node::PrimObject { ref tag, ref items, .. } => {
|
|
||||||
if alt.matchable.tag.map(|t| t == *tag).unwrap_or(true) {
|
|
||||||
let mut inner_state = self.new_frame(items, &alt.matchable.bound_vars);
|
|
||||||
if all_subpatterns_pass(&mut inner_state, &alt.matchable.subpatterns, items)? {
|
|
||||||
return inner_state.block(alt.item);
|
|
||||||
} else {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Node::PrimTuple { ref items } => {
|
|
||||||
let mut inner_state = self.new_frame(items, &alt.matchable.bound_vars);
|
|
||||||
if all_subpatterns_pass(&mut inner_state, &alt.matchable.subpatterns, items)? {
|
|
||||||
return inner_state.block(alt.item);
|
|
||||||
} else {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Node::Expr(ref _e) => {
|
|
||||||
if let None = alt.matchable.tag {
|
|
||||||
return self.block(alt.item)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(format!("{:?} failed pattern match", cond))
|
|
||||||
}
|
|
||||||
}
|
|
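For reference, a hedged sketch (not part of the original diff) of what the `paren_wrapped_vec` helper defined in the deleted eval.rs above produces; it is the formatter behind the tuple/object REPL strings that the tests below assert against. The snippet assumes it runs inside that module so the private function is in scope.

// Illustrative only: paren_wrapped_vec joins the rendered items with ", " and wraps
// them in parentheses, so a two-item tuple of 1 and "a" renders as (1, "a").
let rendered = paren_wrapped_vec(vec!["1".to_string(), "\"a\"".to_string()].into_iter());
assert_eq!(rendered, r#"(1, "a")"#);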
@@ -1,269 +0,0 @@
#![cfg(test)]

use std::cell::RefCell;
use std::rc::Rc;

use crate::symbol_table::SymbolTable;
use crate::scope_resolution::ScopeResolver;
use crate::reduced_ast::reduce;
use crate::eval::State;

fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
  let (mut ast, source_map) = crate::util::quick_ast(input);
  let source_map = Rc::new(RefCell::new(source_map));
  let symbol_table = Rc::new(RefCell::new(SymbolTable::new(source_map)));
  symbol_table.borrow_mut().add_top_level_symbols(&ast).unwrap();
  {
    let mut scope_resolver = ScopeResolver::new(symbol_table.clone());
    let _ = scope_resolver.resolve(&mut ast);
  }

  let reduced = reduce(&ast, &symbol_table.borrow());
  let mut state = State::new();
  let all_output = state.evaluate(reduced, true);
  all_output
}

macro_rules! test_in_fresh_env {
  ($string:expr, $correct:expr) => {
    {
      let all_output = evaluate_all_outputs($string);
      let ref output = all_output.last().unwrap();
      assert_eq!(**output, Ok($correct.to_string()));
    }
  }
}

#[test]
fn test_basic_eval() {
  test_in_fresh_env!("1 + 2", "3");
  test_in_fresh_env!("let mut a = 1; a = 2", "Unit");
  /*
  test_in_fresh_env!("let mut a = 1; a = 2; a", "2");
  test_in_fresh_env!(r#"("a", 1 + 2)"#, r#"("a", 3)"#);
  */
}

#[test]
fn op_eval() {
  test_in_fresh_env!("- 13", "-13");
  test_in_fresh_env!("10 - 2", "8");
}

#[test]
fn function_eval() {
  test_in_fresh_env!("fn oi(x) { x + 1 }; oi(4)", "5");
  test_in_fresh_env!("fn oi(x) { x + 1 }; oi(1+2)", "4");
}

#[test]
fn scopes() {
  let scope_ok = r#"
let a = 20
fn haha() {
let a = 10
a
}
haha()
"#;
  test_in_fresh_env!(scope_ok, "10");
  let scope_ok = r#"
let a = 20
fn queque() {
let a = 10
a
}
a
"#;
  test_in_fresh_env!(scope_ok, "20");
}

#[test]
fn if_is_patterns() {
  let source = r#"
type Option<T> = Some(T) | None
let x = Option::Some(9); if x is Option::Some(q) then { q } else { 0 }"#;
  test_in_fresh_env!(source, "9");

  let source = r#"
type Option<T> = Some(T) | None
let x = Option::None; if x is Option::Some(q) then { q } else { 0 }"#;
  test_in_fresh_env!(source, "0");
}

#[test]
fn full_if_matching() {
  let source = r#"
type Option<T> = Some(T) | None
let a = Option::None
if a { is Option::None then 4, is Option::Some(x) then x }
"#;
  test_in_fresh_env!(source, "4");

  let source = r#"
type Option<T> = Some(T) | None
let a = Option::Some(99)
if a { is Option::None then 4, is Option::Some(x) then x }
"#;
  test_in_fresh_env!(source, "99");

  let source = r#"
let a = 10
if a { is 10 then "x", is 4 then "y" }
"#;
  test_in_fresh_env!(source, "\"x\"");

  let source = r#"
let a = 10
if a { is 15 then "x", is 10 then "y" }
"#;
  test_in_fresh_env!(source, "\"y\"");
}

#[test]
fn string_pattern() {
  let source = r#"
let a = "foo"
if a { is "foo" then "x", is _ then "y" }
"#;
  test_in_fresh_env!(source, "\"x\"");
}

#[test]
fn boolean_pattern() {
  let source = r#"
let a = true
if a {
is true then "x",
is false then "y"
}
"#;
  test_in_fresh_env!(source, "\"x\"");
}

#[test]
fn boolean_pattern_2() {
  let source = r#"
let a = false
if a { is true then "x", is false then "y" }
"#;
  test_in_fresh_env!(source, "\"y\"");
}

#[test]
fn ignore_pattern() {
  let source = r#"
type Option<T> = Some(T) | None
if Option::Some(10) {
is _ then "hella"
}
"#;
  test_in_fresh_env!(source, "\"hella\"");
}

#[test]
fn tuple_pattern() {
  let source = r#"
if (1, 2) {
is (1, x) then x,
is _ then 99
}
"#;
  test_in_fresh_env!(source, 2);
}

#[test]
fn tuple_pattern_2() {
  let source = r#"
if (1, 2) {
is (10, x) then x,
is (y, x) then x + y
}
"#;
  test_in_fresh_env!(source, 3);
}

#[test]
fn tuple_pattern_3() {
  let source = r#"
if (1, 5) {
is (10, x) then x,
is (1, x) then x
}
"#;
  test_in_fresh_env!(source, 5);
}

#[test]
fn tuple_pattern_4() {
  let source = r#"
if (1, 5) {
is (10, x) then x,
is (1, x) then x,
}
"#;
  test_in_fresh_env!(source, 5);
}

#[test]
fn prim_obj_pattern() {
  let source = r#"
type Stuff = Mulch(Nat) | Jugs(Nat, String) | Mardok
let a = Stuff::Mulch(20)
let b = Stuff::Jugs(1, "haha")
let c = Stuff::Mardok

let x = if a {
is Stuff::Mulch(20) then "x",
is _ then "ERR"
}

let y = if b {
is Stuff::Mulch(n) then "ERR",
is Stuff::Jugs(2, _) then "ERR",
is Stuff::Jugs(1, s) then s,
is _ then "ERR",
}

let z = if c {
is Stuff::Jugs(_, _) then "ERR",
is Stuff::Mardok then "NIGH",
is _ then "ERR",
}

(x, y, z)
"#;
  test_in_fresh_env!(source, r#"("x", "haha", "NIGH")"#);
}

#[test]
fn basic_lambda_syntax() {
  let source = r#"
let q = \(x, y) { x * y }
let x = q(5,2)
let y = \(m, n, o) { m + n + o }(1,2,3)
(x, y)
"#;
  test_in_fresh_env!(source, r"(10, 6)");
}

#[test]
fn lambda_syntax_2() {
  let source = r#"
fn milta() {
\(x) { x + 33 }
}
milta()(10)
"#;
  test_in_fresh_env!(source, "43");
}

#[test]
fn import_all() {
  let source = r#"
type Option<T> = Some(T) | None
import Option::*
let x = Some(9); if x is Some(q) then { q } else { 0 }"#;
  test_in_fresh_env!(source, "9");
}
@@ -1,46 +0,0 @@
#![feature(associated_type_defaults)] //needed for Visitor trait
#![feature(trace_macros)]
#![feature(slice_patterns, box_patterns, box_syntax)]

//! `schala-lang` is where the Schala programming language is actually implemented.
//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
//! `ProgrammingLanguageInterface` and the chain of compiler passes for it.

extern crate itertools;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate maplit;
extern crate schala_repl;
#[macro_use]
extern crate schala_lang_codegen;
extern crate ena;
extern crate derivative;
extern crate colored;
extern crate radix_trie;

macro_rules! bx {
  ($e:expr) => { Box::new($e) }
}

#[macro_use]
mod util;
#[macro_use]
mod typechecking;
mod debugging;

mod tokenizing;
mod ast;
mod parsing;
#[macro_use]
mod symbol_table;
mod scope_resolution;
mod builtin;
mod reduced_ast;
mod eval;
mod source_map;

mod schala;

pub use schala::Schala;
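Aside (not part of the original diff): the `bx!` macro declared above is plain shorthand, and a minimal sketch of an invocation shows why the parser tests further down write things like bx!(ex!(val!("a"))) for boxed AST nodes.

// Illustrative only: bx!(expr) expands to Box::new(expr).
let boxed: Box<i32> = bx!(1 + 2);
assert_eq!(*boxed, 3);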
File diff suppressed because it is too large
@@ -1,828 +0,0 @@
#![cfg(test)]
|
|
||||||
use std::cell::RefCell;
|
|
||||||
use std::rc::Rc;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use super::{Parser, ParseResult, tokenize};
|
|
||||||
use crate::ast::*;
|
|
||||||
use super::Declaration::*;
|
|
||||||
use super::Signature;
|
|
||||||
use super::TypeIdentifier::*;
|
|
||||||
use super::TypeSingletonName;
|
|
||||||
use super::ExpressionKind::*;
|
|
||||||
use super::Variant::*;
|
|
||||||
use super::ForBody::*;
|
|
||||||
|
|
||||||
fn make_parser(input: &str) -> Parser {
|
|
||||||
let source_map = crate::source_map::SourceMap::new();
|
|
||||||
let source_map_handle = Rc::new(RefCell::new(source_map));
|
|
||||||
let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
|
|
||||||
let mut parser = super::Parser::new(source_map_handle);
|
|
||||||
parser.add_new_tokens(tokens);
|
|
||||||
parser
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse(input: &str) -> ParseResult<AST> {
|
|
||||||
let mut parser = make_parser(input);
|
|
||||||
parser.parse()
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! parse_test {
|
|
||||||
($string:expr, $correct:expr) => {
|
|
||||||
assert_eq!(parse($string).unwrap(), $correct)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
macro_rules! parse_test_wrap_ast {
|
|
||||||
($string:expr, $correct:expr) => { parse_test!($string, AST { id: ItemIdStore::new_id(), statements: vec![$correct] }) }
|
|
||||||
}
|
|
||||||
macro_rules! parse_error {
|
|
||||||
($string:expr) => { assert!(parse($string).is_err()) }
|
|
||||||
}
|
|
||||||
macro_rules! qname {
|
|
||||||
( $( $component:expr),* ) => {
|
|
||||||
{
|
|
||||||
let mut components = vec![];
|
|
||||||
$(
|
|
||||||
components.push(rc!($component));
|
|
||||||
)*
|
|
||||||
QualifiedName { components, id: ItemIdStore::new_id() }
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
macro_rules! val {
|
|
||||||
($var:expr) => { Value(QualifiedName { components: vec![Rc::new($var.to_string())], id: ItemIdStore::new_id() }) };
|
|
||||||
}
|
|
||||||
macro_rules! ty {
|
|
||||||
($name:expr) => { Singleton(tys!($name)) }
|
|
||||||
}
|
|
||||||
macro_rules! tys {
|
|
||||||
($name:expr) => { TypeSingletonName { name: Rc::new($name.to_string()), params: vec![] } };
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! decl {
|
|
||||||
($expr_type:expr) => {
|
|
||||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Declaration($expr_type) }
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! import {
|
|
||||||
($import_spec:expr) => {
|
|
||||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Import($import_spec) }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! module {
|
|
||||||
($module_spec:expr) => {
|
|
||||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Module($module_spec) }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! ex {
|
|
||||||
($expr_type:expr) => { Expression::new(ItemIdStore::new_id(), $expr_type) };
|
|
||||||
($expr_type:expr, $type_anno:expr) => { Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno) };
|
|
||||||
(s $expr_text:expr) => {
|
|
||||||
{
|
|
||||||
let mut parser = make_parser($expr_text);
|
|
||||||
parser.expression().unwrap()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! inv {
|
|
||||||
($expr_type:expr) => { InvocationArgument::Positional($expr_type) }
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! binexp {
|
|
||||||
($op:expr, $lhs:expr, $rhs:expr) => { BinExp(BinOp::from_sigil($op), bx!(Expression::new(ItemIdStore::new_id(), $lhs).into()), bx!(Expression::new(ItemIdStore::new_id(), $rhs).into())) }
|
|
||||||
}
|
|
||||||
macro_rules! prefexp {
|
|
||||||
($op:expr, $lhs:expr) => { PrefixExp(PrefixOp::from_str($op).unwrap(), bx!(Expression::new(ItemIdStore::new_id(), $lhs).into())) }
|
|
||||||
}
|
|
||||||
macro_rules! exst {
|
|
||||||
($expr_type:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::new(ItemIdStore::new_id(), $expr_type).into())} };
|
|
||||||
($expr_type:expr, $type_anno:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno).into())} };
|
|
||||||
($op:expr, $lhs:expr, $rhs:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(ex!(binexp!($op, $lhs, $rhs)))}
|
|
||||||
};
|
|
||||||
(s $statement_text:expr) => {
|
|
||||||
{
|
|
||||||
let mut parser = make_parser($statement_text);
|
|
||||||
parser.statement().unwrap()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_number_literals_and_binexps() {
|
|
||||||
parse_test_wrap_ast! { ".2", exst!(FloatLiteral(0.2)) };
|
|
||||||
parse_test_wrap_ast! { "8.1", exst!(FloatLiteral(8.1)) };
|
|
||||||
|
|
||||||
parse_test_wrap_ast! { "0b010", exst!(NatLiteral(2)) };
|
|
||||||
parse_test_wrap_ast! { "0b0_1_0_", exst!(NatLiteral(2)) }
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {"0xff", exst!(NatLiteral(255)) };
|
|
||||||
parse_test_wrap_ast! {"0xf_f_", exst!(NatLiteral(255)) };
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {"0xf_f_+1", exst!(binexp!("+", NatLiteral(255), NatLiteral(1))) };
|
|
||||||
|
|
||||||
parse_test! {"3; 4; 4.3",
|
|
||||||
AST {
|
|
||||||
id: ItemIdStore::new_id(),
|
|
||||||
statements: vec![exst!(NatLiteral(3)), exst!(NatLiteral(4)),
|
|
||||||
exst!(FloatLiteral(4.3))]
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("1 + 2 * 3",
|
|
||||||
exst!(binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))))
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("1 * 2 + 3",
|
|
||||||
exst!(binexp!("+", binexp!("*", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))
|
|
||||||
) ;
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("1 && 2", exst!(binexp!("&&", NatLiteral(1), NatLiteral(2))));
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("1 + 2 * 3 + 4", exst!(
|
|
||||||
binexp!("+",
|
|
||||||
binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))),
|
|
||||||
NatLiteral(4))));
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("(1 + 2) * 3",
|
|
||||||
exst!(binexp!("*", binexp!("+", NatLiteral(1), NatLiteral(2)), NatLiteral(3))));
|
|
||||||
|
|
||||||
parse_test_wrap_ast!(".1 + .2", exst!(binexp!("+", FloatLiteral(0.1), FloatLiteral(0.2))));
|
|
||||||
parse_test_wrap_ast!("1 / 2", exst!(binexp!("/", NatLiteral(1), NatLiteral(2))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_tuples() {
|
|
||||||
parse_test_wrap_ast!("()", exst!(TupleLiteral(vec![])));
|
|
||||||
parse_test_wrap_ast!("(\"hella\", 34)", exst!(
|
|
||||||
TupleLiteral(
|
|
||||||
vec![ex!(s r#""hella""#).into(), ex!(s "34").into()]
|
|
||||||
)
|
|
||||||
));
|
|
||||||
parse_test_wrap_ast!("((1+2), \"slough\")", exst!(TupleLiteral(vec![
|
|
||||||
ex!(binexp!("+", NatLiteral(1), NatLiteral(2))).into(),
|
|
||||||
ex!(StringLiteral(rc!(slough))).into(),
|
|
||||||
])))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_identifiers() {
|
|
||||||
parse_test_wrap_ast!("a", exst!(val!("a")));
|
|
||||||
parse_test_wrap_ast!("some_value", exst!(val!("some_value")));
|
|
||||||
parse_test_wrap_ast!("a + b", exst!(binexp!("+", val!("a"), val!("b"))));
|
|
||||||
//parse_test!("a[b]", AST(vec![Expression(
|
|
||||||
//parse_test!("a[]", <- TODO THIS NEEDS TO FAIL
|
|
||||||
//parse_test("a()[b]()[d]")
|
|
||||||
//TODO fix this parsing stuff
|
|
||||||
/*
|
|
||||||
parse_test! { "perspicacity()[a]", AST(vec![
|
|
||||||
exst!(Index {
|
|
||||||
indexee: bx!(ex!(Call { f: bx!(ex!(val!("perspicacity"))), arguments: vec![] })),
|
|
||||||
indexers: vec![ex!(val!("a"))]
|
|
||||||
})
|
|
||||||
])
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
parse_test_wrap_ast!("a[b,c]", exst!(Index { indexee: bx!(ex!(val!("a"))), indexers: vec![ex!(val!("b")), ex!(val!("c"))]} ));
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("None", exst!(val!("None")));
|
|
||||||
parse_test_wrap_ast!("Pandas { a: x + y }",
|
|
||||||
exst!(NamedStruct { name: qname!(Pandas), fields: vec![(rc!(a), ex!(binexp!("+", val!("x"), val!("y"))))]})
|
|
||||||
);
|
|
||||||
parse_test_wrap_ast! { "Pandas { a: n, b: q, }",
|
|
||||||
exst!(NamedStruct { name: qname!(Pandas), fields:
|
|
||||||
vec![(rc!(a), ex!(val!("n"))), (rc!(b), ex!(val!("q")))]
|
|
||||||
}
|
|
||||||
)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn qualified_identifiers() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"let q_q = Yolo::Swaggins",
|
|
||||||
decl!(Binding { name: rc!(q_q), constant: true, type_anno: None,
|
|
||||||
expr: Expression::new(ItemIdStore::new_id(), Value(qname!(Yolo, Swaggins))),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"thing::item::call()",
|
|
||||||
exst!(Call { f: bx![ex!(Value(qname!(thing, item, call)))], arguments: vec![] })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn reserved_words() {
|
|
||||||
parse_error!("module::item::call()");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_complicated_operators() {
|
|
||||||
parse_test_wrap_ast!("a <- b", exst!(binexp!("<-", val!("a"), val!("b"))));
|
|
||||||
parse_test_wrap_ast!("a || b", exst!(binexp!("||", val!("a"), val!("b"))));
|
|
||||||
parse_test_wrap_ast!("a<>b", exst!(binexp!("<>", val!("a"), val!("b"))));
|
|
||||||
parse_test_wrap_ast!("a.b.c.d", exst!(binexp!(".",
|
|
||||||
binexp!(".",
|
|
||||||
binexp!(".", val!("a"), val!("b")),
|
|
||||||
val!("c")),
|
|
||||||
val!("d"))));
|
|
||||||
parse_test_wrap_ast!("-3", exst!(prefexp!("-", NatLiteral(3))));
|
|
||||||
parse_test_wrap_ast!("-0.2", exst!(prefexp!("-", FloatLiteral(0.2))));
|
|
||||||
parse_test_wrap_ast!("!3", exst!(prefexp!("!", NatLiteral(3))));
|
|
||||||
parse_test_wrap_ast!("a <- -b", exst!(binexp!("<-", val!("a"), prefexp!("-", val!("b")))));
|
|
||||||
parse_test_wrap_ast!("a <--b", exst!(binexp!("<--", val!("a"), val!("b"))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_functions() {
|
|
||||||
parse_test_wrap_ast!("fn oi()", decl!(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None })));
|
|
||||||
parse_test_wrap_ast!("oi()", exst!(Call { f: bx!(ex!(val!("oi"))), arguments: vec![] }));
|
|
||||||
parse_test_wrap_ast!("oi(a, 2 + 2)", exst!(Call
|
|
||||||
{ f: bx!(ex!(val!("oi"))),
|
|
||||||
arguments: vec![inv!(ex!(val!("a"))), inv!(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))).into()]
|
|
||||||
}));
|
|
||||||
parse_error!("a(b,,c)");
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("fn a(b, c: Int): Int", decl!(
|
|
||||||
FuncSig(Signature { name: rc!(a), operator: false, params: vec![
|
|
||||||
FormalParam { name: rc!(b), anno: None, default: None },
|
|
||||||
FormalParam { name: rc!(c), anno: Some(ty!("Int")), default: None }
|
|
||||||
], type_anno: Some(ty!("Int")) })));
|
|
||||||
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("fn a(x) { x() }", decl!(
|
|
||||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
|
|
||||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
|
||||||
parse_test_wrap_ast!("fn a(x) {\n x() }", decl!(
|
|
||||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
|
|
||||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
|
||||||
|
|
||||||
let multiline = r#"
|
|
||||||
fn a(x) {
|
|
||||||
x()
|
|
||||||
}
|
|
||||||
"#;
|
|
||||||
parse_test_wrap_ast!(multiline, decl!(
|
|
||||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), default: None, anno: None }], type_anno: None },
|
|
||||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
|
||||||
let multiline2 = r#"
|
|
||||||
fn a(x) {
|
|
||||||
|
|
||||||
x()
|
|
||||||
|
|
||||||
}
|
|
||||||
"#;
|
|
||||||
parse_test_wrap_ast!(multiline2, decl!(
|
|
||||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), default: None, anno: None }], type_anno: None },
|
|
||||||
vec![exst!(s "x()")])));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn functions_with_default_args() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"fn func(x: Int, y: Int = 4) { }",
|
|
||||||
decl!(
|
|
||||||
FuncDecl(Signature { name: rc!(func), operator: false, type_anno: None, params: vec![
|
|
||||||
FormalParam { name: rc!(x), default: None, anno: Some(ty!("Int")) },
|
|
||||||
FormalParam { name: rc!(y), default: Some(ex!(s "4")), anno: Some(ty!("Int")) }
|
|
||||||
]}, vec![])
|
|
||||||
)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_bools() {
|
|
||||||
parse_test_wrap_ast!("false", exst!(BoolLiteral(false)));
|
|
||||||
parse_test_wrap_ast!("true", exst!(BoolLiteral(true)));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_strings() {
|
|
||||||
parse_test_wrap_ast!(r#""hello""#, exst!(StringLiteral(rc!(hello))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_types() {
|
|
||||||
parse_test_wrap_ast!("type Yolo = Yolo", decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: false} ));
|
|
||||||
parse_test_wrap_ast!("type mut Yolo = Yolo", decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: true} ));
|
|
||||||
parse_test_wrap_ast!("type alias Sex = Drugs", decl!(TypeAlias { alias: rc!(Sex), original: rc!(Drugs) }));
|
|
||||||
parse_test_wrap_ast!("type Sanchez = Miguel | Alejandro(Int, Option<a>) | Esperanza { a: Int, b: String }",
|
|
||||||
decl!(TypeDecl {
|
|
||||||
name: tys!("Sanchez"),
|
|
||||||
body: TypeBody(vec![
|
|
||||||
UnitStruct(rc!(Miguel)),
|
|
||||||
TupleStruct(rc!(Alejandro), vec![
|
|
||||||
Singleton(TypeSingletonName { name: rc!(Int), params: vec![] }),
|
|
||||||
Singleton(TypeSingletonName { name: rc!(Option), params: vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })] }),
|
|
||||||
]),
|
|
||||||
Record{
|
|
||||||
name: rc!(Esperanza),
|
|
||||||
members: vec![
|
|
||||||
(rc!(a), Singleton(TypeSingletonName { name: rc!(Int), params: vec![] })),
|
|
||||||
(rc!(b), Singleton(TypeSingletonName { name: rc!(String), params: vec![] })),
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]),
|
|
||||||
mutable: false
|
|
||||||
}));
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"type Jorge<a> = Diego | Kike(a)",
|
|
||||||
decl!(TypeDecl{
|
|
||||||
name: TypeSingletonName { name: rc!(Jorge), params: vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })] },
|
|
||||||
body: TypeBody(vec![UnitStruct(rc!(Diego)), TupleStruct(rc!(Kike), vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })])]),
|
|
||||||
mutable: false
|
|
||||||
}
|
|
||||||
)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_bindings() {
|
|
||||||
parse_test_wrap_ast!("let mut a = 10", decl!(Binding { name: rc!(a), constant: false, type_anno: None, expr: ex!(NatLiteral(10)) } ));
|
|
||||||
parse_test_wrap_ast!("let a = 2 + 2", decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }));
|
|
||||||
parse_test_wrap_ast!("let a: Nat = 2 + 2", decl!(
|
|
||||||
Binding { name: rc!(a), constant: true, type_anno: Some(Singleton(TypeSingletonName { name: rc!(Nat), params: vec![] })),
|
|
||||||
expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_block_expressions() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if a() then { b(); c() }", exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx! {
|
|
||||||
ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})
|
|
||||||
}),
|
|
||||||
body: bx! {
|
|
||||||
IfExpressionBody::SimpleConditional {
|
|
||||||
then_case: vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
|
||||||
else_case: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if a() then { b(); c() } else { q }", exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx! {
|
|
||||||
ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})
|
|
||||||
}),
|
|
||||||
body: bx! {
|
|
||||||
IfExpressionBody::SimpleConditional {
|
|
||||||
then_case: vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
|
||||||
else_case: Some(vec![exst!(val!("q"))]),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
/*
|
|
||||||
parse_test!("if a() then { b(); c() }", AST(vec![exst!(
|
|
||||||
IfExpression(bx!(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})),
|
|
||||||
vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
|
||||||
None)
|
|
||||||
)]));
|
|
||||||
parse_test!(r#"
|
|
||||||
if true then {
|
|
||||||
const a = 10
|
|
||||||
b
|
|
||||||
} else {
|
|
||||||
c
|
|
||||||
}"#,
|
|
||||||
AST(vec![exst!(IfExpression(bx!(ex!(BoolLiteral(true))),
|
|
||||||
vec![decl!(Binding { name: rc!(a), constant: true, expr: ex!(NatLiteral(10)) }),
|
|
||||||
exst!(val!(rc!(b)))],
|
|
||||||
Some(vec![exst!(val!(rc!(c)))])))])
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_test!("if a { b } else { c }", AST(vec![exst!(
|
|
||||||
IfExpression(bx!(ex!(val!("a"))),
|
|
||||||
vec![exst!(val!("b"))],
|
|
||||||
Some(vec![exst!(val!("c"))])))]));
|
|
||||||
|
|
||||||
parse_test!("if (A {a: 1}) { b } else { c }", AST(vec![exst!(
|
|
||||||
IfExpression(bx!(ex!(NamedStruct { name: rc!(A), fields: vec![(rc!(a), ex!(NatLiteral(1)))]})),
|
|
||||||
vec![exst!(val!("b"))],
|
|
||||||
Some(vec![exst!(val!("c"))])))]));
|
|
||||||
|
|
||||||
parse_error!("if A {a: 1} { b } else { c }");
|
|
||||||
*/
|
|
||||||
}
|
|
||||||
#[test]
|
|
||||||
fn parsing_interfaces() {
|
|
||||||
parse_test_wrap_ast!("interface Unglueable { fn unglue(a: Glue); fn mar(): Glue }",
|
|
||||||
decl!(Interface {
|
|
||||||
name: rc!(Unglueable),
|
|
||||||
signatures: vec![
|
|
||||||
Signature {
|
|
||||||
name: rc!(unglue),
|
|
||||||
operator: false,
|
|
||||||
params: vec![
|
|
||||||
FormalParam { name: rc!(a), anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })), default: None }
|
|
||||||
],
|
|
||||||
type_anno: None
|
|
||||||
},
|
|
||||||
Signature { name: rc!(mar), operator: false, params: vec![], type_anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })) },
|
|
||||||
]
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_impls() {
|
|
||||||
parse_test_wrap_ast!("impl Heh { fn yolo(); fn swagg(); }",
|
|
||||||
decl!(Impl {
|
|
||||||
type_name: ty!("Heh"),
|
|
||||||
interface_name: None,
|
|
||||||
block: vec![
|
|
||||||
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None }),
|
|
||||||
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
|
|
||||||
] }));
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("impl Mondai for Lollerino { fn yolo(); fn swagg(); }",
|
|
||||||
decl!(Impl {
|
|
||||||
type_name: ty!("Lollerino"),
|
|
||||||
interface_name: Some(TypeSingletonName { name: rc!(Mondai), params: vec![] }),
|
|
||||||
block: vec![
|
|
||||||
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None}),
|
|
||||||
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
|
|
||||||
] }));
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("impl Hella<T> for (Alpha, Omega) { }",
|
|
||||||
decl!(Impl {
|
|
||||||
type_name: Tuple(vec![ty!("Alpha"), ty!("Omega")]),
|
|
||||||
interface_name: Some(TypeSingletonName { name: rc!(Hella), params: vec![ty!("T")] }),
|
|
||||||
block: vec![]
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("impl Option<WTFMate> { fn oi() }",
|
|
||||||
decl!(Impl {
|
|
||||||
type_name: Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("WTFMate")]}),
|
|
||||||
interface_name: None,
|
|
||||||
block: vec![
|
|
||||||
FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }),
|
|
||||||
]
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_type_annotations() {
|
|
||||||
parse_test_wrap_ast!("let a = b : Int",
|
|
||||||
decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr:
|
|
||||||
ex!(val!("b"), ty!("Int")) }));
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("a : Int",
|
|
||||||
exst!(val!("a"), ty!("Int"))
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("a : Option<Int>",
|
|
||||||
exst!(val!("a"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Int")] }))
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("a : KoreanBBQSpecifier<Kimchi, Option<Bulgogi> >",
|
|
||||||
exst!(val!("a"), Singleton(TypeSingletonName { name: rc!(KoreanBBQSpecifier), params: vec![
|
|
||||||
ty!("Kimchi"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Bulgogi")] })
|
|
||||||
] }))
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_test_wrap_ast!("a : (Int, Yolo<a>)",
|
|
||||||
exst!(val!("a"), Tuple(
|
|
||||||
vec![ty!("Int"), Singleton(TypeSingletonName {
|
|
||||||
name: rc!(Yolo), params: vec![ty!("a")]
|
|
||||||
})])));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parsing_lambdas() {
|
|
||||||
parse_test_wrap_ast! { r#"\(x) { x + 1}"#, exst!(
|
|
||||||
Lambda { params: vec![FormalParam { name: rc!(x), anno: None, default: None } ], type_anno: None, body: vec![exst!(s "x + 1")] }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast!(r#"\ (x: Int, y) { a;b;c;}"#,
|
|
||||||
exst!(Lambda {
|
|
||||||
params: vec![
|
|
||||||
FormalParam { name: rc!(x), anno: Some(ty!("Int")), default: None },
|
|
||||||
FormalParam { name: rc!(y), anno: None, default: None }
|
|
||||||
],
|
|
||||||
type_anno: None,
|
|
||||||
body: vec![exst!(s "a"), exst!(s "b"), exst!(s "c")]
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_test_wrap_ast! { r#"\(x){y}(1)"#,
|
|
||||||
exst!(Call { f: bx!(ex!(
|
|
||||||
Lambda {
|
|
||||||
params: vec![
|
|
||||||
FormalParam { name: rc!(x), anno: None, default: None }
|
|
||||||
],
|
|
||||||
type_anno: None,
|
|
||||||
body: vec![exst!(s "y")] }
|
|
||||||
)),
|
|
||||||
arguments: vec![inv!(ex!(NatLiteral(1))).into()] })
|
|
||||||
};
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
r#"\(x: Int): String { "q" }"#,
|
|
||||||
exst!(Lambda {
|
|
||||||
params: vec![
|
|
||||||
FormalParam { name: rc!(x), anno: Some(ty!("Int")), default: None },
|
|
||||||
],
|
|
||||||
type_anno: Some(ty!("String")),
|
|
||||||
body: vec![exst!(s r#""q""#)]
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn single_param_lambda() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
r"\x { x + 10 }",
|
|
||||||
exst!(Lambda {
|
|
||||||
params: vec![FormalParam { name: rc!(x), anno: None, default: None }],
|
|
||||||
type_anno: None,
|
|
||||||
body: vec![exst!(s r"x + 10")]
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
r"\x: Nat { x + 10 }",
|
|
||||||
exst!(Lambda {
|
|
||||||
params: vec![FormalParam { name: rc!(x), anno: Some(ty!("Nat")), default: None }],
|
|
||||||
type_anno: None,
|
|
||||||
body: vec![exst!(s r"x + 10")]
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn more_advanced_lambdas() {
|
|
||||||
parse_test! {
|
|
||||||
r#"fn wahoo() { let a = 10; \(x) { x + a } };
|
|
||||||
wahoo()(3) "#,
|
|
||||||
AST {
|
|
||||||
id: ItemIdStore::new_id(),
|
|
||||||
statements: vec![
|
|
||||||
exst!(s r"fn wahoo() { let a = 10; \(x) { x + a } }"),
|
|
||||||
exst! {
|
|
||||||
Call {
|
|
||||||
f: bx!(ex!(Call { f: bx!(ex!(val!("wahoo"))), arguments: vec![] })),
|
|
||||||
arguments: vec![inv!(ex!(NatLiteral(3))).into()],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn list_literals() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"[1,2]",
|
|
||||||
exst!(ListLiteral(vec![ex!(NatLiteral(1)), ex!(NatLiteral(2))]))
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn while_expr() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"while { }",
|
|
||||||
exst!(WhileExpression { condition: None, body: vec![] })
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"while a == b { }",
|
|
||||||
exst!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![] })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn for_expr() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"for { a <- maybeValue } return 1",
|
|
||||||
exst!(ForExpression {
|
|
||||||
enumerators: vec![Enumerator { id: rc!(a), generator: ex!(val!("maybeValue")) }],
|
|
||||||
body: bx!(MonadicReturn(ex!(s "1")))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"for n <- someRange { f(n); }",
|
|
||||||
exst!(ForExpression { enumerators: vec![Enumerator { id: rc!(n), generator: ex!(val!("someRange"))}],
|
|
||||||
body: bx!(ForBody::StatementBlock(vec![exst!(s "f(n)")]))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn patterns() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if x is Some(a) then { 4 } else { 9 }", exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx!(ex!(s "x"))),
|
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
|
||||||
pattern: Pattern::TupleStruct(qname!(Some), vec![Pattern::VarOrName(qname!(a))]),
|
|
||||||
then_case: vec![exst!(s "4")],
|
|
||||||
else_case: Some(vec![exst!(s "9")]) })
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if x is Some(a) then 4 else 9", exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx!(ex!(s "x"))),
|
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
|
||||||
pattern: Pattern::TupleStruct(qname!(Some), vec![Pattern::VarOrName(qname!(a))]),
|
|
||||||
then_case: vec![exst!(s "4")],
|
|
||||||
else_case: Some(vec![exst!(s "9")]) }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if x is Something { a, b: x } then { 4 } else { 9 }", exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx!(ex!(s "x"))),
|
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
|
||||||
pattern: Pattern::Record(qname!(Something), vec![
|
|
||||||
(rc!(a),Pattern::Literal(PatternLiteral::StringPattern(rc!(a)))),
|
|
||||||
(rc!(b),Pattern::VarOrName(qname!(x)))
|
|
||||||
]),
|
|
||||||
then_case: vec![exst!(s "4")],
|
|
||||||
else_case: Some(vec![exst!(s "9")])
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn pattern_literals() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if x is -1 then 1 else 2",
|
|
||||||
exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx!(ex!(s "x"))),
|
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
|
||||||
pattern: Pattern::Literal(PatternLiteral::NumPattern { neg: true, num: NatLiteral(1) }),
|
|
||||||
then_case: vec![exst!(NatLiteral(1))],
|
|
||||||
else_case: Some(vec![exst!(NatLiteral(2))]),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if x is 1 then 1 else 2",
|
|
||||||
exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx!(ex!(s "x"))),
|
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
|
||||||
pattern: Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1) }),
|
|
||||||
then_case: vec![exst!(s "1")],
|
|
||||||
else_case: Some(vec![exst!(s "2")]),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if x is true then 1 else 2",
|
|
||||||
exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx!(ex!(s "x"))),
|
|
||||||
body: bx!(
|
|
||||||
IfExpressionBody::SimplePatternMatch {
|
|
||||||
pattern: Pattern::Literal(PatternLiteral::BoolPattern(true)),
|
|
||||||
then_case: vec![exst!(NatLiteral(1))],
|
|
||||||
else_case: Some(vec![exst!(NatLiteral(2))]),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if x is \"gnosticism\" then 1 else 2",
|
|
||||||
exst!(
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx!(ex!(s "x"))),
|
|
||||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
|
||||||
pattern: Pattern::Literal(PatternLiteral::StringPattern(rc!(gnosticism))),
|
|
||||||
then_case: vec![exst!(s "1")],
|
|
||||||
else_case: Some(vec![exst!(s "2")]),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn imports() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"import harbinger::draughts::Norgleheim",
|
|
||||||
import!(ImportSpecifier {
|
|
||||||
id: ItemIdStore::new_id(),
|
|
||||||
path_components: vec![rc!(harbinger), rc!(draughts), rc!(Norgleheim)],
|
|
||||||
imported_names: ImportedNames::LastOfPath
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn imports_2() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"import harbinger::draughts::{Norgleheim, Xraksenlaigar}",
|
|
||||||
import!(ImportSpecifier {
|
|
||||||
id: ItemIdStore::new_id(),
|
|
||||||
path_components: vec![rc!(harbinger), rc!(draughts)],
|
|
||||||
imported_names: ImportedNames::List(vec![
|
|
||||||
rc!(Norgleheim),
|
|
||||||
rc!(Xraksenlaigar)
|
|
||||||
])
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn imports_3() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"import bespouri::{}",
|
|
||||||
import!(ImportSpecifier {
|
|
||||||
id: ItemIdStore::new_id(),
|
|
||||||
path_components: vec![rc!(bespouri)],
|
|
||||||
imported_names: ImportedNames::List(vec![])
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn imports_4() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"import bespouri::*",
|
|
||||||
import!(ImportSpecifier {
|
|
||||||
id: ItemIdStore::new_id(),
|
|
||||||
path_components: vec![rc!(bespouri)],
|
|
||||||
imported_names: ImportedNames::All
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn if_expr() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
"if x { is 1 then 5, else 20 }",
|
|
||||||
exst! {
|
|
||||||
IfExpression {
|
|
||||||
discriminator: Some(bx!(ex!(s "x"))),
|
|
||||||
body: bx!(IfExpressionBody::CondList(
|
|
||||||
vec![
|
|
||||||
ConditionArm {
|
|
||||||
condition: Condition::Pattern(Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1)})),
|
|
||||||
guard: None,
|
|
||||||
body: vec![exst!(s "5")],
|
|
||||||
},
|
|
||||||
ConditionArm {
|
|
||||||
condition: Condition::Else,
|
|
||||||
guard: None,
|
|
||||||
body: vec![exst!(s "20")],
|
|
||||||
},
|
|
||||||
]
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn modules() {
|
|
||||||
parse_test_wrap_ast! {
|
|
||||||
r#"
|
|
||||||
module ephraim {
|
|
||||||
let a = 10
|
|
||||||
fn nah() { 33 }
|
|
||||||
}
|
|
||||||
"#,
|
|
||||||
module!(
|
|
||||||
ModuleSpecifier { name: rc!(ephraim), contents: vec![
|
|
||||||
decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(s "10") }),
|
|
||||||
decl!(FuncDecl(Signature { name: rc!(nah), operator: false, params: vec![], type_anno: None }, vec![exst!(NatLiteral(33))])),
|
|
||||||
] }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,544 +0,0 @@
|
|||||||
//! # Reduced AST
|
|
||||||
//! The reduced AST is a minimal AST designed to be built from the full AST after all possible
|
|
||||||
//! static checks have been done. Consequently, the AST reduction phase does very little error
|
|
||||||
//! checking itself - any errors should ideally be caught either by an earlier phase, or are
|
|
||||||
//! runtime errors that the evaluator should handle. That said, because it does do table lookups
|
|
||||||
//! that can in principle fail [especially at the moment with most static analysis not yet complete],
|
|
||||||
//! there is an Expr variant `ReductionError` to handle these cases.
|
|
||||||
//!
|
|
||||||
//! A design decision to make - should the ReducedAST types contain all information about
|
|
||||||
//! type/layout necessary for the evaluator to work? If so, then the evaluator should not
|
|
||||||
//! have access to the symbol table at all and ReducedAST should carry that information. If not,
|
|
||||||
//! then ReducedAST shouldn't be duplicating information that can be queried at runtime from the
|
|
||||||
//! symbol table. But I think the former might make sense since ultimately the bytecode will be
|
|
||||||
//! built from the ReducedAST.
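//!
//! As a rough illustration (a hypothetical input, and assuming `Builtin::from_str("==")`
//! resolves to `Builtin::Equality`), a source expression like `1 == 2` is reduced by
//! `Reducer::binop` into a call to the equality builtin applied to the two literal operands:
//!
//! ```text
//! Expr::Call {
//!     f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Equality))),
//!     args: vec![Expr::Lit(Lit::Nat(1)), Expr::Lit(Lit::Nat(2))],
//! }
//! ```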
|
|
||||||
use std::rc::Rc;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use crate::ast::*;
|
|
||||||
use crate::symbol_table::{Symbol, SymbolSpec, SymbolTable, FullyQualifiedSymbolName};
|
|
||||||
use crate::builtin::Builtin;
|
|
||||||
use crate::util::deref_optional_box;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct ReducedAST(pub Vec<Stmt>);
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum Stmt {
|
|
||||||
PreBinding {
|
|
||||||
name: Rc<String>,
|
|
||||||
func: Func,
|
|
||||||
},
|
|
||||||
Binding {
|
|
||||||
name: Rc<String>,
|
|
||||||
constant: bool,
|
|
||||||
expr: Expr,
|
|
||||||
},
|
|
||||||
Expr(Expr),
|
|
||||||
Noop,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum Expr {
|
|
||||||
Unit,
|
|
||||||
Lit(Lit),
|
|
||||||
Sym(Rc<String>), //a Sym is anything that can be looked up by name at runtime - i.e. a function or variable address
|
|
||||||
Tuple(Vec<Expr>),
|
|
||||||
Func(Func),
|
|
||||||
Constructor {
|
|
||||||
type_name: Rc<String>,
|
|
||||||
name: Rc<String>,
|
|
||||||
tag: usize,
|
|
||||||
arity: usize, // n.b. arity here is always the value from the symbol table - if it doesn't match what it's being called with, that's an eval error, eval will handle it
|
|
||||||
},
|
|
||||||
Call {
|
|
||||||
f: Box<Expr>,
|
|
||||||
args: Vec<Expr>,
|
|
||||||
},
|
|
||||||
Assign {
|
|
||||||
val: Box<Expr>, //TODO this probably can't be a val
|
|
||||||
expr: Box<Expr>,
|
|
||||||
},
|
|
||||||
Conditional {
|
|
||||||
cond: Box<Expr>,
|
|
||||||
then_clause: Vec<Stmt>,
|
|
||||||
else_clause: Vec<Stmt>,
|
|
||||||
},
|
|
||||||
ConditionalTargetSigilValue,
|
|
||||||
CaseMatch {
|
|
||||||
cond: Box<Expr>,
|
|
||||||
alternatives: Vec<Alternative>
|
|
||||||
},
|
|
||||||
UnimplementedSigilValue,
|
|
||||||
ReductionError(String),
|
|
||||||
}
|
|
||||||
|
|
||||||
pub type BoundVars = Vec<Option<Rc<String>>>; //remember that order matters here
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct Alternative {
|
|
||||||
pub matchable: Subpattern,
|
|
||||||
pub item: Vec<Stmt>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct Subpattern {
|
|
||||||
pub tag: Option<usize>,
|
|
||||||
pub subpatterns: Vec<Option<Subpattern>>,
|
|
||||||
pub bound_vars: BoundVars,
|
|
||||||
pub guard: Option<Expr>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum Lit {
|
|
||||||
Nat(u64),
|
|
||||||
Int(i64),
|
|
||||||
Float(f64),
|
|
||||||
Bool(bool),
|
|
||||||
StringLit(Rc<String>),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum Func {
|
|
||||||
BuiltIn(Builtin),
|
|
||||||
UserDefined {
|
|
||||||
name: Option<Rc<String>>,
|
|
||||||
params: Vec<Rc<String>>,
|
|
||||||
body: Vec<Stmt>,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn reduce(ast: &AST, symbol_table: &SymbolTable) -> ReducedAST {
|
|
||||||
let mut reducer = Reducer { symbol_table };
|
|
||||||
reducer.ast(ast)
|
|
||||||
}
|
|
||||||
|
|
||||||
struct Reducer<'a> {
|
|
||||||
symbol_table: &'a SymbolTable
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Reducer<'a> {
|
|
||||||
fn ast(&mut self, ast: &AST) -> ReducedAST {
|
|
||||||
let mut output = vec![];
|
|
||||||
for statement in ast.statements.iter() {
|
|
||||||
output.push(self.statement(statement));
|
|
||||||
}
|
|
||||||
ReducedAST(output)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn statement(&mut self, stmt: &Statement) -> Stmt {
|
|
||||||
match &stmt.kind {
|
|
||||||
StatementKind::Expression(expr) => Stmt::Expr(self.expression(&expr)),
|
|
||||||
StatementKind::Declaration(decl) => self.declaration(&decl),
|
|
||||||
StatementKind::Import(_) => Stmt::Noop,
|
|
||||||
StatementKind::Module(modspec) => {
|
|
||||||
for statement in modspec.contents.iter() {
|
|
||||||
self.statement(&statement);
|
|
||||||
}
|
|
||||||
Stmt::Noop
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn block(&mut self, block: &Block) -> Vec<Stmt> {
|
|
||||||
block.iter().map(|stmt| self.statement(stmt)).collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn invocation_argument(&mut self, invoc: &InvocationArgument) -> Expr {
|
|
||||||
use crate::ast::InvocationArgument::*;
|
|
||||||
match invoc {
|
|
||||||
Positional(ex) => self.expression(ex),
|
|
||||||
Keyword { .. } => Expr::UnimplementedSigilValue,
|
|
||||||
Ignored => Expr::UnimplementedSigilValue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expression(&mut self, expr: &Expression) -> Expr {
|
|
||||||
use crate::ast::ExpressionKind::*;
|
|
||||||
let symbol_table = self.symbol_table;
|
|
||||||
let ref input = expr.kind;
|
|
||||||
match input {
|
|
||||||
NatLiteral(n) => Expr::Lit(Lit::Nat(*n)),
|
|
||||||
FloatLiteral(f) => Expr::Lit(Lit::Float(*f)),
|
|
||||||
StringLiteral(s) => Expr::Lit(Lit::StringLit(s.clone())),
|
|
||||||
BoolLiteral(b) => Expr::Lit(Lit::Bool(*b)),
|
|
||||||
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
|
|
||||||
PrefixExp(op, arg) => self.prefix(op, arg),
|
|
||||||
Value(qualified_name) => self.value(qualified_name),
|
|
||||||
Call { f, arguments } => self.reduce_call_expression(f, arguments),
|
|
||||||
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
|
|
||||||
IfExpression { discriminator, body } => self.reduce_if_expression(deref_optional_box(discriminator), body),
|
|
||||||
Lambda { params, body, .. } => self.reduce_lambda(params, body),
|
|
||||||
NamedStruct { name, fields } => self.reduce_named_struct(name, fields),
|
|
||||||
Index { .. } => Expr::UnimplementedSigilValue,
|
|
||||||
WhileExpression { .. } => Expr::UnimplementedSigilValue,
|
|
||||||
ForExpression { .. } => Expr::UnimplementedSigilValue,
|
|
||||||
ListLiteral { .. } => Expr::UnimplementedSigilValue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn value(&mut self, qualified_name: &QualifiedName) -> Expr {
|
|
||||||
let symbol_table = self.symbol_table;
|
|
||||||
let ref id = qualified_name.id;
|
|
||||||
let ref sym_name = match symbol_table.get_fqsn_from_id(id) {
|
|
||||||
Some(fqsn) => fqsn,
|
|
||||||
None => return Expr::ReductionError(format!("FQSN lookup for Value {:?} failed", qualified_name)),
|
|
||||||
};
|
|
||||||
|
|
||||||
//TODO this probably needs to change
|
|
||||||
let FullyQualifiedSymbolName(ref v) = sym_name;
|
|
||||||
let name = v.last().unwrap().name.clone();
|
|
||||||
|
|
||||||
let Symbol { local_name, spec, .. } = match symbol_table.lookup_by_fqsn(&sym_name) {
|
|
||||||
Some(s) => s,
|
|
||||||
//None => return Expr::ReductionError(format!("Symbol {:?} not found", sym_name)),
|
|
||||||
None => return Expr::Sym(name.clone())
|
|
||||||
};
|
|
||||||
|
|
||||||
match spec {
|
|
||||||
SymbolSpec::RecordConstructor { .. } => Expr::ReductionError(format!("AST reducer doesn't expect a RecordConstructor here")),
|
|
||||||
SymbolSpec::DataConstructor { index, type_args, type_name } => Expr::Constructor {
|
|
||||||
type_name: type_name.clone(),
|
|
||||||
name: name.clone(),
|
|
||||||
tag: index.clone(),
|
|
||||||
arity: type_args.len(),
|
|
||||||
},
|
|
||||||
SymbolSpec::Func(_) => Expr::Sym(local_name.clone()),
|
|
||||||
SymbolSpec::Binding => Expr::Sym(local_name.clone()), //TODO not sure if this is right, probably needs to eventually be fqsn
|
|
||||||
SymbolSpec::Type { .. } => Expr::ReductionError("AST reducer doesn't expect a type here".to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn reduce_lambda(&mut self, params: &Vec<FormalParam>, body: &Block) -> Expr {
|
|
||||||
Expr::Func(Func::UserDefined {
|
|
||||||
name: None,
|
|
||||||
params: params.iter().map(|param| param.name.clone()).collect(),
|
|
||||||
body: self.block(body),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn reduce_named_struct(&mut self, name: &QualifiedName, fields: &Vec<(Rc<String>, Expression)>) -> Expr {
|
|
||||||
let symbol_table = self.symbol_table;
|
|
||||||
let ref sym_name = match symbol_table.get_fqsn_from_id(&name.id) {
|
|
||||||
Some(fqsn) => fqsn,
|
|
||||||
None => return Expr::ReductionError(format!("FQSN lookup for name {:?} failed", name)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let FullyQualifiedSymbolName(ref v) = sym_name;
|
|
||||||
let ref name = v.last().unwrap().name;
|
|
||||||
let (type_name, index, members_from_table) = match symbol_table.lookup_by_fqsn(&sym_name) {
|
|
||||||
Some(Symbol { spec: SymbolSpec::RecordConstructor { members, type_name, index }, .. }) => (type_name.clone(), index, members),
|
|
||||||
_ => return Expr::ReductionError("Not a record constructor".to_string()),
|
|
||||||
};
|
|
||||||
let arity = members_from_table.len();
|
|
||||||
|
|
||||||
let mut args: Vec<(Rc<String>, Expr)> = fields.iter()
|
|
||||||
.map(|(name, expr)| (name.clone(), self.expression(expr)))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
args.as_mut_slice()
|
|
||||||
.sort_unstable_by(|(name1, _), (name2, _)| name1.cmp(name2)); //arbitrary - sorting by alphabetical order
|
|
||||||
|
|
||||||
let args = args.into_iter().map(|(_, expr)| expr).collect();
|
|
||||||
|
|
||||||
//TODO make sure this sorting actually works
|
|
||||||
let f = box Expr::Constructor { type_name, name: name.clone(), tag: *index, arity, };
|
|
||||||
Expr::Call { f, args }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn reduce_call_expression(&mut self, func: &Expression, arguments: &Vec<InvocationArgument>) -> Expr {
|
|
||||||
Expr::Call {
|
|
||||||
f: Box::new(self.expression(func)),
|
|
||||||
args: arguments.iter().map(|arg| self.invocation_argument(arg)).collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn reduce_if_expression(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> Expr {
|
|
||||||
let symbol_table = self.symbol_table;
|
|
||||||
let cond = Box::new(match discriminator {
|
|
||||||
Some(expr) => self.expression(expr),
|
|
||||||
None => return Expr::ReductionError(format!("blank cond if-expr not supported")),
|
|
||||||
});
|
|
||||||
|
|
||||||
match body {
|
|
||||||
IfExpressionBody::SimpleConditional { then_case, else_case } => {
|
|
||||||
let then_clause = self.block(&then_case);
|
|
||||||
let else_clause = match else_case.as_ref() {
|
|
||||||
None => vec![],
|
|
||||||
Some(stmts) => self.block(&stmts),
|
|
||||||
};
|
|
||||||
Expr::Conditional { cond, then_clause, else_clause }
|
|
||||||
},
|
|
||||||
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case } => {
|
|
||||||
let then_clause = self.block(&then_case);
|
|
||||||
let else_clause = match else_case.as_ref() {
|
|
||||||
None => vec![],
|
|
||||||
Some(stmts) => self.block(&stmts),
|
|
||||||
};
|
|
||||||
|
|
||||||
let alternatives = vec![
|
|
||||||
pattern.to_alternative(then_clause, symbol_table),
|
|
||||||
Alternative {
|
|
||||||
matchable: Subpattern {
|
|
||||||
tag: None,
|
|
||||||
subpatterns: vec![],
|
|
||||||
bound_vars: vec![],
|
|
||||||
guard: None,
|
|
||||||
},
|
|
||||||
item: else_clause
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
Expr::CaseMatch {
|
|
||||||
cond,
|
|
||||||
alternatives,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
IfExpressionBody::CondList(ref condition_arms) => {
|
|
||||||
let mut alternatives = vec![];
|
|
||||||
for arm in condition_arms {
|
|
||||||
match arm.condition {
|
|
||||||
Condition::Expression(ref _expr) => {
|
|
||||||
return Expr::UnimplementedSigilValue
|
|
||||||
},
|
|
||||||
Condition::Pattern(ref p) => {
|
|
||||||
let item = self.block(&arm.body);
|
|
||||||
let alt = p.to_alternative(item, symbol_table);
|
|
||||||
alternatives.push(alt);
|
|
||||||
},
|
|
||||||
Condition::TruncatedOp(_, _) => {
|
|
||||||
return Expr::UnimplementedSigilValue
|
|
||||||
},
|
|
||||||
Condition::Else => {
|
|
||||||
return Expr::UnimplementedSigilValue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Expr::CaseMatch { cond, alternatives }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn binop(&mut self, binop: &BinOp, lhs: &Box<Expression>, rhs: &Box<Expression>) -> Expr {
|
|
||||||
let operation = Builtin::from_str(binop.sigil()).ok();
|
|
||||||
match operation {
|
|
||||||
Some(Builtin::Assignment) => Expr::Assign {
|
|
||||||
val: Box::new(self.expression(&*lhs)),
|
|
||||||
expr: Box::new(self.expression(&*rhs)),
|
|
||||||
},
|
|
||||||
Some(op) => {
|
|
||||||
let f = Box::new(Expr::Func(Func::BuiltIn(op)));
|
|
||||||
Expr::Call { f, args: vec![self.expression(&*lhs), self.expression(&*rhs)] }
|
|
||||||
},
|
|
||||||
None => {
|
|
||||||
//TODO handle a user-defined operation
|
|
||||||
Expr::UnimplementedSigilValue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn prefix(&mut self, prefix: &PrefixOp, arg: &Box<Expression>) -> Expr {
|
|
||||||
match prefix.builtin {
|
|
||||||
Some(op) => {
|
|
||||||
let f = Box::new(Expr::Func(Func::BuiltIn(op)));
|
|
||||||
Expr::Call { f, args: vec![self.expression(arg)] }
|
|
||||||
},
|
|
||||||
None => { //TODO need this for custom prefix ops
|
|
||||||
Expr::UnimplementedSigilValue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn declaration(&mut self, declaration: &Declaration) -> Stmt {
|
|
||||||
use self::Declaration::*;
|
|
||||||
match declaration {
|
|
||||||
Binding {name, constant, expr, .. } => Stmt::Binding { name: name.clone(), constant: *constant, expr: self.expression(expr) },
|
|
||||||
FuncDecl(Signature { name, params, .. }, statements) => Stmt::PreBinding {
|
|
||||||
name: name.clone(),
|
|
||||||
func: Func::UserDefined {
|
|
||||||
name: Some(name.clone()),
|
|
||||||
params: params.iter().map(|param| param.name.clone()).collect(),
|
|
||||||
body: self.block(&statements),
|
|
||||||
}
|
|
||||||
},
|
|
||||||
TypeDecl { .. } => Stmt::Noop,
|
|
||||||
TypeAlias{ .. } => Stmt::Noop,
|
|
||||||
Interface { .. } => Stmt::Noop,
|
|
||||||
Impl { .. } => Stmt::Expr(Expr::UnimplementedSigilValue),
|
|
||||||
_ => Stmt::Expr(Expr::UnimplementedSigilValue)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/* Example of the kind of pattern handle_symbol deals with - an ignored slot, a variable, and a nested pattern:
|
|
||||||
* x is SomeBigOldEnum(_, x, Some(t))
|
|
||||||
*/
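// handle_symbol reduces one (possibly constructor-tagged) pattern into a Subpattern: `tag` is the
// data constructor's index from the symbol table (None for plain tuples), `bound_vars` records
// which inner slots bind a variable name, and `subpatterns` recursively reduces any nested
// patterns that still need to be matched structurally.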
|
|
||||||
|
|
||||||
fn handle_symbol(symbol: Option<&Symbol>, inner_patterns: &Vec<Pattern>, symbol_table: &SymbolTable) -> Subpattern {
|
|
||||||
use self::Pattern::*;
|
|
||||||
let tag = symbol.map(|symbol| match symbol.spec {
|
|
||||||
SymbolSpec::DataConstructor { index, .. } => index.clone(),
|
|
||||||
_ => panic!("Symbol is not a data constructor - this should've been caught in type-checking"),
|
|
||||||
});
|
|
||||||
let bound_vars = inner_patterns.iter().map(|p| match p {
|
|
||||||
VarOrName(qualified_name) => {
|
|
||||||
let fqsn = symbol_table.get_fqsn_from_id(&qualified_name.id);
|
|
||||||
let symbol_exists = fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)).is_some();
|
|
||||||
if symbol_exists {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
let QualifiedName { components, .. } = qualified_name;
|
|
||||||
if components.len() == 1 {
|
|
||||||
Some(components[0].clone())
|
|
||||||
} else {
|
|
||||||
panic!("Bad variable name in pattern");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
_ => None,
|
|
||||||
}).collect();
|
|
||||||
|
|
||||||
let subpatterns = inner_patterns.iter().map(|p| match p {
|
|
||||||
Ignored => None,
|
|
||||||
VarOrName(_) => None,
|
|
||||||
Literal(other) => Some(other.to_subpattern(symbol_table)),
|
|
||||||
tp @ TuplePattern(_) => Some(tp.to_subpattern(symbol_table)),
|
|
||||||
ts @ TupleStruct(_, _) => Some(ts.to_subpattern(symbol_table)),
|
|
||||||
Record(..) => unimplemented!(),
|
|
||||||
}).collect();
|
|
||||||
|
|
||||||
let guard = None;
|
|
||||||
/*
|
|
||||||
let guard_equality_exprs: Vec<Expr> = subpatterns.iter().map(|p| match p {
|
|
||||||
Literal(lit) => match lit {
|
|
||||||
_ => unimplemented!()
|
|
||||||
},
|
|
||||||
_ => unimplemented!()
|
|
||||||
}).collect();
|
|
||||||
*/
|
|
||||||
|
|
||||||
Subpattern {
|
|
||||||
tag,
|
|
||||||
subpatterns,
|
|
||||||
guard,
|
|
||||||
bound_vars,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Pattern {
|
|
||||||
fn to_alternative(&self, item: Vec<Stmt>, symbol_table: &SymbolTable) -> Alternative {
|
|
||||||
let s = self.to_subpattern(symbol_table);
|
|
||||||
Alternative {
|
|
||||||
matchable: Subpattern {
|
|
||||||
tag: s.tag,
|
|
||||||
subpatterns: s.subpatterns,
|
|
||||||
bound_vars: s.bound_vars,
|
|
||||||
guard: s.guard,
|
|
||||||
},
|
|
||||||
item
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn to_subpattern(&self, symbol_table: &SymbolTable) -> Subpattern {
|
|
||||||
use self::Pattern::*;
|
|
||||||
match self {
|
|
||||||
TupleStruct(QualifiedName{ components, id }, inner_patterns) => {
|
|
||||||
let fqsn = symbol_table.get_fqsn_from_id(&id);
|
|
||||||
match fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)) {
|
|
||||||
Some(symbol) => handle_symbol(Some(symbol), inner_patterns, symbol_table),
|
|
||||||
None => {
|
|
||||||
panic!("Symbol {:?} not found", components);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
TuplePattern(inner_patterns) => handle_symbol(None, inner_patterns, symbol_table),
|
|
||||||
Record(_name, _pairs) => {
|
|
||||||
unimplemented!()
|
|
||||||
},
|
|
||||||
Ignored => Subpattern { tag: None, subpatterns: vec![], guard: None, bound_vars: vec![] },
|
|
||||||
Literal(lit) => lit.to_subpattern(symbol_table),
|
|
||||||
VarOrName(QualifiedName { components, id }) => {
|
|
||||||
// if fqsn is Some, treat this as a symbol pattern. If it's None, treat it
|
|
||||||
// as a variable.
|
|
||||||
let fqsn = symbol_table.get_fqsn_from_id(&id);
|
|
||||||
match fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)) {
|
|
||||||
Some(symbol) => handle_symbol(Some(symbol), &vec![], symbol_table),
|
|
||||||
None => {
|
|
||||||
let name = if components.len() == 1 {
|
|
||||||
components[0].clone()
|
|
||||||
} else {
|
|
||||||
panic!("check this line of code yo");
|
|
||||||
};
|
|
||||||
Subpattern {
|
|
||||||
tag: None,
|
|
||||||
subpatterns: vec![],
|
|
||||||
guard: None,
|
|
||||||
bound_vars: vec![Some(name.clone())],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PatternLiteral {
|
|
||||||
fn to_subpattern(&self, _symbol_table: &SymbolTable) -> Subpattern {
|
|
||||||
use self::PatternLiteral::*;
|
|
||||||
match self {
|
|
||||||
NumPattern { neg, num } => {
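// A numeric literal pattern reduces to a guard expression: build the (possibly negated)
// literal and compare it for equality against the value under scrutiny, which is
// represented by Expr::ConditionalTargetSigilValue.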
|
|
||||||
let comparison = Expr::Lit(match (neg, num) {
|
|
||||||
(false, ExpressionKind::NatLiteral(n)) => Lit::Nat(*n),
|
|
||||||
(false, ExpressionKind::FloatLiteral(f)) => Lit::Float(*f),
|
|
||||||
(true, ExpressionKind::NatLiteral(n)) => Lit::Int(-1*(*n as i64)),
|
|
||||||
(true, ExpressionKind::FloatLiteral(f)) => Lit::Float(-1.0*f),
|
|
||||||
_ => panic!("This should never happen")
|
|
||||||
});
|
|
||||||
let guard = Some(Expr::Call {
|
|
||||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Equality))),
|
|
||||||
args: vec![comparison, Expr::ConditionalTargetSigilValue],
|
|
||||||
});
|
|
||||||
Subpattern {
|
|
||||||
tag: None,
|
|
||||||
subpatterns: vec![],
|
|
||||||
guard,
|
|
||||||
bound_vars: vec![],
|
|
||||||
}
|
|
||||||
},
|
|
||||||
StringPattern(s) => {
|
|
||||||
let guard = Some(Expr::Call {
|
|
||||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Equality))),
|
|
||||||
args: vec![Expr::Lit(Lit::StringLit(s.clone())), Expr::ConditionalTargetSigilValue]
|
|
||||||
});
|
|
||||||
|
|
||||||
Subpattern {
|
|
||||||
tag: None,
|
|
||||||
subpatterns: vec![],
|
|
||||||
guard,
|
|
||||||
bound_vars: vec![],
|
|
||||||
}
|
|
||||||
},
|
|
||||||
BoolPattern(b) => {
|
|
||||||
let guard = Some(if *b {
|
|
||||||
Expr::ConditionalTargetSigilValue
|
|
||||||
} else {
|
|
||||||
Expr::Call {
|
|
||||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::BooleanNot))),
|
|
||||||
args: vec![Expr::ConditionalTargetSigilValue]
|
|
||||||
}
|
|
||||||
});
|
|
||||||
Subpattern {
|
|
||||||
tag: None,
|
|
||||||
subpatterns: vec![],
|
|
||||||
guard,
|
|
||||||
bound_vars: vec![],
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
@ -1,338 +0,0 @@
|
|||||||
use stopwatch::Stopwatch;
|
|
||||||
|
|
||||||
use std::time::Duration;
|
|
||||||
use std::cell::RefCell;
|
|
||||||
use std::rc::Rc;
|
|
||||||
use std::collections::HashSet;
|
|
||||||
|
|
||||||
use itertools::Itertools;
|
|
||||||
use schala_repl::{ProgrammingLanguageInterface,
|
|
||||||
ComputationRequest, ComputationResponse,
|
|
||||||
LangMetaRequest, LangMetaResponse, GlobalOutputStats,
|
|
||||||
DebugResponse, DebugAsk};
|
|
||||||
use crate::{ast, reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table, source_map};
|
|
||||||
|
|
||||||
pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
|
|
||||||
pub type SourceMapHandle = Rc<RefCell<source_map::SourceMap>>;
|
|
||||||
|
|
||||||
/// All the state necessary to parse and execute a Schala program is stored in this struct.
|
|
||||||
/// `state` represents the execution state for the AST-walking interpreter, the other fields
|
|
||||||
/// should be self-explanatory.
|
|
||||||
pub struct Schala {
|
|
||||||
source_reference: SourceReference,
|
|
||||||
source_map: SourceMapHandle,
|
|
||||||
state: eval::State<'static>,
|
|
||||||
symbol_table: SymbolTableHandle,
|
|
||||||
resolver: crate::scope_resolution::ScopeResolver<'static>,
|
|
||||||
type_context: typechecking::TypeContext<'static>,
|
|
||||||
active_parser: parsing::Parser,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Schala {
|
|
||||||
fn handle_docs(&self, source: String) -> LangMetaResponse {
|
|
||||||
LangMetaResponse::Docs {
|
|
||||||
doc_string: format!("Schala item `{}` : <<Schala-lang documentation not yet implemented>>", source)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Schala {
|
|
||||||
/// Creates a new Schala environment *without* any prelude.
|
|
||||||
fn new_blank_env() -> Schala {
|
|
||||||
let source_map = Rc::new(RefCell::new(source_map::SourceMap::new()));
|
|
||||||
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new(source_map.clone())));
|
|
||||||
Schala {
|
|
||||||
//TODO maybe these can be the same structure
|
|
||||||
source_reference: SourceReference::new(),
|
|
||||||
symbol_table: symbols.clone(),
|
|
||||||
source_map: source_map.clone(),
|
|
||||||
resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
|
|
||||||
state: eval::State::new(),
|
|
||||||
type_context: typechecking::TypeContext::new(),
|
|
||||||
active_parser: parsing::Parser::new(source_map)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new Schala environment with the standard prelude, which is defined as ordinary
|
|
||||||
/// Schala code in the file `prelude.schala`
|
|
||||||
pub fn new() -> Schala {
|
|
||||||
let prelude = include_str!("prelude.schala");
|
|
||||||
let mut s = Schala::new_blank_env();
|
|
||||||
|
|
||||||
let request = ComputationRequest { source: prelude, debug_requests: HashSet::default() };
|
|
||||||
let response = s.run_computation(request);
|
|
||||||
if let Err(msg) = response.main_output {
|
|
||||||
panic!("Error in prelude, panicking: {}", msg);
|
|
||||||
}
|
|
||||||
s
|
|
||||||
}
|
|
||||||
|
|
||||||
fn handle_debug_immediate(&self, request: DebugAsk) -> DebugResponse {
|
|
||||||
use DebugAsk::*;
|
|
||||||
match request {
|
|
||||||
Timing => DebugResponse { ask: Timing, value: format!("Invalid") },
|
|
||||||
ByStage { stage_name, token } => match &stage_name[..] {
|
|
||||||
"symbol-table" => {
|
|
||||||
let value = self.symbol_table.borrow().debug_symbol_table();
|
|
||||||
DebugResponse {
|
|
||||||
ask: ByStage { stage_name: format!("symbol-table"), token },
|
|
||||||
value
|
|
||||||
}
|
|
||||||
},
|
|
||||||
s => {
|
|
||||||
DebugResponse {
|
|
||||||
ask: ByStage { stage_name: s.to_string(), token: None },
|
|
||||||
value: format!("Not-implemented")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<Vec<tokenizing::Token>, String> {
|
|
||||||
let tokens = tokenizing::tokenize(input);
|
|
||||||
comp.map(|comp| {
|
|
||||||
let token_string = tokens.iter().map(|t| t.to_string_with_metadata()).join(", ");
|
|
||||||
comp.add_artifact(token_string);
|
|
||||||
});
|
|
||||||
|
|
||||||
let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
|
|
||||||
if errors.len() == 0 {
|
|
||||||
Ok(tokens)
|
|
||||||
} else {
|
|
||||||
Err(format!("{:?}", errors))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
|
||||||
use ParsingDebugType::*;
|
|
||||||
|
|
||||||
let ref mut parser = handle.active_parser;
|
|
||||||
parser.add_new_tokens(input);
|
|
||||||
let ast = parser.parse();
|
|
||||||
|
|
||||||
comp.map(|comp| {
|
|
||||||
let debug_format = comp.parsing.as_ref().unwrap_or(&CompactAST);
|
|
||||||
let debug_info = match debug_format {
|
|
||||||
CompactAST => match ast{
|
|
||||||
Ok(ref ast) => ast.compact_debug(),
|
|
||||||
Err(_) => "Error - see output".to_string(),
|
|
||||||
},
|
|
||||||
ExpandedAST => match ast{
|
|
||||||
Ok(ref ast) => ast.expanded_debug(),
|
|
||||||
Err(_) => "Error - see output".to_string(),
|
|
||||||
},
|
|
||||||
Trace => parser.format_parse_trace(),
|
|
||||||
};
|
|
||||||
comp.add_artifact(debug_info);
|
|
||||||
});
|
|
||||||
ast.map_err(|err| format_parse_error(err, &handle.source_reference))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn format_parse_error(error: parsing::ParseError, source_reference: &SourceReference) -> String {
|
|
||||||
let line_num = error.token.location.line_num;
|
|
||||||
let ch = error.token.location.char_num;
|
|
||||||
let line_from_program = source_reference.get_line(line_num);
|
|
||||||
let location_pointer = format!("{}^", " ".repeat(ch));
|
|
||||||
|
|
||||||
let line_num_digits = format!("{}", line_num).chars().count();
|
|
||||||
let space_padding = " ".repeat(line_num_digits);
|
|
||||||
|
|
||||||
let production = match error.production_name {
|
|
||||||
Some(n) => format!("\n(from production \"{}\")", n),
|
|
||||||
None => "".to_string()
|
|
||||||
};
|
|
||||||
|
|
||||||
format!(r#"
|
|
||||||
{error_msg}{production}
|
|
||||||
{space_padding} |
|
|
||||||
{line_num} | {}
|
|
||||||
{space_padding} | {}
|
|
||||||
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num, production=production
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
|
||||||
let () = handle.symbol_table.borrow_mut().add_top_level_symbols(&input)?;
|
|
||||||
comp.map(|comp| {
|
|
||||||
let debug = handle.symbol_table.borrow().debug_symbol_table();
|
|
||||||
comp.add_artifact(debug);
|
|
||||||
});
|
|
||||||
Ok(input)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn scope_resolution(mut input: ast::AST, handle: &mut Schala, _com: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
|
||||||
let () = handle.resolver.resolve(&mut input)?;
|
|
||||||
Ok(input)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
|
||||||
let result = handle.type_context.typecheck(&input);
|
|
||||||
|
|
||||||
comp.map(|comp| {
|
|
||||||
comp.add_artifact(match result {
|
|
||||||
Ok(ty) => ty.to_string(),
|
|
||||||
Err(err) => format!("Type error: {}", err.msg)
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(input)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ast_reducing(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<reduced_ast::ReducedAST, String> {
|
|
||||||
let ref symbol_table = handle.symbol_table.borrow();
|
|
||||||
let output = reduced_ast::reduce(&input, symbol_table);
|
|
||||||
comp.map(|comp| comp.add_artifact(format!("{:?}", output)));
|
|
||||||
Ok(output)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn eval(input: reduced_ast::ReducedAST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<String, String> {
|
|
||||||
comp.map(|comp| comp.add_artifact(handle.state.debug_print()));
|
|
||||||
let evaluation_outputs = handle.state.evaluate(input, true);
|
|
||||||
let text_output: Result<Vec<String>, String> = evaluation_outputs
|
|
||||||
.into_iter()
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let eval_output: Result<String, String> = text_output
|
|
||||||
.map(|v| { v.into_iter().intersperse(format!("\n")).collect() });
|
|
||||||
eval_output
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Represents lines of source code
|
|
||||||
struct SourceReference {
|
|
||||||
lines: Option<Vec<String>>
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SourceReference {
|
|
||||||
fn new() -> SourceReference {
|
|
||||||
SourceReference { lines: None }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn load_new_source(&mut self, source: &str) {
|
|
||||||
//TODO this is a lot of heap allocations - maybe there's a way to make it more efficient?
|
|
||||||
self.lines = Some(source.lines().map(|s| s.to_string()).collect()); }
|
|
||||||
|
|
||||||
fn get_line(&self, line: usize) -> String {
|
|
||||||
self.lines.as_ref().and_then(|x| x.get(line).map(|s| s.to_string())).unwrap_or(format!("NO LINE FOUND"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
enum ParsingDebugType {
|
|
||||||
CompactAST,
|
|
||||||
ExpandedAST,
|
|
||||||
Trace
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
struct PassDebugArtifact {
|
|
||||||
parsing: Option<ParsingDebugType>,
|
|
||||||
artifacts: Vec<String>
|
|
||||||
|
|
||||||
}
|
|
||||||
impl PassDebugArtifact {
|
|
||||||
fn add_artifact(&mut self, artifact: String) {
|
|
||||||
self.artifacts.push(artifact)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn stage_names() -> Vec<&'static str> {
|
|
||||||
vec![
|
|
||||||
"tokenizing",
|
|
||||||
"parsing",
|
|
||||||
"symbol-table",
|
|
||||||
"scope-resolution",
|
|
||||||
"typechecking",
|
|
||||||
"ast-reduction",
|
|
||||||
"ast-walking-evaluation"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
impl ProgrammingLanguageInterface for Schala {
|
|
||||||
fn get_language_name(&self) -> String { format!("Schala") }
|
|
||||||
fn get_source_file_suffix(&self) -> String { format!("schala") }
|
|
||||||
|
|
||||||
fn run_computation(&mut self, request: ComputationRequest) -> ComputationResponse {
|
|
||||||
struct PassToken<'a> {
|
|
||||||
schala: &'a mut Schala,
|
|
||||||
stage_durations: &'a mut Vec<(String, Duration)>,
|
|
||||||
sw: &'a Stopwatch,
|
|
||||||
debug_requests: &'a HashSet<DebugAsk>,
|
|
||||||
debug_responses: &'a mut Vec<DebugResponse>,
|
|
||||||
}
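// Runs one pipeline pass: invokes `func`, records the stopwatch reading for this stage,
// and turns any debug artifacts the pass produced into DebugResponses for the caller.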
|
|
||||||
|
|
||||||
fn output_wrapper<Input, Output, F>(n: usize, func: F, input: Input, token: &mut PassToken) -> Result<Output, String>
|
|
||||||
where F: Fn(Input, &mut Schala, Option<&mut PassDebugArtifact>) -> Result<Output, String>
|
|
||||||
{
|
|
||||||
let stage_names = stage_names();
|
|
||||||
let cur_stage_name = stage_names[n];
|
|
||||||
let ask = token.debug_requests.iter().find(|ask| ask.is_for_stage(cur_stage_name));
|
|
||||||
|
|
||||||
let parsing = match ask {
|
|
||||||
Some(DebugAsk::ByStage { token, .. }) if cur_stage_name == "parsing" => Some(
|
|
||||||
token.as_ref().map(|token| match &token[..] {
|
|
||||||
"compact" => ParsingDebugType::CompactAST,
|
|
||||||
"expanded" => ParsingDebugType::ExpandedAST,
|
|
||||||
"trace" => ParsingDebugType::Trace,
|
|
||||||
_ => ParsingDebugType::CompactAST,
|
|
||||||
}).unwrap_or(ParsingDebugType::CompactAST)
|
|
||||||
),
|
|
||||||
_ => None,
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut debug_artifact = ask.map(|_| PassDebugArtifact {
|
|
||||||
parsing, ..Default::default()
|
|
||||||
});
|
|
||||||
|
|
||||||
let output = func(input, token.schala, debug_artifact.as_mut());
|
|
||||||
|
|
||||||
//TODO I think this is not counting the time since the *previous* stage
|
|
||||||
token.stage_durations.push((cur_stage_name.to_string(), token.sw.elapsed()));
|
|
||||||
if let Some(artifact) = debug_artifact {
|
|
||||||
for value in artifact.artifacts.into_iter() {
|
|
||||||
let resp = DebugResponse { ask: ask.unwrap().clone(), value };
|
|
||||||
token.debug_responses.push(resp);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
output
|
|
||||||
}
|
|
||||||
|
|
||||||
let ComputationRequest { source, debug_requests } = request;
|
|
||||||
self.source_reference.load_new_source(source);
|
|
||||||
let sw = Stopwatch::start_new();
|
|
||||||
let mut stage_durations = Vec::new();
|
|
||||||
let mut debug_responses = Vec::new();
|
|
||||||
let mut tok = PassToken { schala: self, stage_durations: &mut stage_durations, sw: &sw, debug_requests: &debug_requests, debug_responses: &mut debug_responses };
|
|
||||||
|
|
||||||
let main_output: Result<String, String> = Ok(source)
|
|
||||||
.and_then(|source| output_wrapper(0, tokenizing, source, &mut tok))
|
|
||||||
.and_then(|tokens| output_wrapper(1, parsing, tokens, &mut tok))
|
|
||||||
.and_then(|ast| output_wrapper(2, symbol_table, ast, &mut tok))
|
|
||||||
.and_then(|ast| output_wrapper(3, scope_resolution, ast, &mut tok))
|
|
||||||
.and_then(|ast| output_wrapper(4, typechecking, ast, &mut tok))
|
|
||||||
.and_then(|ast| output_wrapper(5, ast_reducing, ast, &mut tok))
|
|
||||||
.and_then(|reduced_ast| output_wrapper(6, eval, reduced_ast, &mut tok));
|
|
||||||
|
|
||||||
let total_duration = sw.elapsed();
|
|
||||||
let global_output_stats = GlobalOutputStats {
|
|
||||||
total_duration, stage_durations
|
|
||||||
};
|
|
||||||
|
|
||||||
ComputationResponse {
|
|
||||||
main_output,
|
|
||||||
global_output_stats,
|
|
||||||
debug_responses,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
|
|
||||||
match request {
|
|
||||||
LangMetaRequest::StageNames => LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()),
|
|
||||||
LangMetaRequest::Docs { source } => self.handle_docs(source),
|
|
||||||
LangMetaRequest::ImmediateDebug(debug_request) =>
|
|
||||||
LangMetaResponse::ImmediateDebug(self.handle_debug_immediate(debug_request)),
|
|
||||||
LangMetaRequest::Custom { .. } => LangMetaResponse::Custom { kind: format!("not-implemented"), value: format!("") }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,119 +0,0 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use crate::schala::SymbolTableHandle;
|
|
||||||
use crate::symbol_table::{ScopeSegment, FullyQualifiedSymbolName};
|
|
||||||
use crate::ast::*;
|
|
||||||
use crate::util::ScopeStack;
|
|
||||||
|
|
||||||
type FQSNPrefix = Vec<ScopeSegment>;
|
|
||||||
|
|
||||||
pub struct ScopeResolver<'a> {
|
|
||||||
symbol_table_handle: SymbolTableHandle,
|
|
||||||
name_scope_stack: ScopeStack<'a, Rc<String>, FQSNPrefix>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> ASTVisitor for ScopeResolver<'a> {
|
|
||||||
//TODO need to un-insert these - maybe need to rethink visitor
|
|
||||||
fn import(&mut self, import_spec: &ImportSpecifier) {
|
|
||||||
let ref symbol_table = self.symbol_table_handle.borrow();
|
|
||||||
let ImportSpecifier { ref path_components, ref imported_names, .. } = &import_spec;
|
|
||||||
match imported_names {
|
|
||||||
ImportedNames::All => {
|
|
||||||
let prefix = FullyQualifiedSymbolName(path_components.iter().map(|c| ScopeSegment {
|
|
||||||
name: c.clone(),
|
|
||||||
}).collect());
|
|
||||||
let members = symbol_table.lookup_children_of_fqsn(&prefix);
|
|
||||||
for member in members.into_iter() {
|
|
||||||
let local_name = member.0.last().unwrap().name.clone();
|
|
||||||
self.name_scope_stack.insert(local_name.clone(), member.0);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
ImportedNames::LastOfPath => {
|
|
||||||
let name = path_components.last().unwrap(); //TODO handle better
|
|
||||||
let fqsn_prefix = path_components.iter().map(|c| ScopeSegment {
|
|
||||||
name: c.clone(),
|
|
||||||
}).collect();
|
|
||||||
self.name_scope_stack.insert(name.clone(), fqsn_prefix);
|
|
||||||
}
|
|
||||||
ImportedNames::List(ref names) => {
|
|
||||||
let fqsn_prefix: FQSNPrefix = path_components.iter().map(|c| ScopeSegment {
|
|
||||||
name: c.clone(),
|
|
||||||
}).collect();
|
|
||||||
for name in names.iter() {
|
|
||||||
self.name_scope_stack.insert(name.clone(), fqsn_prefix.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn qualified_name(&mut self, qualified_name: &QualifiedName) {
|
|
||||||
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
|
|
||||||
let fqsn = self.lookup_name_in_scope(&qualified_name);
|
|
||||||
let ref id = qualified_name.id;
|
|
||||||
symbol_table.map_id_to_fqsn(id, fqsn);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn named_struct(&mut self, name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {
|
|
||||||
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
|
|
||||||
let ref id = name.id;
|
|
||||||
let fqsn = self.lookup_name_in_scope(&name);
|
|
||||||
symbol_table.map_id_to_fqsn(id, fqsn);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn pattern(&mut self, pat: &Pattern) {
|
|
||||||
use Pattern::*;
|
|
||||||
match pat {
|
|
||||||
Ignored => (),
|
|
||||||
TuplePattern(_) => (),
|
|
||||||
Literal(_) => (),
|
|
||||||
TupleStruct(name, _) => self.qualified_name_in_pattern(name),
|
|
||||||
Record(name, _) => self.qualified_name_in_pattern(name),
|
|
||||||
VarOrName(name) => self.qualified_name_in_pattern(name),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> ScopeResolver<'a> {
|
|
||||||
pub fn new(symbol_table_handle: SymbolTableHandle) -> ScopeResolver<'static> {
|
|
||||||
let name_scope_stack = ScopeStack::new(None);
|
|
||||||
ScopeResolver { symbol_table_handle, name_scope_stack }
|
|
||||||
}
|
|
||||||
pub fn resolve(&mut self, ast: &mut AST) -> Result<(), String> {
|
|
||||||
walk_ast(self, ast);
|
|
||||||
Ok(())
|
|
||||||
}
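/// Resolves a possibly-qualified name against the current import scope: if the first path
/// component was brought into scope by an import, the stored FQSN prefix is substituted for
/// that component; otherwise the components are used verbatim.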
|
|
||||||
|
|
||||||
fn lookup_name_in_scope(&self, sym_name: &QualifiedName) -> FullyQualifiedSymbolName {
|
|
||||||
let QualifiedName { components, .. } = sym_name;
|
|
||||||
let first_component = &components[0];
|
|
||||||
match self.name_scope_stack.lookup(first_component) {
|
|
||||||
None => {
|
|
||||||
FullyQualifiedSymbolName(components.iter().map(|name| ScopeSegment { name: name.clone() }).collect())
|
|
||||||
},
|
|
||||||
Some(fqsn_prefix) => {
|
|
||||||
let mut full_name = fqsn_prefix.clone();
|
|
||||||
let rest_of_name: FQSNPrefix = components[1..].iter().map(|name| ScopeSegment { name: name.clone() }).collect();
|
|
||||||
full_name.extend_from_slice(&rest_of_name);
|
|
||||||
FullyQualifiedSymbolName(full_name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This might be a variable or a pattern; if it's a variable (no symbol exists for it), no FQSN mapping is added.
|
|
||||||
fn qualified_name_in_pattern(&mut self, qualified_name: &QualifiedName) {
|
|
||||||
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
|
|
||||||
let ref id = qualified_name.id;
|
|
||||||
let fqsn = self.lookup_name_in_scope(qualified_name);
|
|
||||||
if symbol_table.lookup_by_fqsn(&fqsn).is_some() {
|
|
||||||
symbol_table.map_id_to_fqsn(&id, fqsn);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
#[test]
|
|
||||||
fn basic_scope() {
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,39 +0,0 @@
|
|||||||
use std::collections::HashMap;
|
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
use crate::ast::ItemId;
|
|
||||||
|
|
||||||
pub type LineNumber = usize;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
|
||||||
pub struct Location {
|
|
||||||
pub line_num: LineNumber,
|
|
||||||
pub char_num: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Location {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
write!(f, "{}:{}", self.line_num, self.char_num)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct SourceMap {
|
|
||||||
map: HashMap<ItemId, Location>
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SourceMap {
|
|
||||||
pub fn new() -> SourceMap {
|
|
||||||
SourceMap { map: HashMap::new() }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add_location(&mut self, id: &ItemId, loc: Location) {
|
|
||||||
self.map.insert(id.clone(), loc);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn lookup(&self, id: &ItemId) -> Option<Location> {
|
|
||||||
match self.map.get(id) {
|
|
||||||
Some(loc) => Some(loc.clone()),
|
|
||||||
None => None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,343 +0,0 @@
|
|||||||
use std::collections::HashMap;
|
|
||||||
use std::collections::hash_map::Entry;
|
|
||||||
use std::rc::Rc;
|
|
||||||
use std::fmt;
|
|
||||||
use std::fmt::Write;
|
|
||||||
|
|
||||||
use crate::schala::SourceMapHandle;
|
|
||||||
use crate::source_map::{SourceMap, LineNumber};
|
|
||||||
use crate::ast;
|
|
||||||
use crate::ast::{ItemId, TypeBody, TypeSingletonName, Signature, Statement, StatementKind, ModuleSpecifier};
|
|
||||||
use crate::typechecking::TypeName;
|
|
||||||
|
|
||||||
|
|
||||||
#[allow(unused_macros)]
|
|
||||||
macro_rules! fqsn {
|
|
||||||
( $( $name:expr ; $kind:tt),* ) => {
|
|
||||||
{
|
|
||||||
let mut vec = vec![];
|
|
||||||
$(
|
|
||||||
vec.push(crate::symbol_table::ScopeSegment::new(std::rc::Rc::new($name.to_string())));
|
|
||||||
)*
|
|
||||||
FullyQualifiedSymbolName(vec)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
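// Illustrative use of the macro above (hypothetical segment names; the `; $kind` token in each
// entry is matched but not used by the current expansion):
//
//     let name = fqsn!("outer" ; ty, "inner" ; ty);
//     // builds a FullyQualifiedSymbolName whose segments are "outer" and "inner"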
|
|
||||||
|
|
||||||
mod symbol_trie;
|
|
||||||
use symbol_trie::SymbolTrie;
|
|
||||||
mod test;
|
|
||||||
|
|
||||||
/// Keeps track of what names were used in a given namespace. Call try_register to add a name to
|
|
||||||
/// the table; if the name is already taken, it returns the line number of the existing definition.
|
|
||||||
struct DuplicateNameTrackTable {
|
|
||||||
table: HashMap<Rc<String>, LineNumber>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DuplicateNameTrackTable {
|
|
||||||
fn new() -> DuplicateNameTrackTable {
|
|
||||||
DuplicateNameTrackTable { table: HashMap::new() }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn try_register(&mut self, name: &Rc<String>, id: &ItemId, source_map: &SourceMap) -> Result<(), LineNumber> {
|
|
||||||
match self.table.entry(name.clone()) {
|
|
||||||
Entry::Occupied(o) => {
|
|
||||||
let line_number = o.get();
|
|
||||||
Err(*line_number)
|
|
||||||
},
|
|
||||||
Entry::Vacant(v) => {
|
|
||||||
let line_number = if let Some(loc) = source_map.lookup(id) {
|
|
||||||
loc.line_num
|
|
||||||
} else {
|
|
||||||
0
|
|
||||||
};
|
|
||||||
v.insert(line_number);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
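// Sketch of intended use (hypothetical variables): the first registration of a name succeeds,
// and registering the same name again yields the line number of the original definition, which
// callers format into a "duplicate definition" error.
//
//     let mut seen = DuplicateNameTrackTable::new();
//     seen.try_register(&name, &id, &source_map)?;                        // Ok(())
//     assert!(seen.try_register(&name, &other_id, &source_map).is_err()); // duplicate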
|
|
||||||
|
|
||||||
#[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)]
|
|
||||||
pub struct FullyQualifiedSymbolName(pub Vec<ScopeSegment>);
|
|
||||||
|
|
||||||
impl fmt::Display for FullyQualifiedSymbolName {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
let FullyQualifiedSymbolName(v) = self;
|
|
||||||
for segment in v {
|
|
||||||
write!(f, "::{}", segment)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
|
|
||||||
pub struct ScopeSegment {
|
|
||||||
pub name: Rc<String>, //TODO maybe this could be a &str, for efficiency?
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for ScopeSegment {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
let kind = ""; //TODO implement some kind of kind-tracking here
|
|
||||||
write!(f, "{}{}", self.name, kind)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ScopeSegment {
|
|
||||||
pub fn new(name: Rc<String>) -> ScopeSegment {
|
|
||||||
ScopeSegment { name }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
//cf. p. 150 or so of Language Implementation Patterns
|
|
||||||
pub struct SymbolTable {
|
|
||||||
source_map_handle: SourceMapHandle,
|
|
||||||
symbol_path_to_symbol: HashMap<FullyQualifiedSymbolName, Symbol>,
|
|
||||||
id_to_fqsn: HashMap<ItemId, FullyQualifiedSymbolName>,
|
|
||||||
symbol_trie: SymbolTrie,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SymbolTable {
|
|
||||||
pub fn new(source_map_handle: SourceMapHandle) -> SymbolTable {
|
|
||||||
SymbolTable {
|
|
||||||
source_map_handle,
|
|
||||||
symbol_path_to_symbol: HashMap::new(),
|
|
||||||
id_to_fqsn: HashMap::new(),
|
|
||||||
symbol_trie: SymbolTrie::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn map_id_to_fqsn(&mut self, id: &ItemId, fqsn: FullyQualifiedSymbolName) {
|
|
||||||
self.id_to_fqsn.insert(id.clone(), fqsn);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_fqsn_from_id(&self, id: &ItemId) -> Option<FullyQualifiedSymbolName> {
|
|
||||||
self.id_to_fqsn.get(&id).cloned()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_new_symbol(&mut self, local_name: &Rc<String>, scope_path: &Vec<ScopeSegment>, spec: SymbolSpec) {
|
|
||||||
let mut vec: Vec<ScopeSegment> = scope_path.clone();
|
|
||||||
vec.push(ScopeSegment { name: local_name.clone() });
|
|
||||||
let fully_qualified_name = FullyQualifiedSymbolName(vec);
|
|
||||||
let symbol = Symbol { local_name: local_name.clone(), fully_qualified_name: fully_qualified_name.clone(), spec };
|
|
||||||
self.symbol_trie.insert(&fully_qualified_name);
|
|
||||||
self.symbol_path_to_symbol.insert(fully_qualified_name, symbol);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn lookup_by_fqsn(&self, fully_qualified_path: &FullyQualifiedSymbolName) -> Option<&Symbol> {
|
|
||||||
self.symbol_path_to_symbol.get(fully_qualified_path)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn lookup_children_of_fqsn(&self, path: &FullyQualifiedSymbolName) -> Vec<FullyQualifiedSymbolName> {
|
|
||||||
self.symbol_trie.get_children(path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Symbol {
|
|
||||||
pub local_name: Rc<String>, //TODO does this need to be pub?
|
|
||||||
fully_qualified_name: FullyQualifiedSymbolName,
|
|
||||||
pub spec: SymbolSpec,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Symbol {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
write!(f, "<Local name: {}, Spec: {}>", self.local_name, self.spec)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum SymbolSpec {
|
|
||||||
Func(Vec<TypeName>),
|
|
||||||
DataConstructor {
|
|
||||||
index: usize,
|
|
||||||
type_name: TypeName,
|
|
||||||
type_args: Vec<Rc<String>>,
|
|
||||||
},
|
|
||||||
RecordConstructor {
|
|
||||||
index: usize,
|
|
||||||
members: HashMap<Rc<String>, TypeName>,
|
|
||||||
type_name: TypeName,
|
|
||||||
},
|
|
||||||
Binding,
|
|
||||||
Type {
|
|
||||||
name: TypeName
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for SymbolSpec {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
use self::SymbolSpec::*;
|
|
||||||
match self {
|
|
||||||
Func(type_names) => write!(f, "Func({:?})", type_names),
|
|
||||||
DataConstructor { index, type_name, type_args } => write!(f, "DataConstructor(idx: {})({:?} -> {})", index, type_args, type_name),
|
|
||||||
RecordConstructor { type_name, index, ..} => write!(f, "RecordConstructor(idx: {})(<members> -> {})", index, type_name),
|
|
||||||
Binding => write!(f, "Binding"),
|
|
||||||
Type { name } => write!(f, "Type <{}>", name),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SymbolTable {
|
|
||||||
/* note: this adds names for *forward reference* but doesn't actually create any types. solve that problem
|
|
||||||
* later */
|
|
||||||
|
|
||||||
pub fn add_top_level_symbols(&mut self, ast: &ast::AST) -> Result<(), String> {
|
|
||||||
let mut scope_name_stack = Vec::new();
|
|
||||||
self.add_symbols_from_scope(&ast.statements, &mut scope_name_stack)
|
|
||||||
}
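/// Walks one scope's statements, registering function, type, binding, and module names.
/// Duplicate names within a scope are reported as errors; function bodies and module contents
/// are recursed into with the enclosing name pushed onto `scope_name_stack`.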
|
|
||||||
|
|
||||||
fn add_symbols_from_scope<'a>(&'a mut self, statements: &Vec<Statement>, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
|
||||||
use self::ast::Declaration::*;
|
|
||||||
|
|
||||||
let mut seen_identifiers = DuplicateNameTrackTable::new();
|
|
||||||
let mut seen_modules = DuplicateNameTrackTable::new();
|
|
||||||
|
|
||||||
for statement in statements.iter() {
|
|
||||||
match statement {
|
|
||||||
Statement { kind: StatementKind::Declaration(decl), id } => {
|
|
||||||
match decl {
|
|
||||||
FuncSig(ref signature) => {
|
|
||||||
seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
|
|
||||||
.map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
|
|
||||||
self.add_function_signature(signature, scope_name_stack)?
|
|
||||||
}
|
|
||||||
FuncDecl(ref signature, ref body) => {
|
|
||||||
seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
|
|
||||||
.map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
|
|
||||||
self.add_function_signature(signature, scope_name_stack)?;
|
|
||||||
scope_name_stack.push(ScopeSegment{
|
|
||||||
name: signature.name.clone(),
|
|
||||||
});
|
|
||||||
let output = self.add_symbols_from_scope(body, scope_name_stack);
|
|
||||||
scope_name_stack.pop();
|
|
||||||
output?
|
|
||||||
},
|
|
||||||
TypeDecl { name, body, mutable } => {
|
|
||||||
seen_identifiers.try_register(&name.name, &id, &self.source_map_handle.borrow())
|
|
||||||
.map_err(|line| format!("Duplicate type definition: {}. It's already defined at {}", name.name, line))?;
|
|
||||||
self.add_type_decl(name, body, mutable, scope_name_stack)?
|
|
||||||
},
|
|
||||||
Binding { name, .. } => {
|
|
||||||
seen_identifiers.try_register(&name, &id, &self.source_map_handle.borrow())
|
|
||||||
.map_err(|line| format!("Duplicate variable definition: {}. It's already defined at {}", name, line))?;
|
|
||||||
self.add_new_symbol(name, scope_name_stack, SymbolSpec::Binding);
|
|
||||||
}
|
|
||||||
_ => ()
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Statement { kind: StatementKind::Module(ModuleSpecifier { name, contents}), id } => {
|
|
||||||
seen_modules.try_register(&name, &id, &self.source_map_handle.borrow())
|
|
||||||
.map_err(|line| format!("Duplicate module definition: {}. It's already defined at {}", name, line))?;
|
|
||||||
scope_name_stack.push(ScopeSegment { name: name.clone() });
|
|
||||||
let output = self.add_symbols_from_scope(contents, scope_name_stack);
|
|
||||||
scope_name_stack.pop();
|
|
||||||
output?
|
|
||||||
},
|
|
||||||
_ => ()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
pub fn debug_symbol_table(&self) -> String {
|
|
||||||
let mut output = format!("Symbol table\n");
|
|
||||||
let mut sorted_symbols: Vec<(&FullyQualifiedSymbolName, &Symbol)> = self.symbol_path_to_symbol.iter().collect();
|
|
||||||
sorted_symbols.sort_by(|(fqsn, _), (other_fqsn, _)| fqsn.cmp(other_fqsn));
|
|
||||||
for (name, sym) in sorted_symbols.iter() {
|
|
||||||
write!(output, "{} -> {}\n", name, sym).unwrap();
|
|
||||||
}
|
|
||||||
output
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_function_signature(&mut self, signature: &Signature, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
|
||||||
let mut local_type_context = LocalTypeContext::new();
|
|
||||||
let types = signature.params.iter().map(|param| match param.anno {
|
|
||||||
Some(ref type_identifier) => Rc::new(format!("{:?}", type_identifier)),
|
|
||||||
None => local_type_context.new_universal_type()
|
|
||||||
}).collect();
|
|
||||||
self.add_new_symbol(&signature.name, scope_name_stack, SymbolSpec::Func(types));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
//TODO handle type mutability
|
|
||||||
fn add_type_decl(&mut self, type_name: &TypeSingletonName, body: &TypeBody, _mutable: &bool, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
|
||||||
use crate::ast::{TypeIdentifier, Variant};
|
|
||||||
let TypeBody(variants) = body;
|
|
||||||
let type_name = &type_name.name;
|
|
||||||
|
|
||||||
|
|
||||||
let type_spec = SymbolSpec::Type {
|
|
||||||
name: type_name.clone(),
|
|
||||||
};
|
|
||||||
self.add_new_symbol(type_name, &scope_name_stack, type_spec);
|
|
||||||
|
|
||||||
scope_name_stack.push(ScopeSegment{
|
|
||||||
name: type_name.clone(),
|
|
||||||
});
|
|
||||||
//TODO figure out why _params isn't being used here
|
|
||||||
for (index, var) in variants.iter().enumerate() {
|
|
||||||
match var {
|
|
||||||
Variant::UnitStruct(variant_name) => {
|
|
||||||
let spec = SymbolSpec::DataConstructor {
|
|
||||||
index,
|
|
||||||
type_name: type_name.clone(),
|
|
||||||
type_args: vec![],
|
|
||||||
};
|
|
||||||
self.add_new_symbol(variant_name, scope_name_stack, spec);
|
|
||||||
},
|
|
||||||
Variant::TupleStruct(variant_name, tuple_members) => {
|
|
||||||
//TODO fix the notion of a tuple type
|
|
||||||
let type_args = tuple_members.iter().map(|type_name| match type_name {
|
|
||||||
TypeIdentifier::Singleton(TypeSingletonName { name, ..}) => name.clone(),
|
|
||||||
TypeIdentifier::Tuple(_) => unimplemented!(),
|
|
||||||
}).collect();
|
|
||||||
let spec = SymbolSpec::DataConstructor {
|
|
||||||
index,
|
|
||||||
type_name: type_name.clone(),
|
|
||||||
type_args
|
|
||||||
};
|
|
||||||
self.add_new_symbol(variant_name, scope_name_stack, spec);
|
|
||||||
},
|
|
||||||
Variant::Record { name, members: defined_members } => {
|
|
||||||
let mut members = HashMap::new();
|
|
||||||
let mut duplicate_member_definitions = Vec::new();
|
|
||||||
for (member_name, member_type) in defined_members {
|
|
||||||
match members.entry(member_name.clone()) {
|
|
||||||
Entry::Occupied(_) => duplicate_member_definitions.push(member_name.clone()),
|
|
||||||
Entry::Vacant(v) => {
|
|
||||||
v.insert(match member_type {
|
|
||||||
TypeIdentifier::Singleton(TypeSingletonName { name, ..}) => name.clone(),
|
|
||||||
TypeIdentifier::Tuple(_) => unimplemented!(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !duplicate_member_definitions.is_empty() {
|
|
||||||
return Err(format!("Duplicate member(s) in definition of type {}: {:?}", type_name, duplicate_member_definitions));
|
|
||||||
}
|
|
||||||
let spec = SymbolSpec::RecordConstructor { index, type_name: type_name.clone(), members };
|
|
||||||
self.add_new_symbol(name, scope_name_stack, spec);
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
scope_name_stack.pop();
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct LocalTypeContext {
|
|
||||||
state: u8
|
|
||||||
}
|
|
||||||
impl LocalTypeContext {
|
|
||||||
fn new() -> LocalTypeContext {
|
|
||||||
LocalTypeContext { state: 0 }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_universal_type(&mut self) -> TypeName {
|
|
||||||
let n = self.state;
|
|
||||||
self.state += 1;
|
|
||||||
Rc::new(format!("{}", (('a' as u8) + n) as char))
|
|
||||||
}
|
|
||||||
}
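// A minimal illustrative sketch of what new_universal_type yields for successive
// un-annotated parameters, assuming TypeName is the Rc<String> alias used above.
// Note the counter is a u8 offset from 'a', so it only produces sensible names
// for the first 26 fresh types.
#[test]
fn universal_type_names_sketch() {
    let mut ctx = LocalTypeContext::new();
    assert_eq!(ctx.new_universal_type().as_str(), "a");
    assert_eq!(ctx.new_universal_type().as_str(), "b");
    assert_eq!(ctx.new_universal_type().as_str(), "c");
}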
|
|
||||||
|
|
@ -1,51 +0,0 @@
use radix_trie::{Trie, TrieCommon, TrieKey};
use super::FullyQualifiedSymbolName;
use std::hash::{Hasher, Hash};
use std::collections::hash_map::DefaultHasher;

#[derive(Debug)]
pub struct SymbolTrie(Trie<FullyQualifiedSymbolName, ()>);

impl TrieKey for FullyQualifiedSymbolName {
    fn encode_bytes(&self) -> Vec<u8> {
        let mut hasher = DefaultHasher::new();
        let mut output = vec![];
        let FullyQualifiedSymbolName(scopes) = self;
        for segment in scopes.iter() {
            segment.name.as_bytes().hash(&mut hasher);
            output.extend_from_slice(&hasher.finish().to_be_bytes());
        }
        output
    }
}

impl SymbolTrie {
    pub fn new() -> SymbolTrie {
        SymbolTrie(Trie::new())
    }

    pub fn insert(&mut self, fqsn: &FullyQualifiedSymbolName) {
        self.0.insert(fqsn.clone(), ());
    }

    pub fn get_children(&self, fqsn: &FullyQualifiedSymbolName) -> Vec<FullyQualifiedSymbolName> {
        let subtrie = match self.0.subtrie(fqsn) {
            Some(s) => s,
            None => return vec![],
        };
        let output: Vec<FullyQualifiedSymbolName> =
            subtrie.keys().filter(|cur_key| **cur_key != *fqsn).map(|fqsn| fqsn.clone()).collect();
        output
    }
}

#[test]
fn test_trie_insertion() {
    let mut trie = SymbolTrie::new();

    trie.insert(&fqsn!("unrelated"; ty, "thing"; tr));
    trie.insert(&fqsn!("outer"; ty, "inner"; tr));
    trie.insert(&fqsn!("outer"; ty, "inner"; ty, "still_inner"; tr));

    let children = trie.get_children(&fqsn!("outer"; ty, "inner"; tr));
    assert_eq!(children.len(), 1);
}
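// Why per-segment hashing works for the trie key above: each segment appends a
// fixed 8-byte chunk and the hasher state carries forward, so two names that
// share leading segments share a byte-string prefix, which is what
// subtrie()/get_children relies on. Simplified stand-alone sketch using plain
// &str segments instead of ScopeSegment:
#[test]
fn per_segment_hash_prefix_sketch() {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    fn encode(segments: &[&str]) -> Vec<u8> {
        let mut hasher = DefaultHasher::new();
        let mut out = vec![];
        for seg in segments {
            seg.as_bytes().hash(&mut hasher);
            out.extend_from_slice(&hasher.finish().to_be_bytes());
        }
        out
    }

    let parent = encode(&["outer", "inner"]);
    let child = encode(&["outer", "inner", "still_inner"]);
    assert!(child.starts_with(&parent));
}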
|
|
@ -1,193 +0,0 @@
|
|||||||
#![cfg(test)]
|
|
||||||
use std::cell::RefCell;
|
|
||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
use crate::util::quick_ast;
|
|
||||||
|
|
||||||
fn add_symbols_from_source(src: &str) -> (SymbolTable, Result<(), String>) {
|
|
||||||
let (ast, source_map) = quick_ast(src);
|
|
||||||
let source_map = Rc::new(RefCell::new(source_map));
|
|
||||||
let mut symbol_table = SymbolTable::new(source_map);
|
|
||||||
let result = symbol_table.add_top_level_symbols(&ast);
|
|
||||||
(symbol_table, result)
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! values_in_table {
|
|
||||||
($source:expr, $single_value:expr) => {
|
|
||||||
values_in_table!($source => $single_value);
|
|
||||||
};
|
|
||||||
($source:expr => $( $value:expr ),* ) => {
|
|
||||||
{
|
|
||||||
let (symbol_table, _) = add_symbols_from_source($source);
|
|
||||||
$(
|
|
||||||
match symbol_table.lookup_by_fqsn($value) {
|
|
||||||
Some(_spec) => (),
|
|
||||||
None => panic!(),
|
|
||||||
};
|
|
||||||
)*
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn basic_symbol_table() {
|
|
||||||
values_in_table! { "let a = 10; fn b() { 20 }", &fqsn!("b"; tr) };
|
|
||||||
values_in_table! { "type Option<T> = Some(T) | None" =>
|
|
||||||
&fqsn!("Option"; tr),
|
|
||||||
&fqsn!("Option"; ty, "Some"; tr),
|
|
||||||
&fqsn!("Option"; ty, "None"; tr) };
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn no_function_definition_duplicates() {
|
|
||||||
let source = r#"
|
|
||||||
fn a() { 1 }
|
|
||||||
fn b() { 2 }
|
|
||||||
fn a() { 3 }
|
|
||||||
"#;
|
|
||||||
let (_, output) = add_symbols_from_source(source);
|
|
||||||
assert!(output.unwrap_err().contains("Duplicate function definition: a"))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn no_variable_definition_duplicates() {
|
|
||||||
let source = r#"
|
|
||||||
let x = 9
|
|
||||||
let a = 20
|
|
||||||
let q = 39
|
|
||||||
let a = 30
|
|
||||||
"#;
|
|
||||||
let (_, output) = add_symbols_from_source(source);
|
|
||||||
let output = output.unwrap_err();
|
|
||||||
assert!(output.contains("Duplicate variable definition: a"));
|
|
||||||
assert!(output.contains("already defined at 2"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn no_variable_definition_duplicates_in_function() {
|
|
||||||
let source = r#"
|
|
||||||
fn a() {
|
|
||||||
let a = 20
|
|
||||||
let b = 40
|
|
||||||
a + b
|
|
||||||
}
|
|
||||||
|
|
||||||
fn q() {
|
|
||||||
let a = 29
|
|
||||||
let x = 30
|
|
||||||
let x = 33
|
|
||||||
}
|
|
||||||
"#;
|
|
||||||
let (_, output) = add_symbols_from_source(source);
|
|
||||||
assert!(output.unwrap_err().contains("Duplicate variable definition: x"))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn dont_falsely_detect_duplicates() {
|
|
||||||
let source = r#"
|
|
||||||
let a = 20;
|
|
||||||
fn some_func() {
|
|
||||||
let a = 40;
|
|
||||||
77
|
|
||||||
}
|
|
||||||
let q = 39;
|
|
||||||
"#;
|
|
||||||
let (symbol_table, _) = add_symbols_from_source(source);
|
|
||||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!["a"; tr]).is_some());
|
|
||||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!["some_func"; fn, "a";tr]).is_some());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn enclosing_scopes() {
|
|
||||||
let source = r#"
|
|
||||||
fn outer_func(x) {
|
|
||||||
fn inner_func(arg) {
|
|
||||||
arg
|
|
||||||
}
|
|
||||||
x + inner_func(x)
|
|
||||||
}"#;
|
|
||||||
let (symbol_table, _) = add_symbols_from_source(source);
|
|
||||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
|
|
||||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn enclosing_scopes_2() {
|
|
||||||
let source = r#"
|
|
||||||
fn outer_func(x) {
|
|
||||||
fn inner_func(arg) {
|
|
||||||
arg
|
|
||||||
}
|
|
||||||
|
|
||||||
fn second_inner_func() {
|
|
||||||
fn another_inner_func() {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inner_func(x)
|
|
||||||
}"#;
|
|
||||||
let (symbol_table, _) = add_symbols_from_source(source);
|
|
||||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
|
|
||||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
|
|
||||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; tr)).is_some());
|
|
||||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; fn, "another_inner_func"; tr)).is_some());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn enclosing_scopes_3() {
|
|
||||||
let source = r#"
|
|
||||||
fn outer_func(x) {
|
|
||||||
fn inner_func(arg) {
|
|
||||||
arg
|
|
||||||
}
|
|
||||||
|
|
||||||
fn second_inner_func() {
|
|
||||||
fn another_inner_func() {
|
|
||||||
}
|
|
||||||
fn another_inner_func() {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inner_func(x)
|
|
||||||
}"#;
|
|
||||||
let (_, output) = add_symbols_from_source(source);
|
|
||||||
assert!(output.unwrap_err().contains("Duplicate"))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn modules() {
|
|
||||||
let source = r#"
|
|
||||||
module stuff {
|
|
||||||
fn item() {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn item()
|
|
||||||
"#;
|
|
||||||
values_in_table! { source =>
|
|
||||||
&fqsn!("item"; tr),
|
|
||||||
&fqsn!("stuff"; tr, "item"; tr)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn duplicate_modules() {
|
|
||||||
let source = r#"
|
|
||||||
module q {
|
|
||||||
fn foo() { 4 }
|
|
||||||
}
|
|
||||||
|
|
||||||
module a {
|
|
||||||
fn foo() { 334 }
|
|
||||||
}
|
|
||||||
|
|
||||||
module a {
|
|
||||||
fn foo() { 256.1 }
|
|
||||||
}
|
|
||||||
"#;
|
|
||||||
let (_, output) = add_symbols_from_source(source);
|
|
||||||
let output = output.unwrap_err();
|
|
||||||
assert!(output.contains("Duplicate module"));
|
|
||||||
assert!(output.contains("already defined at 5"));
|
|
||||||
}
|
|
@ -1,344 +0,0 @@
|
|||||||
use itertools::Itertools;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::rc::Rc;
|
|
||||||
use std::iter::{Iterator, Peekable};
|
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
use crate::source_map::Location;
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum TokenKind {
|
|
||||||
Newline, Semicolon,
|
|
||||||
|
|
||||||
LParen, RParen,
|
|
||||||
LSquareBracket, RSquareBracket,
|
|
||||||
LAngleBracket, RAngleBracket,
|
|
||||||
LCurlyBrace, RCurlyBrace,
|
|
||||||
Pipe, Backslash,
|
|
||||||
|
|
||||||
Comma, Period, Colon, Underscore,
|
|
||||||
Slash, Equals,
|
|
||||||
|
|
||||||
Operator(Rc<String>),
|
|
||||||
DigitGroup(Rc<String>), HexLiteral(Rc<String>), BinNumberSigil,
|
|
||||||
StrLiteral {
|
|
||||||
s: Rc<String>,
|
|
||||||
prefix: Option<Rc<String>>
|
|
||||||
},
|
|
||||||
Identifier(Rc<String>),
|
|
||||||
Keyword(Kw),
|
|
||||||
|
|
||||||
EOF,
|
|
||||||
|
|
||||||
Error(String),
|
|
||||||
}
|
|
||||||
use self::TokenKind::*;
|
|
||||||
|
|
||||||
impl fmt::Display for TokenKind {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
&Operator(ref s) => write!(f, "Operator({})", **s),
|
|
||||||
&DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
|
|
||||||
&HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
|
|
||||||
&StrLiteral {ref s, .. } => write!(f, "StrLiteral({})", s),
|
|
||||||
&Identifier(ref s) => write!(f, "Identifier({})", s),
|
|
||||||
&Error(ref s) => write!(f, "Error({})", s),
|
|
||||||
other => write!(f, "{:?}", other),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
|
||||||
pub enum Kw {
|
|
||||||
If, Then, Else,
|
|
||||||
Is,
|
|
||||||
Func,
|
|
||||||
For, While,
|
|
||||||
Const, Let, In,
|
|
||||||
Mut,
|
|
||||||
Return,
|
|
||||||
Alias, Type, SelfType, SelfIdent,
|
|
||||||
Interface, Impl,
|
|
||||||
True, False,
|
|
||||||
Module, Import
|
|
||||||
}
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref KEYWORDS: HashMap<&'static str, Kw> =
|
|
||||||
hashmap! {
|
|
||||||
"if" => Kw::If,
|
|
||||||
"then" => Kw::Then,
|
|
||||||
"else" => Kw::Else,
|
|
||||||
"is" => Kw::Is,
|
|
||||||
"fn" => Kw::Func,
|
|
||||||
"for" => Kw::For,
|
|
||||||
"while" => Kw::While,
|
|
||||||
"const" => Kw::Const,
|
|
||||||
"let" => Kw::Let,
|
|
||||||
"in" => Kw::In,
|
|
||||||
"mut" => Kw::Mut,
|
|
||||||
"return" => Kw::Return,
|
|
||||||
"alias" => Kw::Alias,
|
|
||||||
"type" => Kw::Type,
|
|
||||||
"Self" => Kw::SelfType,
|
|
||||||
"self" => Kw::SelfIdent,
|
|
||||||
"interface" => Kw::Interface,
|
|
||||||
"impl" => Kw::Impl,
|
|
||||||
"true" => Kw::True,
|
|
||||||
"false" => Kw::False,
|
|
||||||
"module" => Kw::Module,
|
|
||||||
"import" => Kw::Import,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct Token {
|
|
||||||
pub kind: TokenKind,
|
|
||||||
pub location: Location,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Token {
|
|
||||||
pub fn get_error(&self) -> Option<String> {
|
|
||||||
match self.kind {
|
|
||||||
TokenKind::Error(ref s) => Some(s.clone()),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn to_string_with_metadata(&self) -> String {
|
|
||||||
format!("{}({})", self.kind, self.location)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_kind(&self) -> TokenKind {
|
|
||||||
self.kind.clone()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const OPERATOR_CHARS: [char; 18] = ['!', '$', '%', '&', '*', '+', '-', '.', ':', '<', '>', '=', '?', '@', '^', '|', '~', '`'];
|
|
||||||
fn is_operator(c: &char) -> bool {
|
|
||||||
OPERATOR_CHARS.iter().any(|x| x == c)
|
|
||||||
}
|
|
||||||
|
|
||||||
type CharData = (usize, usize, char);
|
|
||||||
|
|
||||||
pub fn tokenize(input: &str) -> Vec<Token> {
|
|
||||||
let mut tokens: Vec<Token> = Vec::new();
|
|
||||||
|
|
||||||
let mut input = input.lines().enumerate()
|
|
||||||
.intersperse((0, "\n"))
|
|
||||||
.flat_map(|(line_idx, ref line)| {
|
|
||||||
line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
|
|
||||||
})
|
|
||||||
.peekable();
|
|
||||||
|
|
||||||
while let Some((line_num, char_num, c)) = input.next() {
|
|
||||||
let cur_tok_kind = match c {
|
|
||||||
'/' => match input.peek().map(|t| t.2) {
|
|
||||||
Some('/') => {
|
|
||||||
while let Some((_, _, c)) = input.next() {
|
|
||||||
if c == '\n' {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
},
|
|
||||||
Some('*') => {
|
|
||||||
input.next();
|
|
||||||
let mut comment_level = 1;
|
|
||||||
while let Some((_, _, c)) = input.next() {
|
|
||||||
if c == '*' && input.peek().map(|t| t.2) == Some('/') {
|
|
||||||
input.next();
|
|
||||||
comment_level -= 1;
|
|
||||||
} else if c == '/' && input.peek().map(|t| t.2) == Some('*') {
|
|
||||||
input.next();
|
|
||||||
comment_level += 1;
|
|
||||||
}
|
|
||||||
if comment_level == 0 {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
},
|
|
||||||
_ => Slash
|
|
||||||
},
|
|
||||||
c if c.is_whitespace() && c != '\n' => continue,
|
|
||||||
'\n' => Newline, ';' => Semicolon,
|
|
||||||
':' => Colon, ',' => Comma,
|
|
||||||
'(' => LParen, ')' => RParen,
|
|
||||||
'{' => LCurlyBrace, '}' => RCurlyBrace,
|
|
||||||
'[' => LSquareBracket, ']' => RSquareBracket,
|
|
||||||
'"' => handle_quote(&mut input, None),
|
|
||||||
'\\' => Backslash,
|
|
||||||
c if c.is_digit(10) => handle_digit(c, &mut input),
|
|
||||||
c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input),
|
|
||||||
c if is_operator(&c) => handle_operator(c, &mut input),
|
|
||||||
unknown => Error(format!("Unexpected character: {}", unknown)),
|
|
||||||
};
|
|
||||||
let location = Location { line_num, char_num };
|
|
||||||
tokens.push(Token { kind: cur_tok_kind, location });
|
|
||||||
}
|
|
||||||
tokens
|
|
||||||
}
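// Quick usage sketch of tokenize, in the same style as the tests further down:
// whitespace is skipped, keywords are recognized, and each token carries a
// Location for later error reporting.
#[test]
fn tokenize_usage_sketch() {
    let kinds: Vec<TokenKind> = tokenize("let x = 5").into_iter().map(|t| t.kind).collect();
    assert_eq!(
        kinds,
        vec![
            Keyword(Kw::Let),
            Identifier(Rc::new("x".to_string())),
            Equals,
            DigitGroup(Rc::new("5".to_string())),
        ]
    );
}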
|
|
||||||
|
|
||||||
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
|
||||||
if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
|
|
||||||
input.next();
|
|
||||||
let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
|
|
||||||
HexLiteral(Rc::new(rest))
|
|
||||||
} else if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'b' }) {
|
|
||||||
input.next();
|
|
||||||
BinNumberSigil
|
|
||||||
} else {
|
|
||||||
let mut buf = c.to_string();
|
|
||||||
buf.extend(input.peeking_take_while(|&(_, _, ref c)| c.is_digit(10)).map(|(_, _, c)| { c }));
|
|
||||||
DigitGroup(Rc::new(buf))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>, quote_prefix: Option<&str>) -> TokenKind {
|
|
||||||
let mut buf = String::new();
|
|
||||||
loop {
|
|
||||||
match input.next().map(|(_, _, c)| { c }) {
|
|
||||||
Some('"') => break,
|
|
||||||
Some('\\') => {
|
|
||||||
let next = input.peek().map(|&(_, _, c)| { c });
|
|
||||||
if next == Some('n') {
|
|
||||||
input.next();
|
|
||||||
buf.push('\n')
|
|
||||||
} else if next == Some('"') {
|
|
||||||
input.next();
|
|
||||||
buf.push('"');
|
|
||||||
} else if next == Some('t') {
|
|
||||||
input.next();
|
|
||||||
buf.push('\t');
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Some(c) => buf.push(c),
|
|
||||||
None => return TokenKind::Error("Unclosed string".to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
|
||||||
let mut buf = String::new();
|
|
||||||
buf.push(c);
|
|
||||||
if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
|
|
||||||
return TokenKind::Underscore
|
|
||||||
}
|
|
||||||
|
|
||||||
loop {
|
|
||||||
match input.peek().map(|&(_, _, c)| { c }) {
|
|
||||||
Some(c) if c == '"' => {
|
|
||||||
input.next();
|
|
||||||
return handle_quote(input, Some(&buf));
|
|
||||||
},
|
|
||||||
Some(c) if c.is_alphanumeric() || c == '_' => {
|
|
||||||
input.next();
|
|
||||||
buf.push(c);
|
|
||||||
},
|
|
||||||
_ => break,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
match KEYWORDS.get(buf.as_str()) {
|
|
||||||
Some(kw) => TokenKind::Keyword(*kw),
|
|
||||||
None => TokenKind::Identifier(Rc::new(buf)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
|
||||||
match c {
|
|
||||||
'<' | '>' | '|' | '.' | '=' => {
|
|
||||||
let next = input.peek().map(|&(_, _, c)| c);
|
|
||||||
if !next.map(|n| { is_operator(&n) }).unwrap_or(false) {
|
|
||||||
return match c {
|
|
||||||
'<' => LAngleBracket,
|
|
||||||
'>' => RAngleBracket,
|
|
||||||
'|' => Pipe,
|
|
||||||
'.' => Period,
|
|
||||||
'=' => Equals,
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
_ => (),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut buf = String::new();
|
|
||||||
|
|
||||||
if c == '`' {
|
|
||||||
loop {
|
|
||||||
match input.peek().map(|&(_, _, c)| { c }) {
|
|
||||||
Some(c) if c.is_alphabetic() || c == '_' => {
|
|
||||||
input.next();
|
|
||||||
buf.push(c);
|
|
||||||
},
|
|
||||||
Some('`') => {
|
|
||||||
input.next();
|
|
||||||
break;
|
|
||||||
},
|
|
||||||
_ => break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
buf.push(c);
|
|
||||||
loop {
|
|
||||||
match input.peek().map(|&(_, _, c)| { c }) {
|
|
||||||
Some(c) if is_operator(&c) => {
|
|
||||||
input.next();
|
|
||||||
buf.push(c);
|
|
||||||
},
|
|
||||||
_ => break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
TokenKind::Operator(Rc::new(buf))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod schala_tokenizer_tests {
|
|
||||||
use super::*;
|
|
||||||
use super::Kw::*;
|
|
||||||
|
|
||||||
macro_rules! digit { ($ident:expr) => { DigitGroup(Rc::new($ident.to_string())) } }
|
|
||||||
macro_rules! ident { ($ident:expr) => { Identifier(Rc::new($ident.to_string())) } }
|
|
||||||
macro_rules! op { ($ident:expr) => { Operator(Rc::new($ident.to_string())) } }
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn tokens() {
|
|
||||||
let a = tokenize("let a: A<B> = c ++ d");
|
|
||||||
let token_kinds: Vec<TokenKind> = a.into_iter().map(move |t| t.kind).collect();
|
|
||||||
assert_eq!(token_kinds, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
|
|
||||||
LAngleBracket, ident!("B"), RAngleBracket, Equals, ident!("c"), op!("++"), ident!("d")]);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn underscores() {
|
|
||||||
let token_kinds: Vec<TokenKind> = tokenize("4_8").into_iter().map(move |t| t.kind).collect();
|
|
||||||
assert_eq!(token_kinds, vec![digit!("4"), Underscore, digit!("8")]);
|
|
||||||
|
|
||||||
let token_kinds2: Vec<TokenKind> = tokenize("aba_yo").into_iter().map(move |t| t.kind).collect();
|
|
||||||
assert_eq!(token_kinds2, vec![ident!("aba_yo")]);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn comments() {
|
|
||||||
let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.kind).collect();
|
|
||||||
assert_eq!(token_kinds, vec![digit!("1"), op!("+"), digit!("2")]);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn backtick_operators() {
|
|
||||||
let token_kinds: Vec<TokenKind> = tokenize("1 `plus` 2").into_iter().map(move |t| t.kind).collect();
|
|
||||||
assert_eq!(token_kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn string_literals() {
|
|
||||||
let token_kinds: Vec<TokenKind> = tokenize(r#""some string""#).into_iter().map(move |t| t.kind).collect();
|
|
||||||
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);
|
|
||||||
|
|
||||||
let token_kinds: Vec<TokenKind> = tokenize(r#"b"some bytestring""#).into_iter().map(move |t| t.kind).collect();
|
|
||||||
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some bytestring".to_string()), prefix: Some(Rc::new("b".to_string())) }]);
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,68 +0,0 @@
|
|||||||
use std::collections::HashMap;
|
|
||||||
use std::hash::Hash;
|
|
||||||
use std::cmp::Eq;
|
|
||||||
use std::ops::Deref;
|
|
||||||
|
|
||||||
pub fn deref_optional_box<T>(x: &Option<Box<T>>) -> Option<&T> {
|
|
||||||
x.as_ref().map(|b: &Box<T>| Deref::deref(b))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default, Debug)]
|
|
||||||
pub struct ScopeStack<'a, T: 'a, V: 'a> where T: Hash + Eq {
|
|
||||||
parent: Option<&'a ScopeStack<'a, T, V>>,
|
|
||||||
values: HashMap<T, V>,
|
|
||||||
scope_name: Option<String>
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {
|
|
||||||
pub fn new(name: Option<String>) -> ScopeStack<'a, T, V> where T: Hash + Eq {
|
|
||||||
ScopeStack {
|
|
||||||
parent: None,
|
|
||||||
values: HashMap::new(),
|
|
||||||
scope_name: name
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn insert(&mut self, key: T, value: V) where T: Hash + Eq {
|
|
||||||
self.values.insert(key, value);
|
|
||||||
}
|
|
||||||
pub fn lookup(&self, key: &T) -> Option<&V> where T: Hash + Eq {
|
|
||||||
match (self.values.get(key), self.parent) {
|
|
||||||
(None, None) => None,
|
|
||||||
(None, Some(parent)) => parent.lookup(key),
|
|
||||||
(Some(value), _) => Some(value),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn new_scope(&'a self, name: Option<String>) -> ScopeStack<'a, T, V> where T: Hash + Eq {
|
|
||||||
ScopeStack {
|
|
||||||
parent: Some(self),
|
|
||||||
values: HashMap::default(),
|
|
||||||
scope_name: name,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub fn get_name(&self) -> Option<&String> {
|
|
||||||
self.scope_name.as_ref()
|
|
||||||
}
|
|
||||||
}
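// A minimal usage sketch of ScopeStack: lookups that miss in the current scope
// fall through to the parent, but a parent never sees its children's bindings.
#[test]
fn scope_stack_sketch() {
    let mut global = ScopeStack::new(Some("global".to_string()));
    global.insert("x", 1);

    let mut inner = global.new_scope(Some("inner".to_string()));
    inner.insert("y", 2);

    assert_eq!(inner.lookup(&"x"), Some(&1)); // found via the parent scope
    assert_eq!(inner.lookup(&"y"), Some(&2)); // found locally
    assert_eq!(global.lookup(&"y"), None);    // not visible from the parent
}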
|
|
||||||
|
|
||||||
/// this is intended for use in tests, and does no error-handling whatsoever
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub fn quick_ast(input: &str) -> (crate::ast::AST, crate::source_map::SourceMap) {
|
|
||||||
use std::cell::RefCell;
|
|
||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
let source_map = crate::source_map::SourceMap::new();
|
|
||||||
let source_map_handle = Rc::new(RefCell::new(source_map));
|
|
||||||
let tokens = crate::tokenizing::tokenize(input);
|
|
||||||
let mut parser = crate::parsing::Parser::new(source_map_handle.clone());
|
|
||||||
parser.add_new_tokens(tokens);
|
|
||||||
let output = parser.parse();
|
|
||||||
std::mem::drop(parser);
|
|
||||||
(output.unwrap(), Rc::try_unwrap(source_map_handle).map_err(|_| ()).unwrap().into_inner())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused_macros)]
|
|
||||||
macro_rules! rc {
|
|
||||||
($string:tt) => { Rc::new(stringify!($string).to_string()) }
|
|
||||||
}
|
|
@ -3,11 +3,19 @@ let _SCHALA_VERSION = "0.1.0"
 type Option<T> = Some(T) | None
 type Ord = LT | EQ | GT
 
+@register_builtin(print)
+fn print(arg) { }
+
+@register_builtin(println)
+fn println(arg) { }
+
+@register_builtin(getline)
+fn getline(arg) { }
+
 fn map(input: Option<T>, func: Func): Option<T> {
   if input {
-    is Option::Some(x) then Option::Some(func(x)),
-    is Option::None then Option::None,
+    is Option::Some(x) then Option::Some(func(x))
+    is Option::None then Option::None
   }
 }
345 schala-lang/src/ast/mod.rs Normal file
@ -0,0 +1,345 @@
|
|||||||
|
#![allow(clippy::upper_case_acronyms)]
|
||||||
|
#![allow(clippy::enum_variant_names)]
|
||||||
|
|
||||||
|
use std::{
|
||||||
|
convert::{AsRef, From},
|
||||||
|
fmt,
|
||||||
|
rc::Rc,
|
||||||
|
};
|
||||||
|
|
||||||
|
mod operators;
|
||||||
|
mod visitor;
|
||||||
|
mod visualize;
|
||||||
|
|
||||||
|
pub use operators::{BinOp, PrefixOp};
|
||||||
|
pub use visitor::*;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
derivative::Derivative,
|
||||||
|
identifier::{define_id_kind, Id},
|
||||||
|
parsing::Location,
|
||||||
|
util::delim_wrapped,
|
||||||
|
};
|
||||||
|
|
||||||
|
define_id_kind!(ASTItem);
|
||||||
|
|
||||||
|
pub type ItemId = Id<ASTItem>;
|
||||||
|
|
||||||
|
#[derive(Derivative, Debug)]
|
||||||
|
#[derivative(PartialEq)]
|
||||||
|
pub struct AST {
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
pub id: ItemId,
|
||||||
|
pub statements: Block,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for AST {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(f, "{}", visualize::render_ast(self))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Derivative, Debug, Clone)]
|
||||||
|
#[derivative(PartialEq)]
|
||||||
|
pub struct Statement<K> {
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
pub id: ItemId,
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
pub location: Location,
|
||||||
|
pub kind: K,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum StatementKind {
|
||||||
|
Expression(Expression),
|
||||||
|
Declaration(Declaration),
|
||||||
|
Import(ImportSpecifier),
|
||||||
|
Flow(FlowControl),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum FlowControl {
|
||||||
|
Continue,
|
||||||
|
Break,
|
||||||
|
Return(Option<Expression>),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, Default)]
|
||||||
|
pub struct Block {
|
||||||
|
pub statements: Vec<Statement<StatementKind>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Vec<Statement<StatementKind>>> for Block {
|
||||||
|
fn from(statements: Vec<Statement<StatementKind>>) -> Self {
|
||||||
|
Self { statements }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Statement<StatementKind>> for Block {
|
||||||
|
fn from(statement: Statement<StatementKind>) -> Self {
|
||||||
|
Self { statements: vec![statement] }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AsRef<[Statement<StatementKind>]> for Block {
|
||||||
|
fn as_ref(&self) -> &[Statement<StatementKind>] {
|
||||||
|
self.statements.as_ref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type ParamName = Rc<String>;
|
||||||
|
|
||||||
|
#[derive(Debug, Derivative, Clone)]
|
||||||
|
#[derivative(PartialEq)]
|
||||||
|
pub struct QualifiedName {
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
pub id: ItemId,
|
||||||
|
pub components: Vec<Rc<String>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for QualifiedName {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
match &self.components[..] {
|
||||||
|
[] => write!(f, "[<empty>]"),
|
||||||
|
[name] => write!(f, "{}", name),
|
||||||
|
[name, rest @ ..] => {
|
||||||
|
write!(f, "{}", name)?;
|
||||||
|
for c in rest {
|
||||||
|
write!(f, "::{}", c)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct FormalParam {
|
||||||
|
pub name: ParamName,
|
||||||
|
pub default: Option<Expression>,
|
||||||
|
pub anno: Option<TypeIdentifier>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum Declaration {
|
||||||
|
FuncSig(Signature),
|
||||||
|
FuncDecl(Signature, Block),
|
||||||
|
TypeDecl {
|
||||||
|
name: TypeSingletonName,
|
||||||
|
body: TypeBody,
|
||||||
|
mutable: bool,
|
||||||
|
},
|
||||||
|
//TODO TypeAlias `original` needs to be a more complex type definition
|
||||||
|
TypeAlias {
|
||||||
|
alias: Rc<String>,
|
||||||
|
original: Rc<String>,
|
||||||
|
},
|
||||||
|
Binding {
|
||||||
|
name: Rc<String>,
|
||||||
|
constant: bool,
|
||||||
|
type_anno: Option<TypeIdentifier>,
|
||||||
|
expr: Expression,
|
||||||
|
},
|
||||||
|
Impl {
|
||||||
|
type_name: TypeIdentifier,
|
||||||
|
interface_name: Option<TypeSingletonName>,
|
||||||
|
block: Vec<Statement<Declaration>>,
|
||||||
|
},
|
||||||
|
Interface {
|
||||||
|
name: Rc<String>,
|
||||||
|
signatures: Vec<Signature>,
|
||||||
|
},
|
||||||
|
//TODO need to limit the types of statements that can be annotated
|
||||||
|
Annotation {
|
||||||
|
name: Rc<String>,
|
||||||
|
arguments: Vec<Expression>,
|
||||||
|
inner: Box<Statement<StatementKind>>,
|
||||||
|
},
|
||||||
|
Module {
|
||||||
|
name: Rc<String>,
|
||||||
|
items: Block,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct Signature {
|
||||||
|
pub name: Rc<String>,
|
||||||
|
pub operator: bool,
|
||||||
|
pub params: Vec<FormalParam>,
|
||||||
|
pub type_anno: Option<TypeIdentifier>,
|
||||||
|
}
|
||||||
|
|
||||||
|
//TODO I can probably get rid of TypeBody
|
||||||
|
#[derive(Debug, Derivative, Clone)]
|
||||||
|
#[derivative(PartialEq)]
|
||||||
|
pub enum TypeBody {
|
||||||
|
Variants(Vec<Variant>),
|
||||||
|
ImmediateRecord {
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
id: ItemId,
|
||||||
|
fields: Vec<(Rc<String>, TypeIdentifier)>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Derivative, Clone)]
|
||||||
|
#[derivative(PartialEq)]
|
||||||
|
pub struct Variant {
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
pub id: ItemId,
|
||||||
|
pub name: Rc<String>,
|
||||||
|
pub kind: VariantKind,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum VariantKind {
|
||||||
|
UnitStruct,
|
||||||
|
TupleStruct(Vec<TypeIdentifier>),
|
||||||
|
Record(Vec<(Rc<String>, TypeIdentifier)>),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Derivative, Clone)]
|
||||||
|
#[derivative(PartialEq)]
|
||||||
|
pub struct Expression {
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
pub id: ItemId,
|
||||||
|
pub kind: ExpressionKind,
|
||||||
|
//TODO this should only allow singletons, not tuples
|
||||||
|
pub type_anno: Option<TypeIdentifier>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Expression {
|
||||||
|
pub fn new(id: ItemId, kind: ExpressionKind) -> Expression {
|
||||||
|
Expression { id, kind, type_anno: None }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum TypeIdentifier {
|
||||||
|
Tuple(Vec<TypeIdentifier>),
|
||||||
|
Singleton(TypeSingletonName),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for TypeIdentifier {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
TypeIdentifier::Tuple(items) =>
|
||||||
|
write!(f, "{}", delim_wrapped('(', ')', items.iter().map(|item| item.to_string()))),
|
||||||
|
TypeIdentifier::Singleton(tsn) => {
|
||||||
|
write!(f, "{}", tsn.name)?;
|
||||||
|
if !tsn.params.is_empty() {
|
||||||
|
write!(f, "{}", delim_wrapped('<', '>', tsn.params.iter().map(|item| item.to_string())))?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct TypeSingletonName {
|
||||||
|
pub name: Rc<String>,
|
||||||
|
pub params: Vec<TypeIdentifier>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum ExpressionKind {
|
||||||
|
NatLiteral(u64),
|
||||||
|
FloatLiteral(f64),
|
||||||
|
StringLiteral { prefix: Option<Rc<String>>, s: Rc<String> },
|
||||||
|
BoolLiteral(bool),
|
||||||
|
BinExp(BinOp, Box<Expression>, Box<Expression>),
|
||||||
|
PrefixExp(PrefixOp, Box<Expression>),
|
||||||
|
TupleLiteral(Vec<Expression>),
|
||||||
|
Value(QualifiedName),
|
||||||
|
SelfValue,
|
||||||
|
NamedStruct { name: QualifiedName, fields: Vec<(Rc<String>, Expression)> },
|
||||||
|
Call { f: Box<Expression>, arguments: Vec<InvocationArgument> },
|
||||||
|
Index { indexee: Box<Expression>, indexers: Vec<Expression> },
|
||||||
|
IfExpression { discriminator: Option<Box<Expression>>, body: Box<IfExpressionBody> },
|
||||||
|
WhileExpression { condition: Option<Box<Expression>>, body: Block },
|
||||||
|
ForExpression { enumerators: Vec<Enumerator>, body: Box<ForBody> },
|
||||||
|
Lambda { params: Vec<FormalParam>, type_anno: Option<TypeIdentifier>, body: Block },
|
||||||
|
Access { name: Rc<String>, expr: Box<Expression> },
|
||||||
|
ListLiteral(Vec<Expression>),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum InvocationArgument {
|
||||||
|
Positional(Expression),
|
||||||
|
Keyword { name: Rc<String>, expr: Expression },
|
||||||
|
Ignored,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum IfExpressionBody {
|
||||||
|
SimpleConditional { then_case: Block, else_case: Option<Block> },
|
||||||
|
SimplePatternMatch { pattern: Pattern, then_case: Block, else_case: Option<Block> },
|
||||||
|
CondList(Vec<ConditionArm>),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct ConditionArm {
|
||||||
|
pub condition: Condition,
|
||||||
|
pub guard: Option<Expression>,
|
||||||
|
pub body: Block,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum Condition {
|
||||||
|
Pattern(Pattern),
|
||||||
|
TruncatedOp(BinOp, Expression),
|
||||||
|
//Expression(Expression), //I'm pretty sure I don't actually want this
|
||||||
|
Else,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum Pattern {
|
||||||
|
Ignored,
|
||||||
|
TuplePattern(Vec<Pattern>),
|
||||||
|
Literal(PatternLiteral),
|
||||||
|
TupleStruct(QualifiedName, Vec<Pattern>),
|
||||||
|
Record(QualifiedName, Vec<(Rc<String>, Pattern)>),
|
||||||
|
VarOrName(QualifiedName),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum PatternLiteral {
|
||||||
|
NumPattern { neg: bool, num: ExpressionKind },
|
||||||
|
StringPattern(Rc<String>),
|
||||||
|
BoolPattern(bool),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct Enumerator {
|
||||||
|
pub identifier: Rc<String>,
|
||||||
|
pub generator: Expression,
|
||||||
|
pub assignment: bool, //true if `=`, false if `<-`
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum ForBody {
|
||||||
|
MonadicReturn(Expression),
|
||||||
|
StatementBlock(Block),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Derivative, Clone)]
|
||||||
|
#[derivative(PartialEq)]
|
||||||
|
pub struct ImportSpecifier {
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
pub id: ItemId,
|
||||||
|
pub path_components: Vec<Rc<String>>,
|
||||||
|
pub imported_names: ImportedNames,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum ImportedNames {
|
||||||
|
All,
|
||||||
|
LastOfPath,
|
||||||
|
List(Vec<Rc<String>>),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct ModuleSpecifier {
|
||||||
|
pub name: Rc<String>,
|
||||||
|
pub contents: Block,
|
||||||
|
}
|
61 schala-lang/src/ast/operators.rs Normal file
@ -0,0 +1,61 @@
use std::rc::Rc;

#[derive(Debug, PartialEq, Clone)]
pub struct PrefixOp {
    sigil: Rc<String>,
}

impl PrefixOp {
    pub fn from_sigil(sigil: &str) -> PrefixOp {
        PrefixOp { sigil: Rc::new(sigil.to_string()) }
    }

    pub fn sigil(&self) -> &str {
        &self.sigil
    }
}

#[derive(Debug, PartialEq, Clone)]
pub struct BinOp {
    sigil: Rc<String>,
}

impl BinOp {
    pub fn from_sigil(sigil: &str) -> BinOp {
        BinOp { sigil: Rc::new(sigil.to_string()) }
    }

    pub fn sigil(&self) -> &str {
        &self.sigil
    }

    pub fn min_precedence() -> i32 {
        i32::min_value()
    }
    pub fn get_precedence(&self) -> i32 {
        binop_precedences(self.sigil.as_ref())
    }
}

fn binop_precedences(s: &str) -> i32 {
    let default = 10_000_000;
    match s {
        "+" => 10,
        "-" => 10,
        "*" => 20,
        "/" => 20,
        "%" => 20,
        "++" => 30,
        "^" => 30,
        "&" => 20,
        "|" => 20,
        ">" => 20,
        ">=" => 20,
        "<" => 20,
        "<=" => 20,
        "==" => 40,
        "<=>" => 30,
        "=" => 5, // Assignment should have the lowest precedence (it binds loosest)
        _ => default,
    }
}
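// Sketch of how these numbers are meant to be read: a larger value binds
// tighter, so "*" groups before "+", and "=" is the loosest operator of all.
#[test]
fn binop_precedence_sketch() {
    let plus = BinOp::from_sigil("+");
    let times = BinOp::from_sigil("*");
    let assign = BinOp::from_sigil("=");
    assert!(times.get_precedence() > plus.get_precedence());
    assert!(assign.get_precedence() < plus.get_precedence());
    assert!(BinOp::min_precedence() <= assign.get_precedence());
}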
202 schala-lang/src/ast/visitor.rs Normal file
@ -0,0 +1,202 @@
|
|||||||
|
use crate::ast::*;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum Recursion {
|
||||||
|
Continue,
|
||||||
|
Stop,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait ASTVisitor: Sized {
|
||||||
|
fn expression(&mut self, _expression: &Expression) -> Recursion {
|
||||||
|
Recursion::Continue
|
||||||
|
}
|
||||||
|
fn declaration(&mut self, _declaration: &Declaration, _id: &ItemId) -> Recursion {
|
||||||
|
Recursion::Continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fn import(&mut self, _import: &ImportSpecifier) -> Recursion {
|
||||||
|
Recursion::Continue
|
||||||
|
}
|
||||||
|
fn pattern(&mut self, _pat: &Pattern) -> Recursion {
|
||||||
|
Recursion::Continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn walk_ast<V: ASTVisitor>(v: &mut V, ast: &AST) {
|
||||||
|
walk_block(v, &ast.statements);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn walk_block<V: ASTVisitor>(v: &mut V, block: &Block) {
|
||||||
|
use StatementKind::*;
|
||||||
|
for statement in block.statements.iter() {
|
||||||
|
match statement.kind {
|
||||||
|
StatementKind::Expression(ref expr) => {
|
||||||
|
walk_expression(v, expr);
|
||||||
|
}
|
||||||
|
Declaration(ref decl) => {
|
||||||
|
walk_declaration(v, decl, &statement.id);
|
||||||
|
}
|
||||||
|
Import(ref import_spec) => {
|
||||||
|
v.import(import_spec);
|
||||||
|
}
|
||||||
|
Flow(ref flow_control) =>
|
||||||
|
if let FlowControl::Return(Some(ref retval)) = flow_control {
|
||||||
|
walk_expression(v, retval);
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn walk_declaration<V: ASTVisitor>(v: &mut V, decl: &Declaration, id: &ItemId) {
|
||||||
|
use Declaration::*;
|
||||||
|
|
||||||
|
if let Recursion::Continue = v.declaration(decl, id) {
|
||||||
|
match decl {
|
||||||
|
FuncDecl(_sig, block) => {
|
||||||
|
walk_block(v, block);
|
||||||
|
}
|
||||||
|
Binding { name: _, constant: _, type_anno: _, expr } => {
|
||||||
|
walk_expression(v, expr);
|
||||||
|
}
|
||||||
|
Module { name: _, items } => {
|
||||||
|
walk_block(v, items);
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn walk_expression<V: ASTVisitor>(v: &mut V, expr: &Expression) {
|
||||||
|
use ExpressionKind::*;
|
||||||
|
|
||||||
|
if let Recursion::Continue = v.expression(expr) {
|
||||||
|
match &expr.kind {
|
||||||
|
NatLiteral(_)
|
||||||
|
| FloatLiteral(_)
|
||||||
|
| StringLiteral { .. }
|
||||||
|
| BoolLiteral(_)
|
||||||
|
| Value(_)
|
||||||
|
| SelfValue => (),
|
||||||
|
BinExp(_, lhs, rhs) => {
|
||||||
|
walk_expression(v, lhs);
|
||||||
|
walk_expression(v, rhs);
|
||||||
|
}
|
||||||
|
PrefixExp(_, arg) => {
|
||||||
|
walk_expression(v, arg);
|
||||||
|
}
|
||||||
|
TupleLiteral(exprs) =>
|
||||||
|
for expr in exprs {
|
||||||
|
walk_expression(v, expr);
|
||||||
|
},
|
||||||
|
NamedStruct { name: _, fields } =>
|
||||||
|
for (_, expr) in fields.iter() {
|
||||||
|
walk_expression(v, expr);
|
||||||
|
},
|
||||||
|
Call { f, arguments } => {
|
||||||
|
walk_expression(v, f);
|
||||||
|
for arg in arguments.iter() {
|
||||||
|
match arg {
|
||||||
|
InvocationArgument::Positional(expr) | InvocationArgument::Keyword { expr, .. } =>
|
||||||
|
walk_expression(v, expr),
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Index { indexee, indexers } => {
|
||||||
|
walk_expression(v, indexee);
|
||||||
|
for indexer in indexers.iter() {
|
||||||
|
walk_expression(v, indexer);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
IfExpression { discriminator, body } => {
|
||||||
|
if let Some(d) = discriminator.as_ref() {
|
||||||
|
walk_expression(v, d);
|
||||||
|
}
|
||||||
|
walk_if_expr_body(v, body.as_ref());
|
||||||
|
}
|
||||||
|
WhileExpression { condition, body } => {
|
||||||
|
if let Some(d) = condition.as_ref() {
|
||||||
|
walk_expression(v, d);
|
||||||
|
}
|
||||||
|
walk_block(v, body);
|
||||||
|
}
|
||||||
|
ForExpression { enumerators, body } => {
|
||||||
|
for enumerator in enumerators {
|
||||||
|
walk_expression(v, &enumerator.generator);
|
||||||
|
}
|
||||||
|
match body.as_ref() {
|
||||||
|
ForBody::MonadicReturn(expr) => walk_expression(v, expr),
|
||||||
|
ForBody::StatementBlock(block) => walk_block(v, block),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
Lambda { params: _, type_anno: _, body } => {
|
||||||
|
walk_block(v, body);
|
||||||
|
}
|
||||||
|
Access { name: _, expr } => {
|
||||||
|
walk_expression(v, expr);
|
||||||
|
}
|
||||||
|
ListLiteral(exprs) =>
|
||||||
|
for expr in exprs {
|
||||||
|
walk_expression(v, expr);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn walk_if_expr_body<V: ASTVisitor>(v: &mut V, body: &IfExpressionBody) {
|
||||||
|
use IfExpressionBody::*;
|
||||||
|
|
||||||
|
match body {
|
||||||
|
SimpleConditional { then_case, else_case } => {
|
||||||
|
walk_block(v, then_case);
|
||||||
|
if let Some(block) = else_case.as_ref() {
|
||||||
|
walk_block(v, block)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
SimplePatternMatch { pattern, then_case, else_case } => {
|
||||||
|
walk_pattern(v, pattern);
|
||||||
|
walk_block(v, then_case);
|
||||||
|
if let Some(block) = else_case.as_ref() {
|
||||||
|
walk_block(v, block)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
CondList(arms) =>
|
||||||
|
for arm in arms {
|
||||||
|
match arm.condition {
|
||||||
|
Condition::Pattern(ref pat) => {
|
||||||
|
walk_pattern(v, pat);
|
||||||
|
}
|
||||||
|
Condition::TruncatedOp(ref _binop, ref expr) => {
|
||||||
|
walk_expression(v, expr);
|
||||||
|
}
|
||||||
|
Condition::Else => (),
|
||||||
|
}
|
||||||
|
if let Some(ref guard) = arm.guard {
|
||||||
|
walk_expression(v, guard);
|
||||||
|
}
|
||||||
|
walk_block(v, &arm.body);
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn walk_pattern<V: ASTVisitor>(v: &mut V, pat: &Pattern) {
|
||||||
|
use Pattern::*;
|
||||||
|
|
||||||
|
if let Recursion::Continue = v.pattern(pat) {
|
||||||
|
match pat {
|
||||||
|
TuplePattern(patterns) =>
|
||||||
|
for pat in patterns {
|
||||||
|
walk_pattern(v, pat);
|
||||||
|
},
|
||||||
|
TupleStruct(_, patterns) =>
|
||||||
|
for pat in patterns {
|
||||||
|
walk_pattern(v, pat);
|
||||||
|
},
|
||||||
|
Record(_, name_and_patterns) =>
|
||||||
|
for (_, pat) in name_and_patterns {
|
||||||
|
walk_pattern(v, pat);
|
||||||
|
},
|
||||||
|
_ => (),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
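// A minimal sketch of implementing the visitor: count every NatLiteral in an
// AST by overriding only `expression` and letting walk_ast drive the traversal.
// (`NatCounter` is an illustrative name, not a type from this crate.)
struct NatCounter {
    count: usize,
}

impl ASTVisitor for NatCounter {
    fn expression(&mut self, expression: &Expression) -> Recursion {
        if let ExpressionKind::NatLiteral(_) = expression.kind {
            self.count += 1;
        }
        Recursion::Continue
    }
}

// Usage: `let mut counter = NatCounter { count: 0 }; walk_ast(&mut counter, &ast);`
// after which counter.count holds the number of natural-number literals.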
|
282 schala-lang/src/ast/visualize.rs Normal file
@ -0,0 +1,282 @@
|
|||||||
|
#![allow(clippy::single_char_add_str)]
|
||||||
|
use std::fmt::Write;
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
Block, Declaration, Expression, ExpressionKind, FlowControl, ImportSpecifier, InvocationArgument,
|
||||||
|
Signature, Statement, StatementKind, AST,
|
||||||
|
};
|
||||||
|
|
||||||
|
const LEVEL: usize = 2;
|
||||||
|
|
||||||
|
fn do_indent(n: usize, buf: &mut String) {
|
||||||
|
for _ in 0..n {
|
||||||
|
buf.push(' ');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn newline(buf: &mut String) {
|
||||||
|
buf.push('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn render_ast(ast: &AST) -> String {
|
||||||
|
let AST { statements, .. } = ast;
|
||||||
|
|
||||||
|
let mut buf = "(AST\n".to_string();
|
||||||
|
render_block(statements, LEVEL, &mut buf);
|
||||||
|
|
||||||
|
buf.push(')');
|
||||||
|
|
||||||
|
buf
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_statement(stmt: &Statement<StatementKind>, indent: usize, buf: &mut String) {
|
||||||
|
use StatementKind::*;
|
||||||
|
do_indent(indent, buf);
|
||||||
|
match stmt.kind {
|
||||||
|
Expression(ref expr) => render_expression(expr, indent, buf),
|
||||||
|
Declaration(ref decl) => render_declaration(decl, indent, buf),
|
||||||
|
Import(ref spec) => render_import(spec, indent, buf),
|
||||||
|
Flow(ref flow_control) => render_flow_control(flow_control, indent, buf),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_expression(expr: &Expression, indent: usize, buf: &mut String) {
|
||||||
|
use ExpressionKind::*;
|
||||||
|
|
||||||
|
buf.push_str("(Expr ");
|
||||||
|
match &expr.kind {
|
||||||
|
SelfValue => write!(buf, "(SelfValue)").unwrap(),
|
||||||
|
NatLiteral(n) => buf.push_str(&format!("(NatLiteral {})", n)),
|
||||||
|
FloatLiteral(f) => buf.push_str(&format!("(FloatLiteral {})", f)),
|
||||||
|
StringLiteral { s, prefix } => buf.push_str(&format!("(StringLiteral prefix: {:?} {})", prefix, s)),
|
||||||
|
BoolLiteral(b) => buf.push_str(&format!("(BoolLiteral {})", b)),
|
||||||
|
BinExp(binop, lhs, rhs) => {
|
||||||
|
let new_indent = indent + LEVEL;
|
||||||
|
buf.push_str(&format!("Binop {}\n", binop.sigil()));
|
||||||
|
|
||||||
|
do_indent(new_indent, buf);
|
||||||
|
render_expression(lhs, new_indent, buf);
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
do_indent(new_indent, buf);
|
||||||
|
render_expression(rhs, new_indent, buf);
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
do_indent(indent, buf);
|
||||||
|
}
|
||||||
|
PrefixExp(prefix, expr) => {
|
||||||
|
let new_indent = indent + LEVEL;
|
||||||
|
buf.push_str(&format!("PrefixOp {}\n", prefix.sigil()));
|
||||||
|
|
||||||
|
do_indent(new_indent, buf);
|
||||||
|
render_expression(expr, new_indent, buf);
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
do_indent(indent, buf);
|
||||||
|
}
|
||||||
|
TupleLiteral(..) => (),
|
||||||
|
Value(name) => {
|
||||||
|
buf.push_str(&format!("Value {})", name));
|
||||||
|
}
|
||||||
|
NamedStruct { name: _, fields: _ } => (),
|
||||||
|
Call { f, arguments } => {
|
||||||
|
let new_indent = indent + LEVEL;
|
||||||
|
buf.push_str("Call ");
|
||||||
|
render_expression(f, new_indent, buf);
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
for arg in arguments {
|
||||||
|
do_indent(new_indent, buf);
|
||||||
|
match arg {
|
||||||
|
InvocationArgument::Positional(expr) => render_expression(expr, new_indent, buf),
|
||||||
|
InvocationArgument::Keyword { .. } => buf.push_str("<keyword>"),
|
||||||
|
InvocationArgument::Ignored => buf.push_str("<ignored>"),
|
||||||
|
}
|
||||||
|
newline(buf);
|
||||||
|
do_indent(indent, buf);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Index { .. } => buf.push_str("<index>"),
|
||||||
|
IfExpression { .. } => buf.push_str("<if-expr>"),
|
||||||
|
WhileExpression { .. } => buf.push_str("<while-expr>"),
|
||||||
|
ForExpression { .. } => buf.push_str("<for-expr>"),
|
||||||
|
Lambda { params, type_anno: _, body } => {
|
||||||
|
let new_indent = indent + LEVEL;
|
||||||
|
buf.push_str("Lambda ");
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
do_indent(new_indent, buf);
|
||||||
|
buf.push_str("(Args ");
|
||||||
|
for p in params {
|
||||||
|
buf.push_str(&format!("{} ", p.name));
|
||||||
|
}
|
||||||
|
buf.push(')');
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
do_indent(new_indent, buf);
|
||||||
|
buf.push_str("(Body ");
|
||||||
|
newline(buf);
|
||||||
|
render_block(body, new_indent + LEVEL, buf);
|
||||||
|
do_indent(new_indent, buf);
|
||||||
|
buf.push(')');
|
||||||
|
|
||||||
|
newline(buf);
|
||||||
|
do_indent(indent, buf);
|
||||||
|
}
|
||||||
|
Access { .. } => buf.push_str("<access-expr>"),
|
||||||
|
ListLiteral(..) => buf.push_str("<list-literal>"),
|
||||||
|
}
|
||||||
|
buf.push(')');
|
||||||
|
}
|
||||||
|
fn render_declaration(decl: &Declaration, indent: usize, buf: &mut String) {
|
||||||
|
use Declaration::*;
|
||||||
|
|
||||||
|
buf.push_str("(Decl ");
|
||||||
|
match decl {
|
||||||
|
FuncSig(ref sig) => render_signature(sig, indent, buf),
|
||||||
|
FuncDecl(ref sig, ref block) => {
|
||||||
|
let indent = indent + LEVEL;
|
||||||
|
buf.push_str("Function");
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
do_indent(indent, buf);
|
||||||
|
render_signature(sig, indent, buf);
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
do_indent(indent, buf);
|
||||||
|
buf.push_str("(Body");
|
||||||
|
newline(buf);
|
||||||
|
|
||||||
|
render_block(block, indent + LEVEL, buf);
|
||||||
|
do_indent(indent, buf);
|
||||||
|
buf.push_str(")");
|
||||||
|
newline(buf);
|
||||||
|
}
|
||||||
|
TypeDecl { name: _, body: _, .. } => {
|
||||||
|
buf.push_str("<type-decl>");
|
||||||
|
}
|
||||||
|
TypeAlias { alias: _, original: _ } => {
|
||||||
|
buf.push_str("<type-alias>");
|
||||||
|
}
|
||||||
|
Binding { name, constant: _, type_anno: _, expr } => {
|
||||||
|
let new_indent = indent + LEVEL;
|
||||||
|
buf.push_str(&format!("Binding {}", name));
|
||||||
|
newline(buf);
|
||||||
|
do_indent(new_indent, buf);
|
||||||
|
render_expression(expr, new_indent, buf);
|
||||||
|
newline(buf);
|
||||||
|
}
|
||||||
|
Module { name, items: _ } => {
|
||||||
|
write!(buf, "(Module {} <body>)", name).unwrap();
|
||||||
|
}
|
||||||
|
_ => (), /*
|
||||||
|
Impl { type_name: TypeIdentifier, interface_name: Option<TypeSingletonName>, block: Vec<Declaration> },
|
||||||
|
Interface { name: Rc<String>, signatures: Vec<Signature> },
|
||||||
|
Annotation { name: Rc<String>, arguments: Vec<Expression> },
|
||||||
|
*/
|
||||||
|
}
|
||||||
|
do_indent(indent, buf);
|
||||||
|
buf.push(')');
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_block(block: &Block, indent: usize, buf: &mut String) {
|
||||||
|
for stmt in block.statements.iter() {
|
||||||
|
render_statement(stmt, indent, buf);
|
||||||
|
newline(buf);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_signature(sig: &Signature, _indent: usize, buf: &mut String) {
|
||||||
|
buf.push_str(&format!("(Signature {} )", sig.name));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_import(_import: &ImportSpecifier, _indent: usize, buf: &mut String) {
|
||||||
|
buf.push_str("(Import <some import>)");
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_flow_control(flow: &FlowControl, _indent: usize, buf: &mut String) {
|
||||||
|
use FlowControl::*;
|
||||||
|
match flow {
|
||||||
|
Return(ref _expr) => write!(buf, "return <expr>").unwrap(),
|
||||||
|
Break => write!(buf, "break").unwrap(),
|
||||||
|
Continue => write!(buf, "continue").unwrap(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::render_ast;
|
||||||
|
use crate::util::quick_ast;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_visualization() {
|
||||||
|
let ast = quick_ast(
|
||||||
|
r#"
|
||||||
|
fn test(x) {
|
||||||
|
let m = 9
|
||||||
|
1 * 4 <> m |> somemod::output(x)
|
||||||
|
}
|
||||||
|
|
||||||
|
let quincy = \(no, yes, maybe) {
|
||||||
|
let a = 10
|
||||||
|
yes * no + a
|
||||||
|
}
|
||||||
|
|
||||||
|
let b = 54
|
||||||
|
test(b) == 3
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
let expected_output = r#"(AST
|
||||||
|
(Decl Function
|
||||||
|
(Signature test )
|
||||||
|
(Body
|
||||||
|
(Decl Binding m
|
||||||
|
(Expr (NatLiteral 9))
|
||||||
|
)
|
||||||
|
(Expr Binop *
|
||||||
|
(Expr (NatLiteral 1))
|
||||||
|
(Expr Binop |>
|
||||||
|
(Expr Binop <>
|
||||||
|
(Expr (NatLiteral 4))
|
||||||
|
(Expr Value m))
|
||||||
|
)
|
||||||
|
(Expr Call (Expr Value somemod::output))
|
||||||
|
(Expr Value x))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(Decl Binding quincy
|
||||||
|
(Expr Lambda
|
||||||
|
(Args no yes maybe )
|
||||||
|
(Body
|
||||||
|
(Decl Binding a
|
||||||
|
(Expr (NatLiteral 10))
|
||||||
|
)
|
||||||
|
(Expr Binop +
|
||||||
|
(Expr Binop *
|
||||||
|
(Expr Value yes))
|
||||||
|
(Expr Value no))
|
||||||
|
)
|
||||||
|
(Expr Value a))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(Decl Binding b
|
||||||
|
(Expr (NatLiteral 54))
|
||||||
|
)
|
||||||
|
(Expr Binop ==
|
||||||
|
(Expr Call (Expr Value test))
|
||||||
|
(Expr Value b))
|
||||||
|
)
|
||||||
|
(Expr (NatLiteral 3))
|
||||||
|
)
|
||||||
|
)"#;
|
||||||
|
|
||||||
|
let rendered = render_ast(&ast);
|
||||||
|
assert_eq!(rendered, expected_output);
|
||||||
|
}
|
||||||
|
}
|
130 schala-lang/src/builtin.rs Normal file
@ -0,0 +1,130 @@
|
|||||||
|
use std::{convert::TryFrom, str::FromStr};

use crate::{
    ast::{BinOp, PrefixOp},
    type_inference::Type,
};

/// "Builtin" computational operations with some kind of semantics, mostly mathematical operations.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Builtin {
    Add,
    Increment,
    Subtract,
    Negate,
    Multiply,
    Divide,
    Quotient,
    Modulo,
    Exponentiation,
    BitwiseAnd,
    BitwiseOr,
    BooleanAnd,
    BooleanOr,
    BooleanNot,
    Equality,
    LessThan,
    LessThanOrEqual,
    GreaterThan,
    GreaterThanOrEqual,
    Comparison,
    IOPrint,
    IOPrintLn,
    IOGetLine,
    Assignment,
    Concatenate,
    NotEqual,
}

impl Builtin {
    #[allow(dead_code)]
    pub fn get_type(&self) -> Type {
        use Builtin::*;
        match self {
            Add => ty!(Nat -> Nat -> Nat),
            Subtract => ty!(Nat -> Nat -> Nat),
            Multiply => ty!(Nat -> Nat -> Nat),
            Divide => ty!(Nat -> Nat -> Float),
            Quotient => ty!(Nat -> Nat -> Nat),
            Modulo => ty!(Nat -> Nat -> Nat),
            Exponentiation => ty!(Nat -> Nat -> Nat),
            BitwiseAnd => ty!(Nat -> Nat -> Nat),
            BitwiseOr => ty!(Nat -> Nat -> Nat),
            BooleanAnd => ty!(Bool -> Bool -> Bool),
            BooleanOr => ty!(Bool -> Bool -> Bool),
            BooleanNot => ty!(Bool -> Bool),
            Equality => ty!(Nat -> Nat -> Bool),
            LessThan => ty!(Nat -> Nat -> Bool),
            LessThanOrEqual => ty!(Nat -> Nat -> Bool),
            GreaterThan => ty!(Nat -> Nat -> Bool),
            GreaterThanOrEqual => ty!(Nat -> Nat -> Bool),
            Comparison => ty!(Nat -> Nat -> Ordering),
            IOPrint => ty!(Unit),
            IOPrintLn => ty!(Unit),
            IOGetLine => ty!(StringT),
            Assignment => ty!(Unit),
            Concatenate => ty!(StringT -> StringT -> StringT),
            Increment => ty!(Nat -> Int),
            Negate => ty!(Nat -> Int),
            NotEqual => ty!(Nat -> Nat -> Bool),
        }
    }
}

impl TryFrom<&BinOp> for Builtin {
    type Error = ();

    fn try_from(binop: &BinOp) -> Result<Self, Self::Error> {
        FromStr::from_str(binop.sigil())
    }
}

impl TryFrom<&PrefixOp> for Builtin {
    type Error = ();

    fn try_from(prefix_op: &PrefixOp) -> Result<Self, Self::Error> {
        use Builtin::*;

        match prefix_op.sigil() {
            "+" => Ok(Increment),
            "-" => Ok(Negate),
            "!" => Ok(BooleanNot),
            _ => Err(()),
        }
    }
}

impl FromStr for Builtin {
    type Err = ();

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use Builtin::*;
        Ok(match s {
            "+" => Add,
            "-" => Subtract,
            "*" => Multiply,
            "/" => Divide,
            "quot" => Quotient,
            "%" => Modulo,
            "++" => Concatenate,
            "^" => Exponentiation,
            "&" => BitwiseAnd,
            "&&" => BooleanAnd,
            "|" => BitwiseOr,
            "||" => BooleanOr,
            "!" => BooleanNot,
            ">" => GreaterThan,
            ">=" => GreaterThanOrEqual,
            "<" => LessThan,
            "<=" => LessThanOrEqual,
            "==" => Equality,
            "!=" => NotEqual,
            "=" => Assignment,
            "<=>" => Comparison,
            "print" => IOPrint,
            "println" => IOPrintLn,
            "getline" => IOGetLine,
            _ => return Err(()),
        })
    }
}

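// Illustrative sketch (not part of the committed file): the TryFrom<&BinOp> impl above
// delegates to FromStr, so a couple of assertions make the sigil-to-Builtin mapping concrete.
#[cfg(test)]
mod builtin_sigil_test {
    use super::Builtin;
    use std::str::FromStr;

    #[test]
    fn sigils_resolve_to_builtins() {
        assert_eq!(Builtin::from_str("+"), Ok(Builtin::Add));
        assert_eq!(Builtin::from_str("<=>"), Ok(Builtin::Comparison));
        // Unknown sigils are rejected with the unit error type.
        assert!(Builtin::from_str("no-such-op").is_err());
    }
}
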
schala-lang/src/error.rs (new file, 79 lines)
@@ -0,0 +1,79 @@
use crate::{
    parsing::{Location, ParseError},
    schala::{SourceReference, Stage},
    symbol_table::SymbolError,
    type_inference::TypeError,
};

pub struct SchalaError {
    errors: Vec<Error>,
}

impl SchalaError {
    pub(crate) fn display(&self) -> String {
        match self.errors[0] {
            Error::Parse(ref parse_err) => parse_err.to_string(),
            Error::Standard { ref text, .. } => text.as_ref().cloned().unwrap_or_default(),
        }
    }

    #[allow(dead_code)]
    pub(crate) fn from_type_error(err: TypeError) -> Self {
        Self {
            errors: vec![Error::Standard { location: None, text: Some(err.msg), stage: Stage::Typechecking }],
        }
    }

    pub(crate) fn from_symbol_table(symbol_errs: Vec<SymbolError>) -> Self {
        //TODO this could be better
        let errors = symbol_errs
            .into_iter()
            .map(|_symbol_err| Error::Standard {
                location: None,
                text: Some("symbol table error".to_string()),
                stage: Stage::Symbols,
            })
            .collect();
        Self { errors }
    }

    pub(crate) fn from_string(text: String, stage: Stage) -> Self {
        Self { errors: vec![Error::Standard { location: None, text: Some(text), stage }] }
    }

    pub(crate) fn from_parse_error(parse_error: ParseError, source_reference: &SourceReference) -> Self {
        let formatted_parse_error = format_parse_error(parse_error, source_reference);
        Self { errors: vec![Error::Parse(formatted_parse_error)] }
    }
}

#[allow(dead_code)]
enum Error {
    Standard { location: Option<Location>, text: Option<String>, stage: Stage },
    Parse(String),
}

fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
    let offset = error.location.offset;
    let (line_start, line_num, line_from_program) = source_reference.get_line(offset);
    let ch = offset - line_start;

    let location_pointer = format!("{}^", " ".repeat(ch));

    let line_num_digits = format!("{}", line_num).chars().count();
    let space_padding = " ".repeat(line_num_digits);

    format!(
        r#"
{error_msg}
{space_padding} |
{line_num} | {}
{space_padding} | {}
"#,
        line_from_program,
        location_pointer,
        error_msg = error.msg,
        space_padding = space_padding,
        line_num = line_num,
    )
}

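// For reference, the rough shape format_parse_error produces (the values here are made
// up for illustration): a parse error with msg "expected expression" at character 4 of
// source line 3 renders roughly as:
//
//     expected expression
//       |
//     3 | let b = 54
//       |     ^
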
schala-lang/src/identifier.rs (new file, 75 lines)
@@ -0,0 +1,75 @@
use std::{
    fmt::{self, Debug},
    hash::Hash,
    marker::PhantomData,
};

pub trait IdKind: Debug + Copy + Clone + Hash + PartialEq + Eq + Default {
    fn tag() -> &'static str;
}

/// A generalized abstract identifier type of up to 2^32-1 entries.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Default)]
pub struct Id<T>
where T: IdKind
{
    idx: u32,
    t: PhantomData<T>,
}

impl<T> Id<T>
where T: IdKind
{
    fn new(n: u32) -> Self {
        Self { idx: n, t: PhantomData }
    }

    #[allow(dead_code)]
    pub fn as_u32(&self) -> u32 {
        self.idx
    }
}

impl<T> fmt::Display for Id<T>
where T: IdKind
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}:{}", self.idx, T::tag())
    }
}

#[derive(Debug)]
pub struct IdStore<T>
where T: IdKind
{
    last_idx: u32,
    t: PhantomData<T>,
}

impl<T> IdStore<T>
where T: IdKind
{
    pub fn new() -> Self {
        Self { last_idx: 0, t: PhantomData }
    }

    pub fn fresh(&mut self) -> Id<T> {
        let idx = self.last_idx;
        self.last_idx += 1;
        Id::new(idx)
    }
}

macro_rules! define_id_kind {
    ($name:ident) => {
        #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Default)]
        pub struct $name;
        impl crate::identifier::IdKind for $name {
            fn tag() -> &'static str {
                stringify!($name)
            }
        }
    };
}

pub(crate) use define_id_kind;

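// A short usage sketch (not part of the committed file); `DemoItem` is a made-up
// id kind introduced only for illustration.
#[cfg(test)]
mod id_demo_test {
    use super::*;

    define_id_kind!(DemoItem);

    #[test]
    fn fresh_ids_are_sequential_and_tagged() {
        let mut store: IdStore<DemoItem> = IdStore::new();
        let first = store.fresh();
        let second = store.fresh();
        assert_eq!(first.as_u32(), 0);
        assert_eq!(second.as_u32(), 1);
        // Display combines the numeric index with the kind's tag.
        assert_eq!(second.to_string(), "1:DemoItem");
    }
}
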
schala-lang/src/lib.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
#![feature(trace_macros)]
//#![feature(unrestricted_attribute_tokens)]
#![feature(box_patterns, iter_intersperse)]

//! `schala-lang` is where the Schala programming language is actually implemented.
//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
//! `ProgrammingLanguageInterface` and the chain of compiler passes for it.

extern crate derivative;
extern crate schala_repl;

#[macro_use]
mod util;

#[macro_use]
mod type_inference;

mod ast;
mod parsing;
#[macro_use]
mod symbol_table;
mod builtin;
mod error;
mod reduced_ir;
mod tree_walk_eval;
#[macro_use]
mod identifier;

mod schala;

pub use schala::{Schala, SchalaConfig};

schala-lang/src/parsing/combinator.rs (new file, 1098 lines)
File diff suppressed because it is too large
schala-lang/src/parsing/mod.rs (new file, 126 lines)
@@ -0,0 +1,126 @@
#![allow(clippy::upper_case_acronyms)]

pub mod combinator;
mod peg_parser;
mod test;

use std::{cell::RefCell, fmt, rc::Rc};

use combinator::Span;

#[cfg(test)]
use crate::ast::{Block, Expression};
use crate::{
    ast::{ASTItem, AST},
    identifier::{Id, IdStore},
};

pub(crate) type StoreRef = Rc<RefCell<IdStore<ASTItem>>>;
pub struct Parser {
    id_store: StoreRef,
    use_combinator: bool,
}

impl Parser {
    pub(crate) fn new() -> Self {
        let id_store: IdStore<ASTItem> = IdStore::new();
        Self { id_store: Rc::new(RefCell::new(id_store)), use_combinator: true }
    }
    pub(crate) fn parse(&mut self, input: &str) -> Result<AST, ParseError> {
        if self.use_combinator {
            self.parse_comb(input)
        } else {
            self.parse_peg(input)
        }
    }

    pub(crate) fn parse_peg(&mut self, input: &str) -> Result<AST, ParseError> {
        peg_parser::schala_parser::program(input, self).map_err(ParseError::from_peg)
    }

    pub(crate) fn parse_comb(&mut self, input: &str) -> Result<AST, ParseError> {
        let span = Span::new_extra(input, self.id_store.clone());
        convert(input, combinator::program(span))
    }

    #[cfg(test)]
    fn expression(&mut self, input: &str) -> Result<Expression, ParseError> {
        peg_parser::schala_parser::expression(input, self).map_err(ParseError::from_peg)
    }

    #[cfg(test)]
    fn expression_comb(&mut self, input: &str) -> Result<Expression, ParseError> {
        let span = Span::new_extra(input, self.id_store.clone());
        convert(input, combinator::expression(span))
    }

    #[cfg(test)]
    fn block(&mut self, input: &str) -> Result<Block, ParseError> {
        peg_parser::schala_parser::block(input, self).map_err(ParseError::from_peg)
    }

    #[cfg(test)]
    fn block_comb(&mut self, input: &str) -> Result<Block, ParseError> {
        let span = Span::new_extra(input, self.id_store.clone());
        convert(input, combinator::block(span))
    }

    fn fresh(&mut self) -> Id<ASTItem> {
        self.id_store.borrow_mut().fresh()
    }
}

fn convert<'a, O>(input: &'a str, result: combinator::ParseResult<'a, O>) -> Result<O, ParseError> {
    use nom::{error::VerboseError, Finish};

    match result.finish() {
        Ok((rest, output)) => {
            if rest.fragment() != &"" {
                return Err(ParseError {
                    location: Default::default(),
                    msg: format!("Bad parse state, remaining text: `{}`", rest.fragment()),
                });
            }

            Ok(output)
        }
        Err(err) => {
            let err = VerboseError {
                errors: err.errors.into_iter().map(|(sp, kind)| (*sp.fragment(), kind)).collect(),
            };
            let msg = nom::error::convert_error(input, err);
            Err(ParseError { msg, location: (0).into() })
        }
    }
}

/// Represents a parsing error
#[derive(Debug)]
pub struct ParseError {
    pub msg: String,
    pub location: Location,
}

impl ParseError {
    fn from_peg(err: peg::error::ParseError<peg::str::LineCol>) -> Self {
        let msg = err.to_string();
        Self { msg, location: err.location.offset.into() }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub struct Location {
    pub(crate) offset: usize,
}

impl From<usize> for Location {
    fn from(offset: usize) -> Self {
        Self { offset }
    }
}

impl fmt::Display for Location {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.offset)
    }
}

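// Minimal usage sketch (assumed, not part of the commit); the source line is borrowed
// from the AST-visualization test earlier in this changeset, and whether the driver
// calls the parser exactly this way is an assumption.
#[cfg(test)]
mod parse_sketch_test {
    use super::Parser;

    #[test]
    fn parse_one_binding() {
        let mut parser = Parser::new();
        // `parse` dispatches to the combinator backend, since `use_combinator`
        // defaults to true in `Parser::new`.
        let ast = parser.parse("let b = 54").unwrap();
        assert_eq!(ast.statements.statements.iter().count(), 1);
    }
}
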
schala-lang/src/parsing/peg_parser.rs (new file, 567 lines)
@@ -0,0 +1,567 @@
use std::rc::Rc;
|
||||||
|
|
||||||
|
use super::Parser;
|
||||||
|
use crate::ast::*;
|
||||||
|
|
||||||
|
fn rc_string(s: &str) -> Rc<String> {
|
||||||
|
Rc::new(s.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
enum ExtendedPart<'a> {
|
||||||
|
Index(Vec<Expression>),
|
||||||
|
Accessor(&'a str),
|
||||||
|
Call(Vec<InvocationArgument>),
|
||||||
|
}
|
||||||
|
|
||||||
|
peg::parser! {
|
||||||
|
pub grammar schala_parser() for str {
|
||||||
|
|
||||||
|
rule whitespace() = [' ' | '\t' ]
|
||||||
|
rule whitespace_or_newline() = [' ' | '\t' | '\n' ]
|
||||||
|
|
||||||
|
rule _ = quiet!{ (block_comment() / line_comment() / whitespace())* }
|
||||||
|
|
||||||
|
rule __ = quiet!{ (block_comment() / line_comment() / whitespace_or_newline())* }
|
||||||
|
|
||||||
|
rule block_comment() = "/*" (block_comment() / !"*/" [_])* "*/"
|
||||||
|
rule line_comment() = "//" (!['\n'] [_])* &"\n"
|
||||||
|
|
||||||
|
|
||||||
|
pub rule program(parser: &mut Parser) -> AST =
|
||||||
|
__ statements:(statement(parser) ** (delimiter()+) ) __ { AST { id: parser.fresh(), statements: statements.into() } }
|
||||||
|
|
||||||
|
rule delimiter() = (";" / "\n")+
|
||||||
|
|
||||||
|
//Note - this is a hack, ideally the rule `rule block() -> Block = "{" _ items:(statement() **
|
||||||
|
//delimiter()) _ "}" { items.into() }` would've worked, but it doesn't.
|
||||||
|
pub rule block(parser: &mut Parser) -> Block =
|
||||||
|
"{" __ items:(statement(parser) ** delimiter()) delimiter()? __ "}" { items.into() } /
|
||||||
|
"{" __ stmt:statement(parser) __ "}" { vec![stmt].into() }
|
||||||
|
|
||||||
|
rule block_item(parser: &mut Parser) -> Statement<StatementKind> =
|
||||||
|
_ stmt:statement(parser) _ delimiter()+ { stmt }
|
||||||
|
|
||||||
|
rule statement(parser: &mut Parser) -> Statement<StatementKind> =
|
||||||
|
_ pos:position!() kind:statement_kind(parser) _ { Statement { id: parser.fresh(), location: pos.into(), kind } }
|
||||||
|
|
||||||
|
rule statement_kind(parser: &mut Parser) -> StatementKind =
|
||||||
|
__ import:import(parser) { StatementKind::Import(import) } /
|
||||||
|
__ decl:declaration(parser) { StatementKind::Declaration(decl) } /
|
||||||
|
__ flow:flow(parser) { StatementKind::Flow(flow) } /
|
||||||
|
__ expr:expression(parser) { StatementKind::Expression(expr) }
|
||||||
|
|
||||||
|
rule flow(parser: &mut Parser) -> FlowControl =
|
||||||
|
"continue" { FlowControl::Continue } /
|
||||||
|
"break" { FlowControl::Break } /
|
||||||
|
"return" _ expr:expression(parser)? { FlowControl::Return(expr) }
|
||||||
|
|
||||||
|
//TODO add the ability to rename and exclude imports
|
||||||
|
rule import(parser: &mut Parser) -> ImportSpecifier =
|
||||||
|
"import" _ path_components:path_components() suffix:import_suffix()? {
|
||||||
|
ImportSpecifier {
|
||||||
|
id: parser.fresh(),
|
||||||
|
path_components,
|
||||||
|
imported_names: suffix.unwrap_or(ImportedNames::LastOfPath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rule path_components() -> Vec<Rc<String>> =
|
||||||
|
"::"? name:identifier() rest:path_component()* {
|
||||||
|
let mut items = vec![rc_string(name)];
|
||||||
|
items.extend(rest.into_iter().map(rc_string));
|
||||||
|
items
|
||||||
|
}
|
||||||
|
|
||||||
|
rule path_component() -> &'input str = "::" ident:identifier() { ident }
|
||||||
|
|
||||||
|
rule import_suffix() -> ImportedNames =
|
||||||
|
"::*" { ImportedNames::All } /
|
||||||
|
"::{" __ names:(identifier() ** (_ "," _)) __ "}" {?
|
||||||
|
if names.is_empty() {
|
||||||
|
Err("import groups must have at least one item")
|
||||||
|
} else {
|
||||||
|
Ok(ImportedNames::List(names.into_iter().map(rc_string).collect()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rule declaration(parser: &mut Parser) -> Declaration =
|
||||||
|
binding(parser) / type_decl(parser) / annotation(parser) / func(parser) / interface(parser) /
|
||||||
|
implementation(parser) / module(parser)
|
||||||
|
|
||||||
|
rule module(parser: &mut Parser) -> Declaration =
|
||||||
|
"module" _ name:identifier() _ items:block(parser) { Declaration::Module { name: rc_string(name), items } }
|
||||||
|
|
||||||
|
rule implementation(parser: &mut Parser) -> Declaration =
|
||||||
|
"impl" _ interface:type_singleton_name() _ "for" _ type_name:type_identifier() _ block:decl_block(parser) {
|
||||||
|
Declaration::Impl { type_name, interface_name: Some(interface), block }
|
||||||
|
|
||||||
|
} /
|
||||||
|
"impl" _ type_name:type_identifier() _ block:decl_block(parser) {
|
||||||
|
Declaration::Impl { type_name, interface_name: None, block }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule decl_block(parser: &mut Parser) -> Vec<Statement<Declaration>> =
|
||||||
|
"{" __ decls:(func_declaration_stmt(parser) ** (delimiter()+)) delimiter()? __ "}" { decls }
|
||||||
|
|
||||||
|
rule func_declaration_stmt(parser: &mut Parser) -> Statement<Declaration> =
|
||||||
|
pos:position!() decl:func_declaration(parser) { Statement { id: parser.fresh(), location: pos.into(), kind: decl } }
|
||||||
|
|
||||||
|
rule interface(parser: &mut Parser) -> Declaration =
|
||||||
|
"interface" _ name:identifier() _ signatures:signature_block(parser) { Declaration::Interface { name: rc_string(name), signatures } }
|
||||||
|
|
||||||
|
rule signature_block(parser: &mut Parser) -> Vec<Signature> =
|
||||||
|
"{" __ signatures:(func_signature(parser) ** (delimiter()+)) __ "}" { signatures }
|
||||||
|
|
||||||
|
rule func(parser: &mut Parser) -> Declaration =
|
||||||
|
decl:func_declaration(parser) { decl } /
|
||||||
|
sig:func_signature(parser) { Declaration::FuncSig(sig) }
|
||||||
|
|
||||||
|
rule func_declaration(parser: &mut Parser) -> Declaration =
|
||||||
|
_ sig:func_signature(parser) __ body:block(parser) { Declaration::FuncDecl(sig, body) }
|
||||||
|
|
||||||
|
rule func_signature(parser: &mut Parser) -> Signature =
|
||||||
|
_ "fn" _ name:identifier() "(" _ params:formal_params(parser) _ ")" _ type_anno:type_anno()? { Signature {
|
||||||
|
name: rc_string(name), operator: false, params, type_anno
|
||||||
|
} } /
|
||||||
|
_ "fn" _ "(" op:operator() ")" _ "(" _ params:formal_params(parser) _ ")" _ type_anno:type_anno()? { Signature {
|
||||||
|
name: rc_string(op), operator: true, params, type_anno
|
||||||
|
} }
|
||||||
|
|
||||||
|
rule formal_params(parser: &mut Parser) -> Vec<FormalParam> =
|
||||||
|
params:(formal_param(parser) ** (_ "," _)) {? if params.len() < 256 { Ok(params) } else {
|
||||||
|
Err("function-too-long") }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule formal_param(parser: &mut Parser) -> FormalParam =
|
||||||
|
name:identifier() _ anno:type_anno()? _ "=" expr:expression(parser) { FormalParam { name: rc_string(name),
|
||||||
|
default: Some(expr), anno } } /
|
||||||
|
name:identifier() _ anno:type_anno()? { FormalParam { name: rc_string(name), default: None, anno } }
|
||||||
|
|
||||||
|
|
||||||
|
rule annotation(parser: &mut Parser) -> Declaration =
|
||||||
|
"@" name:identifier() args:annotation_args(parser)? delimiter()+ _ inner:statement(parser) { Declaration::Annotation {
|
||||||
|
name: rc_string(name), arguments: if let Some(args) = args { args } else { vec![] }, inner: Box::new(inner) }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule annotation_args(parser: &mut Parser) -> Vec<Expression> =
|
||||||
|
"(" _ args:(expression(parser) ** (_ "," _)) _ ")" { args }
|
||||||
|
|
||||||
|
|
||||||
|
rule binding(parser: &mut Parser) -> Declaration =
|
||||||
|
"let" _ mutable:"mut"? _ ident:identifier() _ type_anno:type_anno()? _ "=" _ expr:expression(parser) {
|
||||||
|
Declaration::Binding { name: Rc::new(ident.to_string()), constant: mutable.is_none(),
|
||||||
|
type_anno, expr }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
rule type_decl(parser: &mut Parser) -> Declaration =
|
||||||
|
"type" _ "alias" _ alias:type_alias() { alias } /
|
||||||
|
"type" _ mutable:"mut"? _ name:type_singleton_name() _ "=" _ body:type_body(parser) {
|
||||||
|
Declaration::TypeDecl { name, body, mutable: mutable.is_some() }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule type_singleton_name() -> TypeSingletonName =
|
||||||
|
name:identifier() params:type_params()? { TypeSingletonName {
|
||||||
|
name: rc_string(name), params: if let Some(params) = params { params } else { vec![] }
|
||||||
|
} }
|
||||||
|
|
||||||
|
rule type_params() -> Vec<TypeIdentifier> =
|
||||||
|
"<" _ idents:(type_identifier() ** (_ "," _)) _ ">" { idents }
|
||||||
|
|
||||||
|
rule type_identifier() -> TypeIdentifier =
|
||||||
|
"(" _ items:(type_identifier() ** (_ "," _)) _ ")" { TypeIdentifier::Tuple(items) } /
|
||||||
|
singleton:type_singleton_name() { TypeIdentifier::Singleton(singleton) }
|
||||||
|
|
||||||
|
rule type_body(parser: &mut Parser) -> TypeBody =
|
||||||
|
"{" _ items:(record_variant_item() ** (__ "," __)) __ "}" { TypeBody::ImmediateRecord { id: parser.fresh(), fields: items } } /
|
||||||
|
variants:(variant_spec(parser) ** (__ "|" __)) { TypeBody::Variants(variants) }
|
||||||
|
|
||||||
|
rule variant_spec(parser: &mut Parser) -> Variant =
|
||||||
|
name:identifier() __ "{" __ typed_identifier_list:(record_variant_item() ** (__ "," __)) __ ","? __ "}" { Variant {
|
||||||
|
id: parser.fresh(), name: rc_string(name), kind: VariantKind::Record(typed_identifier_list)
|
||||||
|
} } /
|
||||||
|
name:identifier() "(" tuple_members:(type_identifier() ++ (__ "," __)) ")" { Variant {
|
||||||
|
id: parser.fresh(), name: rc_string(name), kind: VariantKind::TupleStruct(tuple_members) } } /
|
||||||
|
name:identifier() { Variant { id: parser.fresh(), name: rc_string(name), kind: VariantKind::UnitStruct } }
|
||||||
|
|
||||||
|
rule record_variant_item() -> (Rc<String>, TypeIdentifier) =
|
||||||
|
name:identifier() _ ":" _ ty:type_identifier() { (rc_string(name), ty) }
|
||||||
|
|
||||||
|
rule type_alias() -> Declaration =
|
||||||
|
alias:identifier() _ "=" _ name:identifier() { Declaration::TypeAlias { alias: rc_string(alias), original: rc_string(name), } }
|
||||||
|
|
||||||
|
rule type_anno() -> TypeIdentifier =
|
||||||
|
":" _ identifier:type_identifier() { identifier }
|
||||||
|
|
||||||
|
pub rule expression(parser: &mut Parser) -> Expression =
|
||||||
|
__ kind:expression_kind(true, parser) _ type_anno:type_anno()? { Expression { id: parser.fresh(), type_anno, kind } }
|
||||||
|
|
||||||
|
rule expression_no_struct(parser: &mut Parser) -> Expression =
|
||||||
|
__ kind:expression_kind(false, parser) { Expression { id: parser.fresh(), type_anno: None, kind } }
|
||||||
|
|
||||||
|
rule expression_kind(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
precedence_expr(struct_ok, parser)
|
||||||
|
|
||||||
|
rule precedence_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
first:prefix_expr(struct_ok, parser) _ next:(precedence_continuation(struct_ok, parser))* {
|
||||||
|
let next = next.into_iter().map(|(sigil, expr)| (BinOp::from_sigil(sigil), expr)).collect();
|
||||||
|
BinopSequence { first, next }.do_precedence(parser)
|
||||||
|
}
|
||||||
|
|
||||||
|
rule precedence_continuation(struct_ok: bool, parser: &mut Parser) -> (&'input str, ExpressionKind) =
|
||||||
|
op:operator() _ expr:prefix_expr(struct_ok, parser) _ { (op, expr) }
|
||||||
|
|
||||||
|
rule prefix_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
prefix:prefix()? expr:extended_expr(struct_ok, parser) {
|
||||||
|
if let Some(p) = prefix {
|
||||||
|
let expr = Expression::new(parser.fresh(), expr);
|
||||||
|
let prefix = PrefixOp::from_sigil(p);
|
||||||
|
ExpressionKind::PrefixExp(prefix, Box::new(expr))
|
||||||
|
} else {
|
||||||
|
expr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
rule prefix() -> &'input str =
|
||||||
|
$(['+' | '-' | '!' ])
|
||||||
|
|
||||||
|
//TODO make the definition of operators more complex
|
||||||
|
rule operator() -> &'input str =
|
||||||
|
quiet!{!"*/" s:$( ['+' | '-' | '*' | '/' | '%' | '<' | '>' | '=' | '!' | '$' | '&' | '|' | '?' | '^' | '`']+ ) { s } } /
|
||||||
|
expected!("operator")
|
||||||
|
|
||||||
|
rule extended_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
primary:primary(struct_ok, parser) parts:(extended_expr_part(parser)*) {
|
||||||
|
let mut expression = Expression::new(parser.fresh(), primary);
|
||||||
|
for part in parts.into_iter() {
|
||||||
|
let kind = match part {
|
||||||
|
ExtendedPart::Index(indexers) => {
|
||||||
|
ExpressionKind::Index { indexee: Box::new(expression), indexers }
|
||||||
|
},
|
||||||
|
ExtendedPart::Accessor(name) => {
|
||||||
|
let name = rc_string(name);
|
||||||
|
ExpressionKind::Access { name, expr: Box::new(expression) }
|
||||||
|
},
|
||||||
|
ExtendedPart::Call(arguments) => {
|
||||||
|
ExpressionKind::Call { f: Box::new(expression), arguments }
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
expression = Expression::new(parser.fresh(), kind);
|
||||||
|
}
|
||||||
|
|
||||||
|
expression.kind
|
||||||
|
}
|
||||||
|
|
||||||
|
rule extended_expr_part(parser: &mut Parser) -> ExtendedPart<'input> =
|
||||||
|
indexers:index_part(parser) { ExtendedPart::Index(indexers) } /
|
||||||
|
arguments:call_part(parser) { ExtendedPart::Call(arguments) } /
|
||||||
|
"." name:identifier() { ExtendedPart::Accessor(name) }
|
||||||
|
|
||||||
|
rule index_part(parser: &mut Parser) -> Vec<Expression> =
|
||||||
|
"[" indexers:(expression(parser) ++ ",") "]" { indexers }
|
||||||
|
|
||||||
|
rule call_part(parser: &mut Parser) -> Vec<InvocationArgument> =
|
||||||
|
"(" arguments:(invocation_argument(parser) ** ",") ")" { arguments }
|
||||||
|
|
||||||
|
rule invocation_argument(parser: &mut Parser) -> InvocationArgument =
|
||||||
|
_ "_" _ { InvocationArgument::Ignored } /
|
||||||
|
_ ident:identifier() _ "=" _ expr:expression(parser) { InvocationArgument::Keyword {
|
||||||
|
name: Rc::new(ident.to_string()),
|
||||||
|
expr
|
||||||
|
} } /
|
||||||
|
_ expr:expression(parser) _ { InvocationArgument::Positional(expr) }
|
||||||
|
|
||||||
|
|
||||||
|
rule primary(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
while_expr(parser) / for_expr(parser) / float_literal() / nat_literal() / bool_literal() /
|
||||||
|
string_literal() / paren_expr(parser) /
|
||||||
|
list_expr(parser) / if_expr(parser) / lambda_expr(parser) /
|
||||||
|
item:named_struct(parser) {? if struct_ok { Ok(item) } else { Err("no-struct-allowed") } } /
|
||||||
|
identifier_expr(parser)
|
||||||
|
|
||||||
|
rule lambda_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
r#"\"# __ "(" _ params:formal_params(parser) _ ")" _ type_anno:(type_anno()?) _ body:block(parser) {
|
||||||
|
ExpressionKind::Lambda { params, type_anno, body }
|
||||||
|
} /
|
||||||
|
r#"\"# param:formal_param(parser) _ type_anno:(type_anno()?) _ body:block(parser) {
|
||||||
|
ExpressionKind::Lambda { params: vec![param], type_anno, body }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule for_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"for" _ enumerators:for_enumerators(parser) _ body:for_body(parser) {
|
||||||
|
ExpressionKind::ForExpression { enumerators, body }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule for_enumerators(parser: &mut Parser) -> Vec<Enumerator> =
|
||||||
|
"{" _ enumerators:(enumerator(parser) ++ ",") _ "}" { enumerators } /
|
||||||
|
enumerator:enumerator(parser) { vec![enumerator] }
|
||||||
|
|
||||||
|
//TODO add guards, etc.
|
||||||
|
rule enumerator(parser: &mut Parser) -> Enumerator =
|
||||||
|
ident:identifier() _ "<-" _ generator:expression_no_struct(parser) {
|
||||||
|
Enumerator { identifier: Rc::new(ident.to_string()), generator, assignment: false }
|
||||||
|
} /
|
||||||
|
//TODO need to distinguish these two cases in AST
|
||||||
|
ident:identifier() _ "=" _ generator:expression_no_struct(parser) {
|
||||||
|
Enumerator { identifier: Rc::new(ident.to_string()), generator, assignment: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule for_body(parser: &mut Parser) -> Box<ForBody> =
|
||||||
|
"return" _ expr:expression(parser) { Box::new(ForBody::MonadicReturn(expr)) } /
|
||||||
|
body:block(parser) { Box::new(ForBody::StatementBlock(body)) }
|
||||||
|
|
||||||
|
rule while_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"while" _ cond:expression_kind(false, parser)? _ body:block(parser) {
|
||||||
|
ExpressionKind::WhileExpression {
|
||||||
|
condition: cond.map(|kind| Box::new(Expression::new(parser.fresh(), kind))),
|
||||||
|
body,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
rule identifier_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
qn:qualified_identifier(parser) { ExpressionKind::Value(qn) }
|
||||||
|
|
||||||
|
rule named_struct(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
name:qualified_identifier(parser) _ fields:record_block(parser) {
|
||||||
|
ExpressionKind::NamedStruct {
|
||||||
|
name,
|
||||||
|
fields: fields.into_iter().map(|(n, exp)| (Rc::new(n.to_string()), exp)).collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//TODO support anonymous structs and Elm-style update syntax for structs
|
||||||
|
rule record_block(parser: &mut Parser) -> Vec<(&'input str, Expression)> =
|
||||||
|
"{" _ entries:(record_entry(parser) ** ",") _ "}" { entries }
|
||||||
|
|
||||||
|
rule record_entry(parser: &mut Parser) -> (&'input str, Expression) =
|
||||||
|
_ name:identifier() _ ":" _ expr:expression(parser) _ { (name, expr) }
|
||||||
|
|
||||||
|
rule qualified_identifier(parser: &mut Parser) -> QualifiedName =
|
||||||
|
names:(identifier() ++ "::") { QualifiedName { id: parser.fresh(), components: names.into_iter().map(|name| Rc::new(name.to_string())).collect() } }
|
||||||
|
|
||||||
|
//TODO improve the definition of identifiers
|
||||||
|
rule identifier() -> &'input str =
|
||||||
|
!(reserved() !(ident_continuation())) text:$(['a'..='z' | 'A'..='Z' | '_'] ident_continuation()*) { text }
|
||||||
|
|
||||||
|
rule ident_continuation() -> &'input str =
|
||||||
|
text:$(['a'..='z' | 'A'..='Z' | '0'..='9' | '_'])
|
||||||
|
|
||||||
|
rule reserved() = "if" / "then" / "else" / "is" / "fn" / "for" / "while" / "let" / "in" / "mut" / "return" /
|
||||||
|
"break" / "alias" / "type" / "self" / "Self" / "interface" / "impl" / "true" / "false" / "module" / "import"
|
||||||
|
|
||||||
|
|
||||||
|
rule if_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"if" _ discriminator:(expression(parser)?) _ body:if_expr_body(parser) {
|
||||||
|
ExpressionKind::IfExpression {
|
||||||
|
discriminator: discriminator.map(Box::new),
|
||||||
|
body: Box::new(body),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rule if_expr_body(parser: &mut Parser) -> IfExpressionBody =
|
||||||
|
cond_block(parser) / simple_pattern_match(parser) / simple_conditional(parser)
|
||||||
|
|
||||||
|
rule simple_conditional(parser: &mut Parser) -> IfExpressionBody =
|
||||||
|
"then" _ then_case:expr_or_block(parser) _ else_case:else_case(parser) {
|
||||||
|
IfExpressionBody::SimpleConditional { then_case, else_case }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule simple_pattern_match(parser: &mut Parser) -> IfExpressionBody =
|
||||||
|
"is" _ pattern:pattern(parser) _ "then" _ then_case:expr_or_block(parser) _ else_case:else_case(parser) {
|
||||||
|
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule cond_block(parser: &mut Parser) -> IfExpressionBody =
|
||||||
|
"{" __ cond_arms:(cond_arm(parser) ++ (delimiter()+)) __ "}" { IfExpressionBody::CondList(cond_arms) }
|
||||||
|
|
||||||
|
rule cond_arm(parser: &mut Parser) -> ConditionArm =
|
||||||
|
_ "else" _ body:expr_or_block(parser) { ConditionArm { condition: Condition::Else, guard: None, body } } /
|
||||||
|
_ condition:condition(parser) _ guard:condition_guard(parser) _ "then" _ body:expr_or_block(parser)
|
||||||
|
{ ConditionArm { condition, guard, body } }
|
||||||
|
|
||||||
|
rule condition(parser: &mut Parser) -> Condition =
|
||||||
|
"is" _ pat:pattern(parser) { Condition::Pattern(pat) } /
|
||||||
|
op:operator() _ expr:expression(parser) { Condition::TruncatedOp(BinOp::from_sigil(op), expr) }
|
||||||
|
|
||||||
|
rule condition_guard(parser: &mut Parser) -> Option<Expression> =
|
||||||
|
("if" _ expr:expression(parser) { expr } )?
|
||||||
|
|
||||||
|
rule expr_or_block(parser: &mut Parser) -> Block = block(parser) / pos:position!() ex:expression(parser) {
|
||||||
|
Statement {
|
||||||
|
id: parser.fresh() , location: pos.into(),
|
||||||
|
kind: StatementKind::Expression(ex)
|
||||||
|
}.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
rule else_case(parser: &mut Parser) -> Option<Block> =
|
||||||
|
("else" _ eorb:expr_or_block(parser) { eorb })?
|
||||||
|
|
||||||
|
rule pattern(parser: &mut Parser) -> Pattern =
|
||||||
|
"(" _ variants:(pattern(parser) ++ ",") _ ")" { Pattern::TuplePattern(variants) } /
|
||||||
|
_ pat:simple_pattern(parser) { pat }
|
||||||
|
|
||||||
|
rule simple_pattern(parser: &mut Parser) -> Pattern =
|
||||||
|
pattern_literal() /
|
||||||
|
qn:qualified_identifier(parser) "(" members:(pattern(parser) ** ",") ")" {
|
||||||
|
Pattern::TupleStruct(qn, members)
|
||||||
|
} /
|
||||||
|
qn:qualified_identifier(parser) _ "{" _ items:(record_pattern_entry(parser) ** ",") "}" _ {
|
||||||
|
let items = items.into_iter().map(|(name, pat)| (Rc::new(name.to_string()), pat)).collect();
|
||||||
|
Pattern::Record(qn, items)
|
||||||
|
} /
|
||||||
|
qn:qualified_identifier(parser) { Pattern::VarOrName(qn) }
|
||||||
|
|
||||||
|
rule record_pattern_entry(parser: &mut Parser) -> (&'input str, Pattern) =
|
||||||
|
_ name:identifier() _ ":" _ pat:pattern(parser) _ { (name, pat) } /
|
||||||
|
_ name:identifier() _ {
|
||||||
|
let qn = QualifiedName {
|
||||||
|
id: parser.fresh(),
|
||||||
|
components: vec![Rc::new(name.to_string())],
|
||||||
|
};
|
||||||
|
(name, Pattern::VarOrName(qn))
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
rule pattern_literal() -> Pattern =
|
||||||
|
"true" { Pattern::Literal(PatternLiteral::BoolPattern(true)) } /
|
||||||
|
"false" { Pattern::Literal(PatternLiteral::BoolPattern(false)) } /
|
||||||
|
s:bare_string_literal() { Pattern::Literal(PatternLiteral::StringPattern(Rc::new(s))) } /
|
||||||
|
sign:("-"?) num:(float_literal() / nat_literal()) {
|
||||||
|
let neg = sign.is_some();
|
||||||
|
Pattern::Literal(PatternLiteral::NumPattern { neg, num })
|
||||||
|
} /
|
||||||
|
"_" { Pattern::Ignored }
|
||||||
|
|
||||||
|
|
||||||
|
rule list_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"[" exprs:(expression(parser) ** ",") "]" {
|
||||||
|
let mut exprs = exprs;
|
||||||
|
ExpressionKind::ListLiteral(exprs)
|
||||||
|
}
|
||||||
|
|
||||||
|
rule paren_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"(" exprs:(expression(parser) ** ",") ")" {
|
||||||
|
let mut exprs = exprs;
|
||||||
|
match exprs.len() {
|
||||||
|
1 => exprs.pop().unwrap().kind,
|
||||||
|
_ => ExpressionKind::TupleLiteral(exprs),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rule string_literal() -> ExpressionKind =
|
||||||
|
prefix:identifier()? s:bare_string_literal(){ ExpressionKind::StringLiteral{ s: Rc::new(s),
|
||||||
|
prefix: prefix.map(rc_string)
|
||||||
|
} }
|
||||||
|
|
||||||
|
rule bare_string_literal() -> String =
|
||||||
|
"\"" chars:string_component()* "\"" { chars.into_iter().collect::<String>() }
|
||||||
|
|
||||||
|
rule string_component() -> char =
|
||||||
|
!(r#"""# / r#"\"#) ch:$([_]) { ch.chars().next().unwrap() } /
|
||||||
|
r#"\u{"# value:$(['0'..='9' | 'a'..='f' | 'A'..='F']+) "}" { char::from_u32(u32::from_str_radix(value, 16).unwrap()).unwrap() } /
|
||||||
|
r#"\n"# { '\n' } / r#"\t"# { '\t' } / r#"\""# { '"' } / r#"\\"# { '\\' } /
|
||||||
|
expected!("Valid escape sequence")
|
||||||
|
|
||||||
|
rule bool_literal() -> ExpressionKind =
|
||||||
|
"true" { ExpressionKind::BoolLiteral(true) } / "false" { ExpressionKind::BoolLiteral(false) }
|
||||||
|
|
||||||
|
rule nat_literal() -> ExpressionKind =
|
||||||
|
bin_literal() / hex_literal() / unmarked_literal()
|
||||||
|
|
||||||
|
rule unmarked_literal() -> ExpressionKind =
|
||||||
|
digits:digits() { let n = digits.chars().filter(|ch| *ch != '_').collect::<String>().parse().unwrap(); ExpressionKind::NatLiteral(n) }
|
||||||
|
|
||||||
|
rule bin_literal() -> ExpressionKind =
|
||||||
|
"0b" digits:bin_digits() {? parse_binary(digits).map(ExpressionKind::NatLiteral) }
|
||||||
|
|
||||||
|
rule hex_literal() -> ExpressionKind =
|
||||||
|
"0x" digits:hex_digits() {? parse_hex(digits).map(ExpressionKind::NatLiteral) }
|
||||||
|
|
||||||
|
rule float_literal() -> ExpressionKind =
|
||||||
|
ds:$( digits() "." digits()? / "." digits() ) { ExpressionKind::FloatLiteral(ds.parse().unwrap()) }
|
||||||
|
|
||||||
|
rule digits() -> &'input str = $((digit_group() "_"*)+)
|
||||||
|
rule bin_digits() -> &'input str = $((bin_digit_group() "_"*)+)
|
||||||
|
rule hex_digits() -> &'input str = $((hex_digit_group() "_"*)+)
|
||||||
|
|
||||||
|
rule digit_group() -> &'input str = $(['0'..='9']+)
|
||||||
|
rule bin_digit_group() -> &'input str = $(['0' | '1']+)
|
||||||
|
rule hex_digit_group() -> &'input str = $(['0'..='9' | 'a'..='f' | 'A'..='F']+)
|
||||||
|
|
||||||
|
}
|
||||||
|
}

fn parse_binary(digits: &str) -> Result<u64, &'static str> {
    let mut result: u64 = 0;
    let mut multiplier = 1;
    for d in digits.chars().rev() {
        match d {
            '1' => result += multiplier,
            '0' => (),
            '_' => continue,
            _ => unreachable!(),
        }
        multiplier = match multiplier.checked_mul(2) {
            Some(m) => m,
            None => return Err("Binary expression will overflow"),
        }
    }
    Ok(result)
}

fn parse_hex(digits: &str) -> Result<u64, &'static str> {
    let mut result: u64 = 0;
    let mut multiplier: u64 = 1;
    for d in digits.chars().rev() {
        if d == '_' {
            continue;
        }
        match d.to_digit(16) {
            Some(n) => result += n as u64 * multiplier,
            None => return Err("Internal parser error: invalid hex digit"),
        }
        multiplier = match multiplier.checked_mul(16) {
            Some(m) => m,
            None => return Err("Hexadecimal expression will overflow"),
        }
    }
    Ok(result)
}
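
// A few illustrative assertions (not in the commit) pin down the behavior of the two
// helpers above, including underscore handling and the overflow guard.
#[test]
fn numeric_literal_helper_sketch() {
    assert_eq!(parse_binary("1010"), Ok(10));
    assert_eq!(parse_binary("10_10"), Ok(10));
    assert_eq!(parse_hex("ff"), Ok(255));
    assert_eq!(parse_hex("de_ad"), Ok(0xdead));
    // 65 binary digits cannot fit in a u64, so the checked multiply bails out.
    assert!(parse_binary(&"1".repeat(65)).is_err());
}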

#[derive(Debug)]
struct BinopSequence {
    first: ExpressionKind,
    next: Vec<(BinOp, ExpressionKind)>,
}

impl BinopSequence {
    fn do_precedence(self, parser: &mut Parser) -> ExpressionKind {
        fn helper(
            precedence: i32,
            lhs: ExpressionKind,
            rest: &mut Vec<(BinOp, ExpressionKind)>,
            parser: &mut Parser,
        ) -> Expression {
            let mut lhs = Expression::new(parser.fresh(), lhs);
            while let Some((next_op, next_rhs)) = rest.pop() {
                let new_precedence = next_op.get_precedence();
                if precedence >= new_precedence {
                    rest.push((next_op, next_rhs));
                    break;
                }
                let rhs = helper(new_precedence, next_rhs, rest, parser);
                lhs = Expression::new(
                    parser.fresh(),
                    ExpressionKind::BinExp(next_op, Box::new(lhs), Box::new(rhs)),
                );
            }
            lhs
        }
        let mut as_stack = self.next.into_iter().rev().collect();
        helper(BinOp::min_precedence(), self.first, &mut as_stack, parser).kind
    }
}

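// `do_precedence` is a standard precedence-climbing fold over the flat operator list the
// grammar collects. Below is a self-contained sketch of the same idea on plain values
// (not part of the commit; real BinOp precedences live elsewhere in the crate, so the
// numbers here are made up).
fn fold_precedence(first: i64, ops: Vec<(char, i32, i64)>) -> String {
    fn helper(precedence: i32, lhs: String, rest: &mut Vec<(char, i32, i64)>) -> String {
        let mut lhs = lhs;
        while let Some((op, op_prec, rhs)) = rest.pop() {
            if precedence >= op_prec {
                rest.push((op, op_prec, rhs));
                break;
            }
            let rhs = helper(op_prec, rhs.to_string(), rest);
            lhs = format!("({} {} {})", op, lhs, rhs);
        }
        lhs
    }
    // Mirror do_precedence: reverse the list so popping yields operators in source order.
    let mut as_stack: Vec<_> = ops.into_iter().rev().collect();
    helper(i32::MIN, first.to_string(), &mut as_stack)
}

#[test]
fn multiplication_binds_tighter_than_addition() {
    // "1 + 2 * 3" groups as (+ 1 (* 2 3)) when '*' outranks '+'.
    assert_eq!(fold_precedence(1, vec![('+', 10, 2), ('*', 20, 3)]), "(+ 1 (* 2 3))");
}
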
schala-lang/src/parsing/test.rs (new file, 1461 lines)
File diff suppressed because it is too large
schala-lang/src/reduced_ir/mod.rs (new file, 484 lines)
@@ -0,0 +1,484 @@
use std::{collections::HashMap, rc::Rc, str::FromStr};

use crate::{
    ast,
    builtin::Builtin,
    symbol_table::{DefId, SymbolSpec, SymbolTable},
    type_inference::{TypeContext, TypeId},
};

mod test;
mod types;

pub use types::*;

pub fn reduce(ast: &ast::AST, symbol_table: &SymbolTable, type_context: &TypeContext) -> ReducedIR {
    let reducer = Reducer::new(symbol_table, type_context);
    reducer.reduce(ast)
}
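// Where `reduce` sits in the pipeline, sketched from the imports above; the actual
// driver lives in the `schala` module and is not shown here, so treat the exact calls
// as assumptions:
//
//     let ast = Parser::new().parse(source)?;              // parsing
//     let symbol_table = /* symbol_table pass over ast */;
//     let type_context = /* type_inference pass */;
//     let ir = reduce(&ast, &symbol_table, &type_context);
//
// `ir.functions` maps DefIds to reduced function bodies, and `ir.entrypoint` holds the
// reduced top-level statements to evaluate.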
|
||||||
|
struct Reducer<'a, 'b> {
|
||||||
|
symbol_table: &'a SymbolTable,
|
||||||
|
functions: HashMap<DefId, FunctionDefinition>,
|
||||||
|
type_context: &'b TypeContext,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, 'b> Reducer<'a, 'b> {
|
||||||
|
fn new(symbol_table: &'a SymbolTable, type_context: &'b TypeContext) -> Self {
|
||||||
|
Self { symbol_table, functions: HashMap::new(), type_context }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn reduce(mut self, ast: &ast::AST) -> ReducedIR {
|
||||||
|
// First reduce all functions
|
||||||
|
// TODO once this works, maybe rewrite it using the Visitor
|
||||||
|
for statement in ast.statements.statements.iter() {
|
||||||
|
self.top_level_definition(statement);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Then compute the entrypoint statements (which may reference previously-computed
|
||||||
|
// functions by ID)
|
||||||
|
let mut entrypoint = vec![];
|
||||||
|
for statement in ast.statements.statements.iter() {
|
||||||
|
let ast::Statement { id: item_id, kind, .. } = statement;
|
||||||
|
match &kind {
|
||||||
|
ast::StatementKind::Expression(expr) => {
|
||||||
|
entrypoint.push(Statement::Expression(self.expression(expr)));
|
||||||
|
}
|
||||||
|
ast::StatementKind::Declaration(ast::Declaration::Binding {
|
||||||
|
name: _,
|
||||||
|
constant,
|
||||||
|
expr,
|
||||||
|
..
|
||||||
|
}) => {
|
||||||
|
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
||||||
|
entrypoint.push(Statement::Binding {
|
||||||
|
id: symbol.def_id(),
|
||||||
|
constant: *constant,
|
||||||
|
expr: self.expression(expr),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ReducedIR { functions: self.functions, entrypoint }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn top_level_definition(&mut self, statement: &ast::Statement<ast::StatementKind>) {
|
||||||
|
let ast::Statement { id: item_id, kind, .. } = statement;
|
||||||
|
match kind {
|
||||||
|
ast::StatementKind::Expression(_expr) => {
|
||||||
|
//TODO expressions can in principle contain definitions, but I won't worry
|
||||||
|
//about it now
|
||||||
|
}
|
||||||
|
ast::StatementKind::Declaration(decl) => match decl {
|
||||||
|
ast::Declaration::FuncDecl(_, statements) => {
|
||||||
|
self.insert_function_definition(item_id, statements);
|
||||||
|
}
|
||||||
|
ast::Declaration::Impl { type_name: _, interface_name: _, block } =>
|
||||||
|
for item in block {
|
||||||
|
if let ast::Statement {
|
||||||
|
id: item_id,
|
||||||
|
kind: ast::Declaration::FuncDecl(_, statements),
|
||||||
|
..
|
||||||
|
} = item
|
||||||
|
{
|
||||||
|
self.insert_function_definition(item_id, statements);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_ => (),
|
||||||
|
},
|
||||||
|
// Imports should have already been processed by the symbol table and are irrelevant
|
||||||
|
// for this representation.
|
||||||
|
ast::StatementKind::Import(..) => (),
|
||||||
|
ast::StatementKind::Flow(..) => {
|
||||||
|
//TODO this should be an error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn function_internal_statement(
|
||||||
|
&mut self,
|
||||||
|
statement: &ast::Statement<ast::StatementKind>,
|
||||||
|
) -> Option<Statement> {
|
||||||
|
let ast::Statement { id: item_id, kind, .. } = statement;
|
||||||
|
match kind {
|
||||||
|
ast::StatementKind::Expression(expr) => Some(Statement::Expression(self.expression(expr))),
|
||||||
|
ast::StatementKind::Declaration(decl) => match decl {
|
||||||
|
ast::Declaration::FuncDecl(_, statements) => {
|
||||||
|
self.insert_function_definition(item_id, statements);
|
||||||
|
None
|
||||||
|
}
|
||||||
|
ast::Declaration::Binding { constant, expr, .. } => {
|
||||||
|
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
||||||
|
Some(Statement::Binding {
|
||||||
|
id: symbol.def_id(),
|
||||||
|
constant: *constant,
|
||||||
|
expr: self.expression(expr),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
_ => None,
|
||||||
|
},
|
||||||
|
ast::StatementKind::Import(_) => None,
|
||||||
|
ast::StatementKind::Flow(ast::FlowControl::Return(expr)) =>
|
||||||
|
if let Some(expr) = expr {
|
||||||
|
Some(Statement::Return(self.expression(expr)))
|
||||||
|
} else {
|
||||||
|
Some(Statement::Return(Expression::unit()))
|
||||||
|
},
|
||||||
|
ast::StatementKind::Flow(ast::FlowControl::Break) => Some(Statement::Break),
|
||||||
|
ast::StatementKind::Flow(ast::FlowControl::Continue) => Some(Statement::Continue),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn insert_function_definition(&mut self, item_id: &ast::ItemId, statements: &ast::Block) {
|
||||||
|
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
||||||
|
let function_def = FunctionDefinition { body: self.function_internal_block(statements) };
|
||||||
|
self.functions.insert(symbol.def_id(), function_def);
|
||||||
|
}
|
||||||
|
|
||||||
|
//TODO this needs to be type-aware to work correctly
|
||||||
|
fn lookup_method(&mut self, name: &str) -> Option<DefId> {
|
||||||
|
for (def_id, function) in self.functions.iter() {
|
||||||
|
let symbol = self.symbol_table.lookup_symbol_by_def(def_id)?;
|
||||||
|
println!("Def Id: {} symbol: {:?}", def_id, symbol);
|
||||||
|
if symbol.local_name() == name {
|
||||||
|
return Some(*def_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expression(&mut self, expr: &ast::Expression) -> Expression {
|
||||||
|
use crate::ast::ExpressionKind::*;
|
||||||
|
|
||||||
|
match &expr.kind {
|
||||||
|
SelfValue => Expression::Lookup(Lookup::SelfParam),
|
||||||
|
NatLiteral(n) => Expression::Literal(Literal::Nat(*n)),
|
||||||
|
FloatLiteral(f) => Expression::Literal(Literal::Float(*f)),
|
||||||
|
//TODO implement handling string literal prefixes
|
||||||
|
StringLiteral { s, prefix: _ } => Expression::Literal(Literal::StringLit(s.clone())),
|
||||||
|
BoolLiteral(b) => Expression::Literal(Literal::Bool(*b)),
|
||||||
|
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
|
||||||
|
PrefixExp(op, arg) => self.prefix(op, arg),
|
||||||
|
Value(qualified_name) => self.value(qualified_name),
|
||||||
|
Call { f, arguments } => {
|
||||||
|
let f = self.expression(f);
|
||||||
|
let args = arguments.iter().map(|arg| self.invocation_argument(arg)).collect();
|
||||||
|
//TODO need to have full type availability at this point to do this method lookup
|
||||||
|
//correctly
|
||||||
|
if let Expression::Access { name, expr } = f {
|
||||||
|
let def_id = self.lookup_method(&name).unwrap();
|
||||||
|
let method = Expression::Lookup(Lookup::Function(def_id));
|
||||||
|
Expression::CallMethod { f: Box::new(method), args, self_expr: expr }
|
||||||
|
} else {
|
||||||
|
Expression::Call { f: Box::new(f), args }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TupleLiteral(exprs) => Expression::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
|
||||||
|
IfExpression { discriminator, body } =>
|
||||||
|
self.reduce_if_expression(discriminator.as_ref().map(|x| x.as_ref()), body),
|
||||||
|
Lambda { params, body, .. } => Expression::Callable(Callable::Lambda {
|
||||||
|
arity: params.len() as u8,
|
||||||
|
body: self.function_internal_block(body),
|
||||||
|
}),
|
||||||
|
NamedStruct { name, fields } => {
|
||||||
|
let symbol = match self.symbol_table.lookup_symbol(&name.id) {
|
||||||
|
Some(symbol) => symbol,
|
||||||
|
None => return Expression::ReductionError(format!("No symbol found for {}", name)),
|
||||||
|
};
|
||||||
|
let (tag, type_id) = match symbol.spec() {
|
||||||
|
SymbolSpec::RecordConstructor { tag, type_id } => (tag, type_id),
|
||||||
|
e => return Expression::ReductionError(format!("Bad symbol for NamedStruct: {:?}", e)),
|
||||||
|
};
|
||||||
|
|
||||||
|
let field_order = compute_field_orderings(self.type_context, &type_id, tag).unwrap();
|
||||||
|
|
||||||
|
let mut field_map = HashMap::new();
|
||||||
|
for (name, expr) in fields.iter() {
|
||||||
|
field_map.insert(name.as_ref(), expr);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut ordered_args = vec![];
|
||||||
|
for field in field_order.iter() {
|
||||||
|
let expr = match field_map.get(&field) {
|
||||||
|
Some(expr) => expr,
|
||||||
|
None =>
|
||||||
|
return Expression::ReductionError(format!(
|
||||||
|
"Field {} not specified for record {}",
|
||||||
|
field, name
|
||||||
|
)),
|
||||||
|
};
|
||||||
|
ordered_args.push(self.expression(expr));
|
||||||
|
}
|
||||||
|
|
||||||
|
let constructor =
|
||||||
|
Expression::Callable(Callable::RecordConstructor { type_id, tag, field_order });
|
||||||
|
Expression::Call { f: Box::new(constructor), args: ordered_args }
|
||||||
|
}
|
||||||
|
Index { indexee, indexers } => self.reduce_index(indexee.as_ref(), indexers.as_slice()),
|
||||||
|
WhileExpression { condition, body } => {
|
||||||
|
let cond = Box::new(if let Some(condition) = condition {
|
||||||
|
self.expression(condition)
|
||||||
|
} else {
|
||||||
|
Expression::Literal(Literal::Bool(true))
|
||||||
|
});
|
||||||
|
let statements = self.function_internal_block(body);
|
||||||
|
Expression::Loop { cond, statements }
|
||||||
|
}
|
||||||
|
ForExpression { .. } => Expression::ReductionError("For expr not implemented".to_string()),
|
||||||
|
ListLiteral(items) => Expression::List(items.iter().map(|item| self.expression(item)).collect()),
|
||||||
|
Access { name, expr } =>
|
||||||
|
Expression::Access { name: name.as_ref().to_string(), expr: Box::new(self.expression(expr)) },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//TODO figure out the semantics of multiple indexers - for now, just ignore them
|
||||||
|
fn reduce_index(&mut self, indexee: &ast::Expression, indexers: &[ast::Expression]) -> Expression {
|
||||||
|
if indexers.len() != 1 {
|
||||||
|
return Expression::ReductionError("Invalid index expression".to_string());
|
||||||
|
}
|
||||||
|
let indexee = self.expression(indexee);
|
||||||
|
let indexer = self.expression(&indexers[0]);
|
||||||
|
        Expression::Index { indexee: Box::new(indexee), indexer: Box::new(indexer) }
    }

    fn reduce_if_expression(
        &mut self,
        discriminator: Option<&ast::Expression>,
        body: &ast::IfExpressionBody,
    ) -> Expression {
        use ast::IfExpressionBody::*;

        let cond = Box::new(match discriminator {
            Some(expr) => self.expression(expr),
            None => return Expression::ReductionError("blank cond if-expr not supported".to_string()),
        });
        match body {
            SimpleConditional { then_case, else_case } => {
                let then_clause = self.function_internal_block(then_case);
                let else_clause = match else_case.as_ref() {
                    None => vec![],
                    Some(stmts) => self.function_internal_block(stmts),
                };
                Expression::Conditional { cond, then_clause, else_clause }
            }
            SimplePatternMatch { pattern, then_case, else_case } => {
                let alternatives = vec![
                    Alternative {
                        pattern: match pattern.reduce(self.symbol_table) {
                            Ok(p) => p,
                            Err(e) => return Expression::ReductionError(format!("Bad pattern: {:?}", e)),
                        },
                        item: self.function_internal_block(then_case),
                    },
                    Alternative {
                        pattern: Pattern::Ignored,
                        item: match else_case.as_ref() {
                            Some(else_case) => self.function_internal_block(else_case),
                            None => vec![],
                        },
                    },
                ];

                Expression::CaseMatch { cond, alternatives }
            }
            CondList(ref condition_arms) => {
                let mut alternatives = vec![];
                for arm in condition_arms {
                    match arm.condition {
                        ast::Condition::Pattern(ref pat) => {
                            let alt = Alternative {
                                pattern: match pat.reduce(self.symbol_table) {
                                    Ok(p) => p,
                                    Err(e) =>
                                        return Expression::ReductionError(format!("Bad pattern: {:?}", e)),
                                },
                                item: self.function_internal_block(&arm.body),
                            };
                            alternatives.push(alt);
                        }
                        ast::Condition::TruncatedOp(_, _) =>
                            return Expression::ReductionError("case-expression-trunc-op".to_string()),
                        ast::Condition::Else =>
                            return Expression::ReductionError("case-expression-else".to_string()),
                    }
                }
                Expression::CaseMatch { cond, alternatives }
            }
        }
    }

    fn invocation_argument(&mut self, invoc: &ast::InvocationArgument) -> Expression {
        use crate::ast::InvocationArgument::*;
        match invoc {
            Positional(ex) => self.expression(ex),
            Keyword { .. } => Expression::ReductionError("Keyword arguments not supported".to_string()),
            Ignored => Expression::ReductionError("Ignored arguments not supported".to_string()),
        }
    }

    fn function_internal_block(&mut self, block: &ast::Block) -> Vec<Statement> {
        block.statements.iter().filter_map(|stmt| self.function_internal_statement(stmt)).collect()
    }

    fn prefix(&mut self, prefix: &ast::PrefixOp, arg: &ast::Expression) -> Expression {
        let builtin: Option<Builtin> = TryFrom::try_from(prefix).ok();
        match builtin {
            Some(op) => Expression::Call {
                f: Box::new(Expression::Callable(Callable::Builtin(op))),
                args: vec![self.expression(arg)],
            },
            None => {
                //TODO need this for custom prefix ops
                Expression::ReductionError("User-defined prefix ops not supported".to_string())
            }
        }
    }

    fn binop(&mut self, binop: &ast::BinOp, lhs: &ast::Expression, rhs: &ast::Expression) -> Expression {
        use Expression::ReductionError;

        let operation = Builtin::from_str(binop.sigil()).ok();
        match operation {
            Some(Builtin::Assignment) => {
                let lval = match &lhs.kind {
                    ast::ExpressionKind::Value(qualified_name) => {
                        if let Some(symbol) = self.symbol_table.lookup_symbol(&qualified_name.id) {
                            symbol.def_id()
                        } else {
                            return ReductionError(format!("Couldn't look up name: {:?}", qualified_name));
                        }
                    }
                    _ => return ReductionError("Trying to assign to a non-name".to_string()),
                };

                Expression::Assign { lval, rval: Box::new(self.expression(rhs)) }
            }
            Some(op) => Expression::Call {
                f: Box::new(Expression::Callable(Callable::Builtin(op))),
                args: vec![self.expression(lhs), self.expression(rhs)],
            },
            //TODO handle a user-defined operation
            None => ReductionError("User-defined operations not supported".to_string()),
        }
    }

    fn value(&mut self, qualified_name: &ast::QualifiedName) -> Expression {
        use SymbolSpec::*;

        let symbol = match self.symbol_table.lookup_symbol(&qualified_name.id) {
            Some(s) => s,
            None =>
                return Expression::ReductionError(format!("No symbol found for name: `{}`", qualified_name)),
        };

        let def_id = symbol.def_id();

        match symbol.spec() {
            Builtin(b) => Expression::Callable(Callable::Builtin(b)),
            Func { .. } => Expression::Lookup(Lookup::Function(def_id)),
            GlobalBinding => Expression::Lookup(Lookup::GlobalVar(def_id)),
            LocalVariable => Expression::Lookup(Lookup::LocalVar(def_id)),
            FunctionParam(n) => Expression::Lookup(Lookup::Param(n)),
            DataConstructor { tag, type_id } =>
                Expression::Callable(Callable::DataConstructor { type_id, tag }),
            RecordConstructor { .. } => Expression::ReductionError(format!(
                "The symbol for value {:?} is unexpectedly a RecordConstructor",
                qualified_name
            )),
        }
    }
}

impl ast::Pattern {
    fn reduce(&self, symbol_table: &SymbolTable) -> Result<Pattern, PatternError> {
        Ok(match self {
            ast::Pattern::Ignored => Pattern::Ignored,
            ast::Pattern::TuplePattern(subpatterns) => {
                let items: Result<Vec<Pattern>, PatternError> =
                    subpatterns.iter().map(|pat| pat.reduce(symbol_table)).into_iter().collect();
                let items = items?;
                Pattern::Tuple { tag: None, subpatterns: items }
            }
            ast::Pattern::Literal(lit) => Pattern::Literal(match lit {
                ast::PatternLiteral::NumPattern { neg, num } => match (neg, num) {
                    (false, ast::ExpressionKind::NatLiteral(n)) => Literal::Nat(*n),
                    (false, ast::ExpressionKind::FloatLiteral(f)) => Literal::Float(*f),
                    (true, ast::ExpressionKind::NatLiteral(n)) => Literal::Int(-(*n as i64)),
                    (true, ast::ExpressionKind::FloatLiteral(f)) => Literal::Float(-f),
                    (_, e) =>
                        return Err(format!("Internal error, unexpected pattern literal: {:?}", e).into()),
                },
                ast::PatternLiteral::StringPattern(s) => Literal::StringLit(s.clone()),
                ast::PatternLiteral::BoolPattern(b) => Literal::Bool(*b),
            }),
            ast::Pattern::TupleStruct(name, subpatterns) => {
                let symbol = symbol_table.lookup_symbol(&name.id).unwrap();
                if let SymbolSpec::DataConstructor { tag, type_id: _ } = symbol.spec() {
                    let items: Result<Vec<Pattern>, PatternError> =
                        subpatterns.iter().map(|pat| pat.reduce(symbol_table)).into_iter().collect();
                    let items = items?;
                    Pattern::Tuple { tag: Some(tag), subpatterns: items }
                } else {
                    return Err(
                        "Internal error, trying to match something that's not a DataConstructor".into()
                    );
                }
            }
            ast::Pattern::VarOrName(name) => {
                let symbol = symbol_table.lookup_symbol(&name.id).unwrap();
                match symbol.spec() {
                    SymbolSpec::DataConstructor { tag, type_id: _ } =>
                        Pattern::Tuple { tag: Some(tag), subpatterns: vec![] },
                    SymbolSpec::LocalVariable => {
                        let def_id = symbol.def_id();
                        Pattern::Binding(def_id)
                    }
                    spec => return Err(format!("Unexpected VarOrName symbol: {:?}", spec).into()),
                }
            }
            ast::Pattern::Record(name, specified_members) => {
                let symbol = symbol_table.lookup_symbol(&name.id).unwrap();
                if let SymbolSpec::RecordConstructor { tag, type_id: _ } = symbol.spec() {
                    //TODO do this computation from the type_id
                    /*
                    if specified_members.iter().any(|(member, _)| !members.contains_key(member)) {
                        return Err(format!("Unknown key in record pattern").into());
                    }
                    */

                    let subpatterns: Result<Vec<(Rc<String>, Pattern)>, PatternError> = specified_members
                        .iter()
                        .map(|(name, pat)| {
                            pat.reduce(symbol_table).map(|reduced_pat| (name.clone(), reduced_pat))
                        })
                        .into_iter()
                        .collect();
                    let subpatterns = subpatterns?;
                    Pattern::Record { tag, subpatterns }
                } else {
                    return Err(format!("Unexpected Record pattern symbol: {:?}", symbol.spec()).into());
                }
            }
        })
    }
}

/// Given the type context and a variant, compute what order the fields on it were stored.
/// This needs to be public until type-checking is fully implemented because the type information
/// is only available at runtime.
pub fn compute_field_orderings(
    type_context: &TypeContext,
    type_id: &TypeId,
    tag: u32,
) -> Option<Vec<String>> {
    // Eventually, the ReducedIR should decide what field ordering is optimal.
    // For now, just do it alphabetically.

    let record_members = type_context.lookup_record_members(type_id, tag)?;
    let mut field_order: Vec<String> =
        record_members.iter().map(|(field, _type_id)| field).cloned().collect();
    field_order.sort_unstable();
    Some(field_order)
}

schala-lang/src/reduced_ir/test.rs (new file, 61 lines)
@@ -0,0 +1,61 @@
#![cfg(test)]

use super::*;
use crate::{symbol_table::SymbolTable, type_inference::TypeContext};

fn build_ir(input: &str) -> ReducedIR {
    let ast = crate::util::quick_ast(input);

    let mut symbol_table = SymbolTable::new();
    let mut type_context = TypeContext::new();

    symbol_table.process_ast(&ast, &mut type_context).unwrap();

    let reduced = reduce(&ast, &symbol_table, &type_context);
    reduced.debug(&symbol_table);
    reduced
}

#[test]
fn test_ir() {
    let src = r#"

    let global_one = 10 + 20
    let global_two = "the string hello"

    fn a_function(i, j, k) {
        fn nested(x) {
            x + 10
        }
        i + j * nested(k)
    }

    fn another_function(e) {
        let local_var = 420
        e * local_var
    }

    another_function()
    "#;

    let reduced = build_ir(src);
    assert_eq!(reduced.functions.len(), 3);
}

#[test]
fn test_methods() {
    let src = r#"
    type Thing = Thing
    impl Thing {
        fn a_method() {
            20
        }

    }

    let a = Thing
    4 + a.a_method()
    "#;
    let reduced = build_ir(src);
    assert_eq!(reduced.functions.len(), 1);
}

schala-lang/src/reduced_ir/types.rs (new file, 137 lines)
@@ -0,0 +1,137 @@
use std::{collections::HashMap, convert::From, rc::Rc};

use crate::{
    builtin::Builtin,
    symbol_table::{DefId, SymbolTable},
    type_inference::TypeId,
};

//TODO most of these Clone impls only exist to support function application, because the
//tree-walking evaluator moves the reduced IR members.

/// The reduced intermediate representation consists of a list of function definitions, and a block
/// of entrypoint statements. In a repl or script context this can be an arbitrary list of
/// statements; in an executable context it will likely just be a pointer to the main() function.
#[derive(Debug)]
pub struct ReducedIR {
    pub functions: HashMap<DefId, FunctionDefinition>,
    pub entrypoint: Vec<Statement>,
}

impl ReducedIR {
    #[allow(dead_code)]
    pub fn debug(&self, symbol_table: &SymbolTable) {
        println!("Reduced IR:");
        println!("Functions:");
        println!("-----------");
        for (id, callable) in self.functions.iter() {
            let name = &symbol_table.lookup_symbol_by_def(id).unwrap().local_name();
            println!("{}({}) -> {:?}", id, name, callable);
        }
        println!();
        println!("Entrypoint:");
        println!("-----------");
        for stmt in self.entrypoint.iter() {
            println!("{:?}", stmt);
        }
        println!("-----------");
    }
}

#[derive(Debug, Clone)]
pub enum Statement {
    Expression(Expression),
    Binding { id: DefId, constant: bool, expr: Expression },
    Return(Expression),
    Continue,
    Break,
}

#[derive(Debug, Clone)]
pub enum Expression {
    Literal(Literal),
    Tuple(Vec<Expression>),
    List(Vec<Expression>),
    Lookup(Lookup),
    Assign { lval: DefId, rval: Box<Expression> },
    Access { name: String, expr: Box<Expression> },
    Callable(Callable),
    Call { f: Box<Expression>, args: Vec<Expression> },
    CallMethod { f: Box<Expression>, args: Vec<Expression>, self_expr: Box<Expression> },
    Conditional { cond: Box<Expression>, then_clause: Vec<Statement>, else_clause: Vec<Statement> },
    CaseMatch { cond: Box<Expression>, alternatives: Vec<Alternative> },
    Loop { cond: Box<Expression>, statements: Vec<Statement> },
    Index { indexee: Box<Expression>, indexer: Box<Expression> },
    ReductionError(String),
}

impl Expression {
    pub fn unit() -> Self {
        Expression::Tuple(vec![])
    }
}

#[derive(Debug)]
pub struct FunctionDefinition {
    pub body: Vec<Statement>,
}

#[derive(Debug, Clone)]
pub enum Callable {
    Builtin(Builtin),
    UserDefined(DefId),
    Lambda { arity: u8, body: Vec<Statement> },
    DataConstructor { type_id: TypeId, tag: u32 },
    RecordConstructor { type_id: TypeId, tag: u32, field_order: Vec<String> },
}

#[derive(Debug, Clone)]
pub enum Lookup {
    LocalVar(DefId),
    GlobalVar(DefId),
    Function(DefId),
    Param(u8),
    SelfParam,
}

#[derive(Debug, Clone, PartialEq)]
pub enum Literal {
    Nat(u64),
    Int(i64),
    Float(f64),
    Bool(bool),
    StringLit(Rc<String>),
}

#[derive(Debug, Clone)]
pub struct Alternative {
    pub pattern: Pattern,
    pub item: Vec<Statement>,
}

#[derive(Debug, Clone)]
pub enum Pattern {
    Tuple { subpatterns: Vec<Pattern>, tag: Option<u32> },
    Record { tag: u32, subpatterns: Vec<(Rc<String>, Pattern)> },
    Literal(Literal),
    Ignored,
    Binding(DefId),
}

#[allow(dead_code)]
#[derive(Debug)]
pub struct PatternError {
    msg: String,
}

impl From<&str> for PatternError {
    fn from(s: &str) -> Self {
        Self { msg: s.to_string() }
    }
}

impl From<String> for PatternError {
    fn from(msg: String) -> Self {
        Self { msg }
    }
}

schala-lang/src/schala.rs (new file, 210 lines)
@@ -0,0 +1,210 @@
use schala_repl::{
    ComputationRequest, ComputationResponse, GlobalOutputStats, LangMetaRequest, LangMetaResponse,
    ProgrammingLanguageInterface,
};
use stopwatch::Stopwatch;

use crate::{error::SchalaError, parsing, reduced_ir, symbol_table, tree_walk_eval, type_inference};

/// All the state necessary to parse and execute a Schala program is stored in this struct.
pub struct Schala<'a> {
    /// Holds a reference to the original source code, parsed into line and character
    source_reference: SourceReference,

    //state: eval::State<'static>,
    /// Keeps track of symbols and scopes
    symbol_table: symbol_table::SymbolTable,
    /// Contains information for type-checking
    type_context: type_inference::TypeContext,
    /// Schala Parser
    active_parser: parsing::Parser,

    /// Execution state for AST-walking interpreter
    eval_state: tree_walk_eval::State<'a>,

    timings: Vec<(&'static str, std::time::Duration)>,
}

/*
impl Schala {
    //TODO implement documentation for language items
    /*
    fn handle_docs(&self, source: String) -> LangMetaResponse {
        LangMetaResponse::Docs {
            doc_string: format!("Schala item `{}` : <<Schala-lang documentation not yet implemented>>", source)
        }
    }
    */
}
*/

impl<'a> Schala<'a> {
    /// Creates a new Schala environment *without* any prelude.
    fn new_blank_env() -> Schala<'a> {
        Schala {
            source_reference: SourceReference::new(),
            symbol_table: symbol_table::SymbolTable::new(),
            type_context: type_inference::TypeContext::new(),
            active_parser: parsing::Parser::new(),
            eval_state: tree_walk_eval::State::new(),
            timings: Vec::new(),
        }
    }

    /// Creates a new Schala environment with the standard prelude, which is defined as ordinary
    /// Schala code in the file `prelude.schala`
    #[allow(clippy::new_without_default)]
    pub fn new() -> Schala<'a> {
        let prelude = include_str!("../source-files/prelude.schala");
        let mut env = Schala::new_blank_env();

        let response = env.run_pipeline(prelude, SchalaConfig::default());
        if let Err(err) = response {
            panic!("Error in prelude, panicking: {}", err.display());
        }
        env
    }

    /// This is where the actual action of interpreting/compilation happens.
    /// Note: this should eventually use a query-based system for parallelization, cf.
    /// https://rustc-dev-guide.rust-lang.org/overview.html
    fn run_pipeline(&mut self, source: &str, config: SchalaConfig) -> Result<String, SchalaError> {
        self.timings = vec![];
        let sw = Stopwatch::start_new();

        self.source_reference.load_new_source(source);
        let ast = self
            .active_parser
            .parse(source)
            .map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;
        self.timings.push(("parsing", sw.elapsed()));

        let sw = Stopwatch::start_new();
        //Perform all symbol table work
        self.symbol_table
            .process_ast(&ast, &mut self.type_context)
            .map_err(SchalaError::from_symbol_table)?;

        self.timings.push(("symbol_table", sw.elapsed()));

        // Typechecking
        let _overall_type = self.type_context.typecheck(&ast).map_err(SchalaError::from_type_error);

        let sw = Stopwatch::start_new();
        let reduced_ir = reduced_ir::reduce(&ast, &self.symbol_table, &self.type_context);
        self.timings.push(("reduced_ir", sw.elapsed()));

        let sw = Stopwatch::start_new();
        let evaluation_outputs = self.eval_state.evaluate(reduced_ir, &self.type_context, config.repl);
        self.timings.push(("tree-walking-evaluation", sw.elapsed()));
        let text_output: Result<Vec<String>, String> = evaluation_outputs.into_iter().collect();

        let text_output: Result<Vec<String>, SchalaError> =
            text_output.map_err(|err| SchalaError::from_string(err, Stage::Evaluation));

        let eval_output: String =
            text_output.map(|v| Iterator::intersperse(v.into_iter(), "\n".to_owned()).collect())?;

        Ok(eval_output)
    }
}

/// Represents lines of source code
pub(crate) struct SourceReference {
    last_source: Option<String>,
    /// Offsets in *bytes* (not chars) representing a newline character
    newline_offsets: Vec<usize>,
}

impl SourceReference {
    pub(crate) fn new() -> SourceReference {
        SourceReference { last_source: None, newline_offsets: vec![] }
    }

    pub(crate) fn load_new_source(&mut self, source: &str) {
        self.newline_offsets = vec![];
        for (offset, ch) in source.as_bytes().iter().enumerate() {
            if *ch == b'\n' {
                self.newline_offsets.push(offset);
            }
        }
        self.last_source = Some(source.to_string());
    }

    // (line_start, line_num, the string itself)
    pub fn get_line(&self, line: usize) -> (usize, usize, String) {
        if self.newline_offsets.is_empty() {
            return (0, 0, self.last_source.as_ref().cloned().unwrap());
        }

        //TODO make sure this is utf8-safe
        let start_idx = match self.newline_offsets.binary_search(&line) {
            Ok(index) | Err(index) => index,
        };

        let last_source = self.last_source.as_ref().unwrap();

        let start = self.newline_offsets[start_idx];
        let end = self.newline_offsets.get(start_idx + 1).cloned().unwrap_or_else(|| last_source.len());

        let slice = &last_source.as_bytes()[start..end];
        (start, start_idx, std::str::from_utf8(slice).unwrap().to_string())
    }
}

#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
pub(crate) enum Stage {
    Parsing,
    Symbols,
    ScopeResolution,
    Typechecking,
    AstReduction,
    Evaluation,
}

fn stage_names() -> Vec<&'static str> {
    vec!["parsing", "symbol-table", "typechecking", "ast-reduction", "ast-walking-evaluation"]
}

#[derive(Default, Clone)]
pub struct SchalaConfig {
    pub repl: bool,
}

impl<'a> ProgrammingLanguageInterface for Schala<'a> {
    //TODO flesh out Config
    type Config = SchalaConfig;
    fn language_name() -> String {
        "Schala".to_owned()
    }

    fn source_file_suffix() -> String {
        "schala".to_owned()
    }

    fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse {
        let ComputationRequest { source, debug_requests: _, config: _ } = request;
        let sw = Stopwatch::start_new();

        let main_output =
            self.run_pipeline(source, request.config).map_err(|schala_err| schala_err.display());
        let total_duration = sw.elapsed();

        let stage_durations: Vec<_> = std::mem::take(&mut self.timings)
            .into_iter()
            .map(|(label, duration)| (label.to_string(), duration))
            .collect();
        let global_output_stats = GlobalOutputStats { total_duration, stage_durations };

        ComputationResponse { main_output, global_output_stats, debug_responses: vec![] }
    }

    fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
        match request {
            LangMetaRequest::StageNames =>
                LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()),
            _ => LangMetaResponse::Custom { kind: "not-implemented".to_string(), value: "".to_string() },
        }
    }
}

schala-lang/src/symbol_table/fqsn.rs (new file, 65 lines)
@@ -0,0 +1,65 @@
use std::{fmt, rc::Rc};

/// Fully-qualified symbol name
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct Fqsn {
    //TODO Fqsn's need to be cheaply cloneable
    pub scopes: Vec<ScopeSegment>,
}

impl Fqsn {
    pub fn from_scope_stack(scopes: &[ScopeSegment], new_name: Rc<String>) -> Self {
        let mut v = Vec::new();
        for s in scopes {
            v.push(s.clone());
        }
        v.push(ScopeSegment::Name(new_name));
        Fqsn { scopes: v }
    }

    pub fn extend(&self, new_item: &str) -> Self {
        let mut new = self.clone();
        new.scopes.push(ScopeSegment::Name(Rc::new(new_item.to_string())));
        new
    }

    #[allow(dead_code)]
    pub fn from_strs(strs: &[&str]) -> Fqsn {
        let mut scopes = vec![];
        for s in strs {
            scopes.push(ScopeSegment::Name(Rc::new(s.to_string())));
        }
        Fqsn { scopes }
    }

    pub fn last_elem(&self) -> Rc<String> {
        let ScopeSegment::Name(name) = self.scopes.last().unwrap();
        name.clone()
    }
}

impl fmt::Display for Fqsn {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let delim = "::";
        let Fqsn { scopes } = self;
        write!(f, "FQSN<{}", scopes[0])?;
        for item in scopes[1..].iter() {
            write!(f, "{}{}", delim, item)?;
        }
        write!(f, ">")
    }
}

//TODO eventually this should use ItemId's to avoid String-cloning
/// One segment within a scope.
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub enum ScopeSegment {
    Name(Rc<String>),
}

impl fmt::Display for ScopeSegment {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let ScopeSegment::Name(name) = self;
        write!(f, "{}", name)
    }
}

schala-lang/src/symbol_table/mod.rs (new file, 244 lines)
@@ -0,0 +1,244 @@
#![allow(clippy::enum_variant_names)]

use std::{
    collections::{hash_map::Entry, HashMap},
    fmt,
    rc::Rc,
};

use crate::{
    ast,
    ast::ItemId,
    builtin::Builtin,
    parsing::Location,
    type_inference::{TypeContext, TypeId},
};

mod populator;
use populator::SymbolTablePopulator;
mod fqsn;
pub use fqsn::{Fqsn, ScopeSegment};
mod resolver;
mod symbol_trie;
use symbol_trie::SymbolTrie;
mod test;
use crate::identifier::{define_id_kind, Id, IdStore};

define_id_kind!(DefItem);
pub type DefId = Id<DefItem>;

#[allow(dead_code)]
#[derive(Debug, Clone)]
pub enum SymbolError {
    DuplicateName { prev_name: Fqsn, location: Location },
    DuplicateVariant { type_fqsn: Fqsn, name: String },
    DuplicateRecord { type_fqsn: Fqsn, location: Location, record: String, member: String },
    UnknownAnnotation { name: String },
    BadAnnotation { name: String, msg: String },
    BadImplBlockEntry,
}

#[allow(dead_code)]
#[derive(Debug)]
struct NameSpec<K> {
    location: Location,
    kind: K,
}

#[derive(Debug)]
enum NameKind {
    Module,
    Function,
    Binding,
}

#[derive(Debug)]
enum TypeKind {
    Function,
    Constructor,
}

/// Keeps track of what names were used in a given namespace.
struct NameTable<K> {
    table: HashMap<Fqsn, NameSpec<K>>,
}

impl<K> NameTable<K> {
    fn new() -> Self {
        Self { table: HashMap::new() }
    }

    fn register(&mut self, name: Fqsn, spec: NameSpec<K>) -> Result<(), SymbolError> {
        match self.table.entry(name.clone()) {
            Entry::Occupied(o) =>
                Err(SymbolError::DuplicateName { prev_name: name, location: o.get().location }),
            Entry::Vacant(v) => {
                v.insert(spec);
                Ok(())
            }
        }
    }
}

//cf. p. 150 or so of Language Implementation Patterns
pub struct SymbolTable {
    def_id_store: IdStore<DefItem>,

    /// Used for import resolution.
    symbol_trie: SymbolTrie,

    /// These tables are responsible for preventing duplicate names.
    fq_names: NameTable<NameKind>, //Note that presence of two tables implies that a type and other binding with the same name can co-exist
    types: NameTable<TypeKind>,

    id_to_def: HashMap<ItemId, DefId>,
    def_to_symbol: HashMap<DefId, Rc<Symbol>>,
}

impl SymbolTable {
    /// Create a new, empty SymbolTable
    pub fn new() -> Self {
        Self {
            def_id_store: IdStore::new(),
            symbol_trie: SymbolTrie::new(),
            fq_names: NameTable::new(),
            types: NameTable::new(),
            id_to_def: HashMap::new(),
            def_to_symbol: HashMap::new(),
        }
    }

    /// The main entry point into the symbol table. This will traverse the AST in several
    /// different ways and populate subtables with information that will be used further in the
    /// compilation process.
    pub fn process_ast(
        &mut self,
        ast: &ast::AST,
        type_context: &mut TypeContext,
    ) -> Result<(), Vec<SymbolError>> {
        let mut populator = SymbolTablePopulator { type_context, table: self };

        let errs = populator.populate_definition_tables(ast);
        if !errs.is_empty() {
            return Err(errs);
        }

        // Walks the AST, matching the ID of an identifier used in some expression to
        // the corresponding Symbol.
        let mut resolver = resolver::ScopeResolver::new(self);
        resolver.resolve(ast);

        Ok(())
    }

    pub fn lookup_symbol(&self, id: &ItemId) -> Option<&Symbol> {
        let def = self.id_to_def.get(id)?;
        self.def_to_symbol.get(def).map(|s| s.as_ref())
    }

    pub fn lookup_symbol_by_def(&self, def: &DefId) -> Option<&Symbol> {
        self.def_to_symbol.get(def).map(|s| s.as_ref())
    }

    #[allow(dead_code)]
    pub fn debug(&self) {
        println!("Symbol table:");
        println!("----------------");
        for (id, def) in self.id_to_def.iter() {
            if let Some(symbol) = self.def_to_symbol.get(def) {
                println!("{} => {}: {}", id, def, symbol);
            } else {
                println!("{} => {} <NO SYMBOL FOUND>", id, def);
            }
        }
    }

    /// Register a new mapping of a fully-qualified symbol name (e.g. `Option::Some`)
    /// to a Symbol, a descriptor of what that name refers to.
    fn add_symbol(&mut self, id: &ItemId, fqsn: Fqsn, spec: SymbolSpec) {
        let def_id = self.def_id_store.fresh();
        let local_name = fqsn.last_elem();
        let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), local_name, spec, def_id });
        self.symbol_trie.insert(&fqsn, def_id);
        self.id_to_def.insert(*id, def_id);
        self.def_to_symbol.insert(def_id, symbol);
    }

    fn populate_single_builtin(&mut self, fqsn: Fqsn, builtin: Builtin) {
        let def_id = self.def_id_store.fresh();
        let spec = SymbolSpec::Builtin(builtin);
        let local_name = fqsn.last_elem();
        let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), local_name, spec, def_id });

        self.symbol_trie.insert(&fqsn, def_id);
        self.def_to_symbol.insert(def_id, symbol);
    }
}

#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct Symbol {
    fully_qualified_name: Fqsn,
    local_name: Rc<String>,
    spec: SymbolSpec,
    def_id: DefId,
}

impl Symbol {
    pub fn local_name(&self) -> &str {
        self.local_name.as_ref()
    }

    pub fn def_id(&self) -> DefId {
        self.def_id
    }

    pub fn spec(&self) -> SymbolSpec {
        self.spec.clone()
    }
}

impl fmt::Display for Symbol {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "<Local name: {}, {}, Spec: {}>", self.local_name(), self.fully_qualified_name, self.spec)
    }
}

//TODO - I think I eventually want to draw a distinction between true global items
//i.e. global vars, and items whose definitions are scoped. Right now there's a sense
//in which Func, DataConstructor, RecordConstructor, and GlobalBinding are "globals",
//whereas LocalVariable and FunctionParam have local scope. But right now, they all
//get put into a common table, and all get DefId's from a common source.
//
//It would be good if individual functions could in parallel look up their own
//local vars without interfering with other lookups. Also some type definitions
//should be scoped in a similar way.
//
//Also it makes sense that non-globals should not use DefId's, particularly not
//function parameters (even though they are currently assigned).
#[derive(Debug, Clone)]
pub enum SymbolSpec {
    Builtin(Builtin),
    Func { method: Option<crate::ast::TypeSingletonName> },
    DataConstructor { tag: u32, type_id: TypeId },
    RecordConstructor { tag: u32, type_id: TypeId },
    GlobalBinding, //Only for global variables, not for function-local ones or ones within a `let` scope context
    LocalVariable,
    FunctionParam(u8),
}

impl fmt::Display for SymbolSpec {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::SymbolSpec::*;
        match self {
            Builtin(b) => write!(f, "Builtin: {:?}", b),
            Func { .. } => write!(f, "Func"),
            DataConstructor { tag, type_id } => write!(f, "DataConstructor(tag: {}, type: {})", tag, type_id),
            RecordConstructor { type_id, tag, .. } =>
                write!(f, "RecordConstructor(tag: {})(<members> -> {})", tag, type_id),
            GlobalBinding => write!(f, "GlobalBinding"),
            LocalVariable => write!(f, "Local variable"),
            FunctionParam(n) => write!(f, "Function param: {}", n),
        }
    }
}

schala-lang/src/symbol_table/populator.rs (new file, 352 lines)
@@ -0,0 +1,352 @@
use std::{
    collections::{hash_map::Entry, HashMap, HashSet},
    rc::Rc,
    str::FromStr,
};

use super::{Fqsn, NameKind, NameSpec, ScopeSegment, SymbolError, SymbolSpec, SymbolTable, TypeKind};
use crate::{
    ast::{
        Declaration, Expression, ExpressionKind, ItemId, Statement, StatementKind, TypeBody,
        TypeSingletonName, Variant, VariantKind, AST,
    },
    builtin::Builtin,
    parsing::Location,
    type_inference::{self, PendingType, TypeBuilder, TypeContext, VariantBuilder},
};

pub(super) struct SymbolTablePopulator<'a> {
    pub(super) type_context: &'a mut TypeContext,
    pub(super) table: &'a mut SymbolTable,
}

impl<'a> SymbolTablePopulator<'a> {
    /* note: this adds names for *forward reference* but doesn't actually create any types. solve that problem
     * later */

    fn add_symbol(&mut self, id: &ItemId, fqsn: Fqsn, spec: SymbolSpec) {
        self.table.add_symbol(id, fqsn, spec)
    }

    /// This function traverses the AST and adds symbol table entries for
    /// constants, functions, types, and modules defined within. This simultaneously
    /// checks for duplicate definitions (and returns errors if discovered), and sets
    /// up name tables that will be used by further parts of the compiler
    pub fn populate_definition_tables(&mut self, ast: &AST) -> Vec<SymbolError> {
        let mut scope_stack = vec![];
        self.add_from_scope(ast.statements.as_ref(), &mut scope_stack, false)
    }

    fn add_from_scope(
        &mut self,
        statements: &[Statement<StatementKind>],
        scope_stack: &mut Vec<ScopeSegment>,
        function_scope: bool,
    ) -> Vec<SymbolError> {
        let mut errors = vec![];

        for statement in statements {
            let Statement { id, kind, location } = statement;
            let location = *location;
            if let Err(err) = self.add_single_statement(id, kind, location, scope_stack, function_scope) {
                errors.push(err);
            } else {
                let decl = match kind {
                    StatementKind::Declaration(decl) => decl,
                    _ => continue,
                };
                // If there's an error with a name, don't recurse into subscopes of that name
                let recursive_errs = match decl {
                    Declaration::FuncDecl(signature, body) => {
                        let new_scope = ScopeSegment::Name(signature.name.clone());
                        scope_stack.push(new_scope);
                        let output = self.add_from_scope(body.as_ref(), scope_stack, true);
                        scope_stack.pop();
                        output
                    }
                    Declaration::Module { name, items } => {
                        let new_scope = ScopeSegment::Name(name.clone());
                        scope_stack.push(new_scope);
                        let output = self.add_from_scope(items.as_ref(), scope_stack, false);
                        scope_stack.pop();
                        output
                    }
                    Declaration::TypeDecl { name, body, mutable } => {
                        let type_fqsn = Fqsn::from_scope_stack(scope_stack, name.name.clone());
                        self.add_type_members(name, body, mutable, location, type_fqsn)
                    }

                    Declaration::Impl { type_name, interface_name: _, block } => {
                        let mut errors = vec![];
                        let new_scope = ScopeSegment::Name(Rc::new(format!("<impl-block>{}", type_name)));
                        scope_stack.push(new_scope);

                        for decl_stmt in block.iter() {
                            let Statement { id, kind, location } = decl_stmt;
                            let location = *location;
                            match kind {
                                decl @ Declaration::FuncDecl(signature, body) => {
                                    let output =
                                        self.add_single_declaration(id, decl, location, scope_stack, true);
                                    if let Err(e) = output {
                                        errors.push(e);
                                    };
                                    let new_scope = ScopeSegment::Name(signature.name.clone());
                                    scope_stack.push(new_scope);
                                    let output = self.add_from_scope(body.as_ref(), scope_stack, true);
                                    scope_stack.pop();
                                    errors.extend(output.into_iter());
                                }
                                _other => errors.push(SymbolError::BadImplBlockEntry),
                            };
                        }
                        scope_stack.pop();
                        errors
                    }
                    _ => vec![],
                };
                errors.extend(recursive_errs.into_iter());
            }
        }

        errors
    }

    fn add_single_statement(
        &mut self,
        id: &ItemId,
        kind: &StatementKind,
        location: Location,
        scope_stack: &[ScopeSegment],
        function_scope: bool,
    ) -> Result<(), SymbolError> {
        match kind {
            StatementKind::Declaration(decl) =>
                self.add_single_declaration(id, decl, location, scope_stack, function_scope),
            _ => return Ok(()),
        }
    }

    fn add_single_declaration(
        &mut self,
        id: &ItemId,
        decl: &Declaration,
        location: Location,
        scope_stack: &[ScopeSegment],
        function_scope: bool,
    ) -> Result<(), SymbolError> {
        match decl {
            Declaration::FuncSig(signature) => {
                let fq_function = Fqsn::from_scope_stack(scope_stack, signature.name.clone());
                self.table
                    .fq_names
                    .register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
                self.table
                    .types
                    .register(fq_function.clone(), NameSpec { location, kind: TypeKind::Function })?;

                self.add_symbol(id, fq_function, SymbolSpec::Func { method: None });
            }
            Declaration::FuncDecl(signature, ..) => {
                let fn_name = &signature.name;
                let fq_function = Fqsn::from_scope_stack(scope_stack, fn_name.clone());
                self.table
                    .fq_names
                    .register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
                self.table
                    .types
                    .register(fq_function.clone(), NameSpec { location, kind: TypeKind::Function })?;

                self.add_symbol(id, fq_function, SymbolSpec::Func { method: None });
            }
            Declaration::TypeDecl { name, .. } => {
                let fq_type = Fqsn::from_scope_stack(scope_stack, name.name.clone());
                self.table.types.register(fq_type, NameSpec { location, kind: TypeKind::Constructor })?;
            }
            //TODO handle type aliases
            Declaration::TypeAlias { .. } => (),
            Declaration::Binding { name, .. } => {
                let fq_binding = Fqsn::from_scope_stack(scope_stack, name.clone());
                self.table
                    .fq_names
                    .register(fq_binding.clone(), NameSpec { location, kind: NameKind::Binding })?;
                if !function_scope {
                    self.add_symbol(id, fq_binding, SymbolSpec::GlobalBinding);
                }
            }
            //TODO implement interfaces
            Declaration::Interface { .. } => (),
            Declaration::Impl { .. } => (),
            Declaration::Module { name, .. } => {
                let fq_module = Fqsn::from_scope_stack(scope_stack, name.clone());
                self.table.fq_names.register(fq_module, NameSpec { location, kind: NameKind::Module })?;
            }
            Declaration::Annotation { name, arguments, inner } => {
                let inner = inner.as_ref();
                self.add_single_statement(
                    &inner.id,
                    &inner.kind,
                    inner.location,
                    scope_stack,
                    function_scope,
                )?;
                self.process_annotation(name.as_ref(), arguments.as_slice(), scope_stack, inner)?;
            }
        }
        Ok(())
    }

    fn process_annotation(
        &mut self,
        name: &str,
        arguments: &[Expression],
        scope_stack: &[ScopeSegment],
        inner: &Statement<StatementKind>,
    ) -> Result<(), SymbolError> {
        if name == "register_builtin" {
            if let Statement {
                id: _,
                location: _,
                kind: StatementKind::Declaration(Declaration::FuncDecl(sig, _)),
            } = inner
            {
                let fqsn = Fqsn::from_scope_stack(scope_stack, sig.name.clone());
                let builtin_name = match arguments {
                    [Expression { kind: ExpressionKind::Value(qname), .. }]
                        if qname.components.len() == 1 =>
                        qname.components[0].clone(),
                    _ =>
                        return Err(SymbolError::BadAnnotation {
                            name: name.to_string(),
                            msg: "Bad argument for register_builtin".to_string(),
                        }),
                };

                let builtin =
                    Builtin::from_str(builtin_name.as_str()).map_err(|_| SymbolError::BadAnnotation {
                        name: name.to_string(),
                        msg: format!("Invalid builtin: {}", builtin_name),
                    })?;

                self.table.populate_single_builtin(fqsn, builtin);
                Ok(())
            } else {
                Err(SymbolError::BadAnnotation {
                    name: name.to_string(),
                    msg: "register_builtin not annotating a function".to_string(),
                })
            }
        } else {
            Err(SymbolError::UnknownAnnotation { name: name.to_string() })
        }
    }

    fn add_type_members(
        &mut self,
        type_name: &TypeSingletonName,
        type_body: &TypeBody,
        _mutable: &bool,
        location: Location,
        type_fqsn: Fqsn,
    ) -> Vec<SymbolError> {
        let (variants, immediate_variant) = match type_body {
            TypeBody::Variants(variants) => (variants.clone(), false),
            TypeBody::ImmediateRecord { id, fields } => (
                vec![Variant {
                    id: *id,
                    name: type_name.name.clone(),
                    kind: VariantKind::Record(fields.clone()),
                }],
                true,
            ),
        };

        // Check for duplicates before registering any types with the TypeContext
        let mut seen_variants = HashSet::new();
        let mut errors = vec![];

        for variant in variants.iter() {
            if seen_variants.contains(&variant.name) {
                errors.push(SymbolError::DuplicateVariant {
                    type_fqsn: type_fqsn.clone(),
                    name: variant.name.as_ref().to_string(),
                })
            }
            seen_variants.insert(variant.name.clone());

            if let VariantKind::Record(ref members) = variant.kind {
                let mut seen_members = HashMap::new();
                for (member_name, _) in members.iter() {
                    match seen_members.entry(member_name.as_ref()) {
                        Entry::Occupied(o) => {
                            let location = *o.get();
                            errors.push(SymbolError::DuplicateRecord {
                                type_fqsn: type_fqsn.clone(),
                                location,
                                record: variant.name.as_ref().to_string(),
                                member: member_name.as_ref().to_string(),
                            });
                        }
                        //TODO eventually this should track meaningful locations
                        Entry::Vacant(v) => {
                            v.insert(location);
                        }
                    }
                }
            }
        }

        if !errors.is_empty() {
            return errors;
        }

        let mut type_builder = TypeBuilder::new(type_name.name.as_ref());

        let mut variant_name_map = HashMap::new();
        for variant in variants.iter() {
            let Variant { name, kind, id } = variant;

            variant_name_map.insert(name.clone(), id);

            let mut variant_builder = VariantBuilder::new(name.as_ref());
            match kind {
                VariantKind::UnitStruct => (),
                VariantKind::TupleStruct(items) =>
                    for type_identifier in items {
                        let pending: PendingType = type_identifier.into();
                        variant_builder.add_member(pending);
                    },
                VariantKind::Record(members) =>
                    for (field_name, type_identifier) in members.iter() {
                        let pending: PendingType = type_identifier.into();
                        variant_builder.add_record_member(field_name.as_ref(), pending);
                    },
            }
            type_builder.add_variant(variant_builder);
        }

        let type_id = self.type_context.register_type(type_builder);
        let type_definition = self.type_context.lookup_type(&type_id).unwrap();

        // This index is guaranteed to be the correct tag
        for (index, variant) in type_definition.variants.iter().enumerate() {
            let id = variant_name_map.get(&variant.name).unwrap();
            let tag = index as u32;
            let spec = match &variant.members {
                type_inference::VariantMembers::Unit => SymbolSpec::DataConstructor { tag, type_id },
                type_inference::VariantMembers::Tuple(..) => SymbolSpec::DataConstructor { tag, type_id },
                type_inference::VariantMembers::Record(..) => SymbolSpec::RecordConstructor { tag, type_id },
            };
            self.table.add_symbol(id, type_fqsn.extend(&variant.name), spec);
        }

        if immediate_variant {
            let variant = &type_definition.variants[0];
            let id = variant_name_map.get(&variant.name).unwrap();
            let spec = SymbolSpec::RecordConstructor { tag: 0, type_id };
            self.table.add_symbol(id, type_fqsn, spec);
        }

        vec![]
    }
}

schala-lang/src/symbol_table/resolver.rs (new file, 253 lines)
@@ -0,0 +1,253 @@
use std::rc::Rc;

use crate::{
    ast::*,
    symbol_table::{Fqsn, ScopeSegment, SymbolSpec, SymbolTable},
    util::ScopeStack,
};

#[derive(Debug)]
enum NameType {
    //TODO eventually this needs to support closures
    Param(u8),
    LocalVariable(ItemId),
    LocalFunction(ItemId),
    Import(Fqsn),
}

type LexScope<'a> = ScopeStack<'a, Rc<String>, NameType, ScopeType>;

#[derive(Debug)]
enum ScopeType {
    Function { name: Rc<String> },
    Lambda,
    PatternMatch,
    ImplBlock,
    //TODO add some notion of a let-like scope?
}

pub struct ScopeResolver<'a> {
    symbol_table: &'a mut super::SymbolTable,
    lexical_scopes: LexScope<'a>,
}

impl<'a> ScopeResolver<'a> {
    pub fn new(symbol_table: &'a mut SymbolTable) -> Self {
        let lexical_scopes = ScopeStack::new(None);
        Self { symbol_table, lexical_scopes }
    }

    pub fn resolve(&mut self, ast: &AST) {
        walk_ast(self, ast);
    }

    /// This method correctly modifies the id_to_def table (ItemId) to have the appropriate
    /// mappings.
    fn lookup_name_in_scope(&mut self, name: &QualifiedName) {
        //TODO this method badly needs attention
        let QualifiedName { id, components } = name;

        let local_name = components.first().unwrap().clone();
        let name_type = self.lexical_scopes.lookup(&local_name);
        let fqsn = Fqsn { scopes: components.iter().map(|name| ScopeSegment::Name(name.clone())).collect() };
        let def_id = self.symbol_table.symbol_trie.lookup(&fqsn);

        //TODO handle a "partial" qualified name, and also handle it down in the pattern-matching
        //section
        if components.len() == 1 {
            match name_type {
                Some(NameType::Import(fqsn)) => {
                    let def_id = self.symbol_table.symbol_trie.lookup(fqsn);

                    if let Some(def_id) = def_id {
                        self.symbol_table.id_to_def.insert(*id, def_id);
                    }
                }
                Some(NameType::Param(n)) => {
                    let spec = SymbolSpec::FunctionParam(*n);
                    //TODO need to come up with a better solution for local variable FQSNs
                    let lscope = ScopeSegment::Name(Rc::new("<local-param>".to_string()));
                    let fqsn = Fqsn { scopes: vec![lscope, ScopeSegment::Name(local_name.clone())] };
                    self.symbol_table.add_symbol(id, fqsn, spec);
                }
                Some(NameType::LocalFunction(item_id)) => {
                    let def_id = self.symbol_table.id_to_def.get(item_id);
                    if let Some(def_id) = def_id {
                        let def_id = *def_id;
                        self.symbol_table.id_to_def.insert(*id, def_id);
                    }
                }
                Some(NameType::LocalVariable(item_id)) => {
                    let def_id = self.symbol_table.id_to_def.get(item_id);
                    if let Some(def_id) = def_id {
                        let def_id = *def_id;
                        self.symbol_table.id_to_def.insert(*id, def_id);
                    }
                }
                None =>
                    if let Some(def_id) = def_id {
                        self.symbol_table.id_to_def.insert(*id, def_id);
                    },
            }
        } else if let Some(def_id) = def_id {
            self.symbol_table.id_to_def.insert(*id, def_id);
        }
    }
}

impl<'a> ASTVisitor for ScopeResolver<'a> {
    // Import statements bring in a bunch of local names that all map to a specific FQSN.
    // FQSNs map to a Symbol (or this is an error), Symbols have a DefId. So for every
    // name we import, we map a local name (a string) to a NameType::ImportedDefinition(DefId).
    fn import(&mut self, import_spec: &ImportSpecifier) -> Recursion {
        let ImportSpecifier { ref path_components, ref imported_names, .. } = &import_spec;
        match imported_names {
            ImportedNames::All => {
                let prefix =
                    Fqsn { scopes: path_components.iter().map(|c| ScopeSegment::Name(c.clone())).collect() };
                let members = self.symbol_table.symbol_trie.get_children(&prefix);
                for fqsn in members.into_iter() {
                    self.lexical_scopes.insert(fqsn.last_elem(), NameType::Import(fqsn));
                }
            }
            ImportedNames::LastOfPath => {
                let fqsn =
                    Fqsn { scopes: path_components.iter().map(|c| ScopeSegment::Name(c.clone())).collect() };
                self.lexical_scopes.insert(fqsn.last_elem(), NameType::Import(fqsn));
            }
            ImportedNames::List(ref names) => {
                let fqsn_prefix: Vec<ScopeSegment> =
                    path_components.iter().map(|c| ScopeSegment::Name(c.clone())).collect();
                for name in names.iter() {
                    let mut scopes = fqsn_prefix.clone();
                    scopes.push(ScopeSegment::Name(name.clone()));
                    let fqsn = Fqsn { scopes };
                    self.lexical_scopes.insert(fqsn.last_elem(), NameType::Import(fqsn));
                }
            }
        };
        Recursion::Continue
    }

    fn declaration(&mut self, declaration: &Declaration, id: &ItemId) -> Recursion {
        let cur_function_name = match self.lexical_scopes.get_name() {
            //TODO this needs to be a fqsn
            Some(ScopeType::Function { name }) => Some(name.clone()),
            _ => None,
        };
        match declaration {
            Declaration::FuncDecl(signature, block) => {
                let param_names = signature.params.iter().map(|param| param.name.clone());
                //TODO I'm 90% sure this is right, until I get to closures
                //let mut new_scope = self.lexical_scopes.new_scope(Some(ScopeType::Function { name: signature.name.clone() }));
                //TODO this will recurse unwantedly into scopes; need to pop an outer function
                //scope off first before going into a non-closure scope
                let mut new_scope =
                    ScopeStack::new(Some(ScopeType::Function { name: signature.name.clone() }));

                for (n, param) in param_names.enumerate() {
                    new_scope.insert(param, NameType::Param(n as u8));
                }

                self.lexical_scopes.insert(signature.name.clone(), NameType::LocalFunction(*id));

                let mut new_resolver =
                    ScopeResolver { symbol_table: self.symbol_table, lexical_scopes: new_scope };
                walk_block(&mut new_resolver, block);
                Recursion::Stop
            }
            Declaration::Binding { name, .. } => {
                if let Some(fn_name) = cur_function_name {
                    // We are within a function scope
                    let fqsn =
                        Fqsn { scopes: vec![ScopeSegment::Name(fn_name), ScopeSegment::Name(name.clone())] };
                    self.symbol_table.add_symbol(id, fqsn, SymbolSpec::LocalVariable);
                    self.lexical_scopes.insert(name.clone(), NameType::LocalVariable(*id));
                }
                Recursion::Continue
            }
            Declaration::Impl { block, .. } => {
                let new_scope = ScopeStack::new(Some(ScopeType::ImplBlock));
                let mut new_resolver =
                    ScopeResolver { symbol_table: self.symbol_table, lexical_scopes: new_scope };
                for stmt in block.iter() {
                    walk_declaration(&mut new_resolver, &stmt.kind, &stmt.id);
                }
                Recursion::Stop
            }
            _ => Recursion::Continue,
        }
    }

    fn expression(&mut self, expression: &Expression) -> Recursion {
        use ExpressionKind::*;
        match &expression.kind {
            Value(name) => {
                self.lookup_name_in_scope(name);
            }
            NamedStruct { name, fields: _ } => {
                self.lookup_name_in_scope(name);
            }
            Lambda { params, body, .. } => {
                let param_names = params.iter().map(|param| param.name.clone());
                //TODO need to properly handle closure scope, this is currently broken
                //let mut new_scope = self.lexical_scopes.new_scope(Some(ScopeType::Function { name: signature.name.clone() }));
                let mut new_scope = ScopeStack::new(Some(ScopeType::Lambda));

                for (n, param) in param_names.enumerate() {
                    new_scope.insert(param, NameType::Param(n as u8));
                }

                let mut new_resolver =
                    ScopeResolver { symbol_table: self.symbol_table, lexical_scopes: new_scope };
                walk_block(&mut new_resolver, body);
                return Recursion::Stop;
            }
            IfExpression { discriminator, body } => {
                if let Some(d) = discriminator.as_ref() {
                    walk_expression(self, d);
                }
                let mut resolver = ScopeResolver {
                    lexical_scopes: self.lexical_scopes.new_scope(Some(ScopeType::PatternMatch)),
                    symbol_table: self.symbol_table,
                };
                walk_if_expr_body(&mut resolver, body);
                return Recursion::Stop;
            }
            _ => (),
        }
        Recursion::Continue
    }

    fn pattern(&mut self, pat: &Pattern) -> Recursion {
        use Pattern::*;

        match pat {
            Literal(..) | Ignored | TuplePattern(..) => (),
            TupleStruct(name, _) | Record(name, _) => {
                self.lookup_name_in_scope(name);
            }
            //TODO this isn't really the right syntax for a VarOrName
            VarOrName(QualifiedName { id, components }) => {
                if components.len() == 1 {
                    //TODO need a better way to construct a FQSN from a QualifiedName
                    let local_name: Rc<String> = components[0].clone();
                    let lscope = ScopeSegment::Name(Rc::new("<local-case-match>".to_string()));
                    let fqsn = Fqsn { scopes: vec![lscope, ScopeSegment::Name(local_name.clone())] };
                    self.symbol_table.add_symbol(id, fqsn, SymbolSpec::LocalVariable);
                    self.lexical_scopes.insert(local_name, NameType::LocalVariable(*id));
                } else {
                    let fqsn = Fqsn {
                        scopes: components.iter().map(|name| ScopeSegment::Name(name.clone())).collect(),
                    };
                    let def_id = self.symbol_table.symbol_trie.lookup(&fqsn);

                    if let Some(def_id) = def_id {
                        self.symbol_table.id_to_def.insert(*id, def_id);
                    }
                }
            }
        };
        Recursion::Continue
    }
}
schala-lang/src/symbol_table/symbol_trie.rs (new file, 70 lines)
@@ -0,0 +1,70 @@
use std::{
    collections::hash_map::DefaultHasher,
    hash::{Hash, Hasher},
};

use radix_trie::{Trie, TrieCommon, TrieKey};

use super::{DefId, Fqsn, ScopeSegment};

#[derive(Debug)]
pub struct SymbolTrie(Trie<Fqsn, DefId>);

impl TrieKey for Fqsn {
    fn encode_bytes(&self) -> Vec<u8> {
        let mut hasher = DefaultHasher::new();
        let mut output = vec![];
        for segment in self.scopes.iter() {
            let ScopeSegment::Name(s) = segment;
            s.as_bytes().hash(&mut hasher);
            output.extend_from_slice(&hasher.finish().to_be_bytes());
        }
        output
    }
}

impl SymbolTrie {
    pub fn new() -> SymbolTrie {
        SymbolTrie(Trie::new())
    }

    pub fn insert(&mut self, fqsn: &Fqsn, def_id: DefId) {
        self.0.insert(fqsn.clone(), def_id);
    }

    pub fn lookup(&self, fqsn: &Fqsn) -> Option<DefId> {
        self.0.get(fqsn).cloned()
    }

    pub fn get_children(&self, fqsn: &Fqsn) -> Vec<Fqsn> {
        let subtrie = match self.0.subtrie(fqsn) {
            Some(s) => s,
            None => return vec![],
        };
        let output: Vec<Fqsn> = subtrie.keys().filter(|cur_key| **cur_key != *fqsn).cloned().collect();
        output
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::symbol_table::Fqsn;

    fn make_fqsn(strs: &[&str]) -> Fqsn {
        Fqsn::from_strs(strs)
    }

    #[test]
    fn test_trie_insertion() {
        let id = DefId::default();
        let mut trie = SymbolTrie::new();

        trie.insert(&make_fqsn(&["unrelated", "thing"]), id);
        trie.insert(&make_fqsn(&["outer", "inner"]), id);
        trie.insert(&make_fqsn(&["outer", "inner", "still_inner"]), id);

        let children = trie.get_children(&make_fqsn(&["outer", "inner"]));
        assert_eq!(children.len(), 1);
    }
}
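A note on the `TrieKey` impl above: the `DefaultHasher` is deliberately not reset between segments, so each 8-byte chunk depends on every segment hashed before it, and the encoding of an Fqsn is a byte-prefix of the encoding of any Fqsn that extends it; that prefix relationship is what lets `get_children` use `subtrie`. A minimal standalone sketch of that property follows (the `encode` helper and `main` are hypothetical illustration, not part of the diff):

// Sketch: the cumulative-hash encoding gives a byte-prefix relationship.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn encode(segments: &[&str]) -> Vec<u8> {
    let mut hasher = DefaultHasher::new();
    let mut out = vec![];
    for s in segments {
        s.as_bytes().hash(&mut hasher);              // hasher is never reset
        out.extend_from_slice(&hasher.finish().to_be_bytes());
    }
    out
}

fn main() {
    let parent = encode(&["outer"]);
    let child = encode(&["outer", "inner"]);
    // ["outer"] encodes to a prefix of ["outer", "inner"]'s encoding.
    assert!(child.starts_with(&parent));
    println!("prefix property holds: {} vs {} bytes", parent.len(), child.len());
}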
schala-lang/src/symbol_table/test.rs (new file, 314 lines)
@@ -0,0 +1,314 @@
#![cfg(test)]
use assert_matches::assert_matches;

use super::*;
use crate::util::quick_ast;

fn add_symbols(src: &str) -> (SymbolTable, Result<(), Vec<SymbolError>>) {
    let ast = quick_ast(src);
    let mut symbol_table = SymbolTable::new();
    let mut type_context = crate::type_inference::TypeContext::new();
    let result = symbol_table.process_ast(&ast, &mut type_context);
    (symbol_table, result)
}

fn make_fqsn(strs: &[&str]) -> Fqsn {
    Fqsn::from_strs(strs)
}

#[test]
fn basic_symbol_table() {
    let src = "let a = 10; fn b() { 20 }";
    let (symbols, _) = add_symbols(src);

    fn make_fqsn(strs: &[&str]) -> Fqsn {
        Fqsn::from_strs(strs)
    }

    symbols.fq_names.table.get(&make_fqsn(&["b"])).unwrap();

    let src = "type Option<T> = Some(T) | None";
    let (symbols, _) = add_symbols(src);

    symbols.types.table.get(&make_fqsn(&["Option"])).unwrap();
}

#[test]
fn no_function_definition_duplicates() {
    let source = r#"
fn a() { 1 }
fn b() { 2 }
fn a() { 3 }
"#;
    let (_, output) = add_symbols(source);
    let errs = output.unwrap_err();
    assert_matches!(&errs[..], [
        SymbolError::DuplicateName { prev_name, ..}
    ] if prev_name == &Fqsn::from_strs(&["a"])
    );
}

#[test]
fn no_variable_definition_duplicates() {
    let source = r#"
let x = 9
let a = 20
let q = 39
let a = 30
let x = 34
"#;
    let (_, output) = add_symbols(source);
    let errs = output.unwrap_err();

    assert_matches!(&errs[..], [
        SymbolError::DuplicateName { prev_name: pn1, ..},
        SymbolError::DuplicateName { prev_name: pn2, ..}
    ] if pn1 == &Fqsn::from_strs(&["a"]) && pn2 == &Fqsn::from_strs(&["x"])
    );
}

#[test]
fn no_type_definition_duplicates() {
    let source = r#"
let x = 9
type Food = Japchae | Burrito | Other
type Food = GoodJapchae | Breadfruit
"#;
    let (_, output) = add_symbols(source);
    let errs = output.unwrap_err();
    let err = &errs[0];

    match err {
        SymbolError::DuplicateName { location: _, prev_name } => {
            assert_eq!(prev_name, &Fqsn::from_strs(&["Food"]));

            //TODO restore this Location test
            //assert_eq!(location, &Location { line_num: 2, char_num: 2 });
        }
        _ => panic!(),
    }
}

#[test]
fn no_variant_duplicates() {
    let source = r#"
type Panda = FoolsGold | Kappa(i32) | Remix | Kappa | Thursday | Remix
"#;
    let (_, output) = add_symbols(source);
    let errs = output.unwrap_err();
    assert_eq!(errs.len(), 2);
    assert_matches!(&errs[0], SymbolError::DuplicateVariant {
        type_fqsn, name } if *type_fqsn == Fqsn::from_strs(&["Panda"]) &&
        name == "Kappa");

    assert_matches!(&errs[1], SymbolError::DuplicateVariant {
        type_fqsn, name } if *type_fqsn == Fqsn::from_strs(&["Panda"]) &&
        name == "Remix");
}

#[test]
fn no_variable_definition_duplicates_in_function() {
    let source = r#"
fn a() {
let a = 20
let b = 40
a + b
}

fn q() {
let a = 29
let x = 30
let x = 33
}
"#;
    let (_, output) = add_symbols(source);
    let errs = output.unwrap_err();
    assert_matches!(&errs[..], [
        SymbolError::DuplicateName { prev_name: pn1, ..},
    ] if pn1 == &Fqsn::from_strs(&["q", "x"])
    );
}

#[test]
fn dont_falsely_detect_duplicates() {
    let source = r#"
let a = 20;
fn some_func() {
let a = 40;
77
}
let q = 39
"#;
    let (symbols, _) = add_symbols(source);

    assert!(symbols.fq_names.table.get(&make_fqsn(&["a"])).is_some());
    assert!(symbols.fq_names.table.get(&make_fqsn(&["some_func", "a"])).is_some());
}

#[test]
fn enclosing_scopes() {
    let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
x + inner_func(x)
}"#;
    let (symbols, _) = add_symbols(source);
    assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func"])).is_some());
    assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "inner_func"])).is_some());
}

#[test]
fn enclosing_scopes_2() {
    let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}

fn second_inner_func() {
fn another_inner_func() {
}
}

inner_func(x)
}
"#;
    let (symbols, _) = add_symbols(source);
    assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func"])).is_some());
    assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "inner_func"])).is_some());
    assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "second_inner_func"])).is_some());
    assert!(symbols
        .fq_names
        .table
        .get(&make_fqsn(&["outer_func", "second_inner_func", "another_inner_func"]))
        .is_some());
}

#[test]
fn enclosing_scopes_3() {
    let source = r#"
fn outer_func(x) {

fn inner_func(arg) {
arg
}

fn second_inner_func() {
fn another_inner_func() {
}
fn another_inner_func() {
}
}

inner_func(x)
}"#;
    let (_, output) = add_symbols(source);
    let _err = output.unwrap_err();
}

#[test]
fn modules() {
    let source = r#"
module stuff {
fn item() {
}
}

fn item()
"#;

    let (symbols, _) = add_symbols(source);
    symbols.fq_names.table.get(&make_fqsn(&["stuff"])).unwrap();
    symbols.fq_names.table.get(&make_fqsn(&["item"])).unwrap();
    symbols.fq_names.table.get(&make_fqsn(&["stuff", "item"])).unwrap();
}

#[test]
fn duplicate_modules() {
    let source = r#"
module q {
fn foo() { 4 }
}

module a {
fn foo() { 334 }
}

module a {
fn sarat() { 39 }
fn foo() { 256.1 }
}
"#;
    let (_, output) = add_symbols(source);
    let errs = output.unwrap_err();

    assert_matches!(&errs[..], [
        SymbolError::DuplicateName { prev_name: pn1, ..},
    ] if pn1 == &Fqsn::from_strs(&["a"])
    );
}

#[test]
fn duplicate_struct_members() {
    let source = r#"
type Tarak = Tarak {
loujet: i32
,
mets: i32,
mets: i32
,
}
"#;

    let (_, output) = add_symbols(source);
    let errs = dbg!(output.unwrap_err());
    assert_matches!(&errs[..], [
        SymbolError::DuplicateRecord {
            type_fqsn, member, record, ..},
    ] if type_fqsn == &Fqsn::from_strs(&["Tarak"]) && member == "mets" && record == "Tarak"
    );
}

#[test]
fn method_definition_added_to_symbol_table() {
    let source = r#"

type Foo = { x: Int, y: Int }

impl Foo {
fn hella() {
let a = 50
self.x + a
}
}
"#;
    let (symbols, _) = add_symbols(source);
    symbols.debug();
    assert!(symbols.fq_names.table.get(&make_fqsn(&["<impl-block>Foo", "hella"])).is_some());
    assert!(symbols.fq_names.table.get(&make_fqsn(&["<impl-block>Foo", "hella", "a"])).is_some());
}

#[test]
fn duplicate_method_definitions_detected() {
    let source = r#"

type Foo = { x: Int, y: Int }

impl Foo {
fn hella() {
self.x + 50
}

fn hella() {
self.x + 40
}
}
"#;
    let (_symbols, output) = add_symbols(source);
    let errs = output.unwrap_err();
    assert_matches!(&errs[..], [
        SymbolError::DuplicateName { prev_name: pn1, ..},
    ] if pn1 == &Fqsn::from_strs(&["<impl-block>Foo", "hella"]));
}
schala-lang/src/tree_walk_eval/evaluator.rs (new file, 513 lines)
@@ -0,0 +1,513 @@
use std::rc::Rc;

use super::{EvalResult, Memory, MemoryValue, Primitive, State};
use crate::{
    builtin::Builtin,
    reduced_ir::{
        Alternative, Callable, Expression, FunctionDefinition, Literal, Lookup, Pattern, ReducedIR, Statement,
    },
    type_inference::TypeContext,
    util::ScopeStack,
};

#[derive(Debug)]
enum StatementOutput {
    Primitive(Primitive),
    Nothing,
}

#[derive(Debug, Clone, Copy)]
enum LoopControlFlow {
    Break,
    Continue,
}

pub struct Evaluator<'a, 'b> {
    type_context: &'b TypeContext,
    state: &'b mut State<'a>,
    early_returning: bool,
    loop_control: Option<LoopControlFlow>,
}

impl<'a, 'b> Evaluator<'a, 'b> {
    pub(crate) fn new(state: &'b mut State<'a>, type_context: &'b TypeContext) -> Self {
        Self { state, type_context, early_returning: false, loop_control: None }
    }

    pub fn evaluate(&mut self, reduced: ReducedIR, repl: bool) -> Vec<Result<String, String>> {
        let mut acc = vec![];
        for (def_id, function) in reduced.functions.into_iter() {
            let mem = (&def_id).into();
            self.state.memory.insert(mem, MemoryValue::Function(function));
        }

        for statement in reduced.entrypoint.into_iter() {
            match self.statement(statement) {
                Ok(StatementOutput::Primitive(output)) if repl =>
                    acc.push(Ok(output.to_repl(self.type_context))),
                Ok(_) => (),
                Err(error) => {
                    acc.push(Err(error.msg));
                    return acc;
                }
            }
        }
        acc
    }

    fn block(&mut self, statements: Vec<Statement>) -> EvalResult<Primitive> {
        let mut retval = None;
        for stmt in statements.into_iter() {
            match self.statement(stmt)? {
                StatementOutput::Nothing => (),
                StatementOutput::Primitive(prim) => {
                    retval = Some(prim);
                }
            };
            if self.early_returning {
                break;
            }
            if self.loop_control.is_some() {
                break;
            }
        }
        Ok(if let Some(ret) = retval { ret } else { self.expression(Expression::unit())? })
    }

    fn statement(&mut self, stmt: Statement) -> EvalResult<StatementOutput> {
        match stmt {
            Statement::Binding { ref id, expr, constant: _ } => {
                let evaluated = self.expression(expr)?;
                self.state.memory.insert(id.into(), evaluated.into());
                Ok(StatementOutput::Nothing)
            }
            Statement::Expression(expr) => {
                let evaluated = self.expression(expr)?;
                Ok(StatementOutput::Primitive(evaluated))
            }
            Statement::Return(expr) => {
                let evaluated = self.expression(expr)?;
                self.early_returning = true;
                Ok(StatementOutput::Primitive(evaluated))
            }
            Statement::Break => {
                self.loop_control = Some(LoopControlFlow::Break);
                Ok(StatementOutput::Nothing)
            }
            Statement::Continue => {
                self.loop_control = Some(LoopControlFlow::Continue);
                Ok(StatementOutput::Nothing)
            }
        }
    }

    fn expression(&mut self, expression: Expression) -> EvalResult<Primitive> {
        Ok(match expression {
            Expression::Literal(lit) => Primitive::Literal(lit),
            Expression::Tuple(items) => Primitive::Tuple(
                items
                    .into_iter()
                    .map(|expr| self.expression(expr))
                    .collect::<EvalResult<Vec<Primitive>>>()?,
            ),
            Expression::List(items) => Primitive::List(
                items
                    .into_iter()
                    .map(|expr| self.expression(expr))
                    .collect::<EvalResult<Vec<Primitive>>>()?,
            ),
            Expression::Lookup(kind) => match kind {
                Lookup::Function(ref id) => {
                    let mem = id.into();
                    match self.state.memory.lookup(&mem) {
                        // This just checks that the function exists in "memory" by ID, we don't
                        // actually retrieve it until `apply_function()`
                        Some(MemoryValue::Function(_)) => Primitive::Callable(Callable::UserDefined(*id)),
                        x => return Err(format!("Function not found for id: {} : {:?}", id, x).into()),
                    }
                }
                Lookup::Param(n) => {
                    let mem = n.into();
                    match self.state.memory.lookup(&mem) {
                        Some(MemoryValue::Primitive(prim)) => prim.clone(),
                        e => return Err(format!("Param lookup error, got {:?}", e).into()),
                    }
                }
                Lookup::SelfParam => {
                    let mem = Memory::self_param();
                    match self.state.memory.lookup(&mem) {
                        Some(MemoryValue::Primitive(prim)) => prim.clone(),
                        e => return Err(format!("SelfParam lookup error, got {:?}", e).into()),
                    }
                }
                Lookup::LocalVar(ref id) | Lookup::GlobalVar(ref id) => {
                    let mem = id.into();
                    match self.state.memory.lookup(&mem) {
                        Some(MemoryValue::Primitive(expr)) => expr.clone(),
                        _ =>
                            return Err(
                                format!("Nothing found for local/global variable lookup {}", id).into()
                            ),
                    }
                }
            },
            Expression::Assign { ref lval, box rval } => {
                let mem = lval.into();
                let evaluated = self.expression(rval)?;
                println!("Inserting {:?} into {:?}", evaluated, mem);
                self.state.memory.insert(mem, MemoryValue::Primitive(evaluated));
                Primitive::unit()
            }
            Expression::Call { box f, args } => self.call_expression(f, args, None)?,
            Expression::CallMethod { box f, args, box self_expr } =>
                self.call_expression(f, args, Some(self_expr))?,
            Expression::Callable(Callable::DataConstructor { type_id, tag }) => {
                let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
                if arity == 0 {
                    Primitive::Object { type_id, tag, items: vec![], ordered_fields: None }
                } else {
                    Primitive::Callable(Callable::DataConstructor { type_id, tag })
                }
            }
            Expression::Callable(func) => Primitive::Callable(func),
            Expression::Conditional { box cond, then_clause, else_clause } => {
                let cond = self.expression(cond)?;
                match cond {
                    Primitive::Literal(Literal::Bool(true)) => self.block(then_clause)?,
                    Primitive::Literal(Literal::Bool(false)) => self.block(else_clause)?,
                    v => return Err(format!("Non-boolean value {:?} in if-statement", v).into()),
                }
            }
            Expression::CaseMatch { box cond, alternatives } =>
                self.case_match_expression(cond, alternatives)?,
            Expression::Index { box indexee, box indexer } => {
                let indexee = self.expression(indexee)?;
                let indexer = self.expression(indexer)?;
                match (indexee, indexer) {
                    (Primitive::List(items), Primitive::Literal(Literal::Nat(n))) =>
                        match items.get(n as usize) {
                            Some(item) => item.clone(),
                            None => return Err(format!("Invalid index {} for this value", n).into()),
                        },
                    _ => return Err("Invalid index type".to_string().into()),
                }
            }
            Expression::Loop { box cond, statements } => self.loop_expression(cond, statements)?,
            Expression::ReductionError(e) => return Err(e.into()),
            Expression::Access { name, box expr } => {
                let expr = self.expression(expr)?;
                match expr {
                    Primitive::Object { items, ordered_fields: Some(ordered_fields), .. } => {
                        let idx = match ordered_fields.iter().position(|s| s == &name) {
                            Some(idx) => idx,
                            None => return Err(format!("Field `{}` not found", name).into()),
                        };

                        let item = match items.get(idx) {
                            Some(item) => item,
                            None => return Err(format!("Field lookup `{}` failed", name).into()),
                        };

                        item.clone()
                    }
                    e =>
                        return Err(
                            format!("Trying to do a field lookup on a non-object value: {:?}", e).into()
                        ),
                }
            }
        })
    }

    fn loop_expression(&mut self, cond: Expression, statements: Vec<Statement>) -> EvalResult<Primitive> {
        let existing = self.loop_control;
        let output = self.loop_expression_inner(cond, statements);
        self.loop_control = existing;
        output
    }

    fn loop_expression_inner(
        &mut self,
        cond: Expression,
        statements: Vec<Statement>,
    ) -> EvalResult<Primitive> {
        loop {
            let cond = self.expression(cond.clone())?;
            println!("COND: {:?}", cond);
            match cond {
                Primitive::Literal(Literal::Bool(true)) => (),
                Primitive::Literal(Literal::Bool(false)) => break,
                e => return Err(format!("Loop condition evaluates to non-boolean: {:?}", e).into()),
            };
            //TODO eventually loops should be able to return something
            let _output = self.block(statements.clone())?;
            match self.loop_control {
                None => (),
                Some(LoopControlFlow::Continue) => {
                    self.loop_control = None;
                }
                Some(LoopControlFlow::Break) => {
                    break;
                }
            }
        }
        Ok(Primitive::unit())
    }

    fn case_match_expression(
        &mut self,
        cond: Expression,
        alternatives: Vec<Alternative>,
    ) -> EvalResult<Primitive> {
        fn matches(scrut: &Primitive, pat: &Pattern, scope: &mut ScopeStack<Memory, MemoryValue>) -> bool {
            match pat {
                Pattern::Ignored => true,
                Pattern::Binding(ref def_id) => {
                    let mem = def_id.into();
                    scope.insert(mem, MemoryValue::Primitive(scrut.clone())); //TODO make sure this doesn't cause problems with nesting
                    true
                }
                Pattern::Literal(pat_literal) =>
                    if let Primitive::Literal(scrut_literal) = scrut {
                        pat_literal == scrut_literal
                    } else {
                        false
                    },
                Pattern::Tuple { subpatterns, tag } => match tag {
                    None => match scrut {
                        Primitive::Tuple(items) if items.len() == subpatterns.len() => items
                            .iter()
                            .zip(subpatterns.iter())
                            .all(|(item, subpat)| matches(item, subpat, scope)),
                        _ => false, //TODO should be a type error
                    },
                    Some(pattern_tag) => match scrut {
                        //TODO should test type_ids for runtime type checking, once those work
                        Primitive::Object { tag, items, .. }
                            if tag == pattern_tag && items.len() == subpatterns.len() =>
                            items
                                .iter()
                                .zip(subpatterns.iter())
                                .all(|(item, subpat)| matches(item, subpat, scope)),
                        _ => false,
                    },
                },
                Pattern::Record { tag: pattern_tag, subpatterns } => match scrut {
                    //TODO several types of possible error here
                    Primitive::Object { tag, items, ordered_fields: Some(ordered_fields), .. }
                        if tag == pattern_tag =>
                        subpatterns.iter().all(|(field_name, subpat)| {
                            let idx = ordered_fields
                                .iter()
                                .position(|field| field.as_str() == field_name.as_ref())
                                .unwrap();
                            let item = &items[idx];
                            matches(item, subpat, scope)
                        }),
                    _ => false,
                },
            }
        }
        let cond = self.expression(cond)?;

        for alt in alternatives.into_iter() {
            let mut new_scope = self.state.memory.new_scope(None);
            if matches(&cond, &alt.pattern, &mut new_scope) {
                let mut new_state = State { memory: new_scope };
                let mut evaluator = Evaluator::new(&mut new_state, self.type_context);
                let output = evaluator.block(alt.item);
                self.early_returning = evaluator.early_returning;
                return output;
            }
        }
        Err("No valid match in match expression".into())
    }

    //TODO need to do something with self_expr to make method invocations actually work
    fn call_expression(
        &mut self,
        f: Expression,
        args: Vec<Expression>,
        self_expr: Option<Expression>,
    ) -> EvalResult<Primitive> {
        let func = match self.expression(f)? {
            Primitive::Callable(func) => func,
            other => return Err(format!("Trying to call non-function value: {:?}", other).into()),
        };
        match func {
            Callable::Builtin(builtin) => self.apply_builtin(builtin, args),
            Callable::UserDefined(def_id) => {
                let mem = (&def_id).into();
                match self.state.memory.lookup(&mem) {
                    Some(MemoryValue::Function(FunctionDefinition { body })) => {
                        let body = body.clone(); //TODO ideally this clone would not happen
                        self.apply_function(body, args, self_expr)
                    }
                    e => Err(format!("Error looking up function with id {}: {:?}", def_id, e).into()),
                }
            }
            Callable::Lambda { arity, body } => {
                if arity as usize != args.len() {
                    return Err(format!(
                        "Lambda expression requires {} arguments, only {} provided",
                        arity,
                        args.len()
                    )
                    .into());
                }
                self.apply_function(body, args, None)
            }
            Callable::DataConstructor { type_id, tag } => {
                let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
                if arity as usize != args.len() {
                    return Err(format!(
                        "Constructor expression requires {} arguments, only {} provided",
                        arity,
                        args.len()
                    )
                    .into());
                }

                let mut items: Vec<Primitive> = vec![];
                for arg in args.into_iter() {
                    items.push(self.expression(arg)?);
                }
                Ok(Primitive::Object { type_id, tag, items, ordered_fields: None })
            }
            Callable::RecordConstructor { type_id, tag, field_order } => {
                //TODO maybe I'll want to do a runtime check of the evaluated fields
                /*
                let record_members = self.type_context.lookup_record_members(type_id, tag)
                    .ok_or(format!("Runtime record lookup for: {} {} not found", type_id, tag).into())?;
                */

                let mut items: Vec<Primitive> = vec![];
                for arg in args.into_iter() {
                    items.push(self.expression(arg)?);
                }
                Ok(Primitive::Object { type_id, tag, items, ordered_fields: Some(field_order) })
            }
        }
    }

    fn apply_builtin(&mut self, builtin: Builtin, args: Vec<Expression>) -> EvalResult<Primitive> {
        use Builtin::*;
        use Literal::*;
        use Primitive::Literal as Lit;

        let evaled_args: EvalResult<Vec<Primitive>> =
            args.into_iter().map(|arg| self.expression(arg)).collect();
        let evaled_args = evaled_args?;

        Ok(match (builtin, evaled_args.as_slice()) {
            /* builtin functions */
            (IOPrint, &[ref anything]) => {
                print!("{}", anything.to_repl(self.type_context));
                Primitive::Tuple(vec![])
            }
            (IOPrintLn, &[ref anything]) => {
                println!("{}", anything.to_repl(self.type_context));
                Primitive::Tuple(vec![])
            }
            (IOGetLine, &[]) => {
                let mut buf = String::new();
                std::io::stdin().read_line(&mut buf).expect("Error reading line in 'getline'");
                StringLit(Rc::new(buf.trim().to_string())).into()
            }
            /* Binops */
            (binop, &[ref lhs, ref rhs]) => match (binop, lhs, rhs) {
                // TODO need a better way of handling these literals
                (Add, Lit(Nat(l)), Lit(Nat(r))) => Nat(l + r).into(),
                (Add, Lit(Int(l)), Lit(Int(r))) => Int(l + r).into(),
                (Add, Lit(Nat(l)), Lit(Int(r))) => Int((*l as i64) + (*r as i64)).into(),
                (Add, Lit(Int(l)), Lit(Nat(r))) => Int((*l as i64) + (*r as i64)).into(),
                (Concatenate, Lit(StringLit(ref s1)), Lit(StringLit(ref s2))) =>
                    StringLit(Rc::new(format!("{}{}", s1, s2))).into(),
                (Subtract, Lit(Nat(l)), Lit(Nat(r))) => Nat(l - r).into(),
                (Multiply, Lit(Nat(l)), Lit(Nat(r))) => Nat(l * r).into(),
                (Divide, Lit(Nat(l)), Lit(Nat(r))) => Float((*l as f64) / (*r as f64)).into(),
                (Quotient, Lit(Nat(l)), Lit(Nat(r))) =>
                    if *r == 0 {
                        return Err("Divide-by-zero error".into());
                    } else {
                        Nat(l / r).into()
                    },
                (Modulo, Lit(Nat(l)), Lit(Nat(r))) => Nat(l % r).into(),
                (Exponentiation, Lit(Nat(l)), Lit(Nat(r))) => Nat(l ^ r).into(),
                (BitwiseAnd, Lit(Nat(l)), Lit(Nat(r))) => Nat(l & r).into(),
                (BitwiseOr, Lit(Nat(l)), Lit(Nat(r))) => Nat(l | r).into(),

                /* comparisons */
                (Equality, Lit(Nat(l)), Lit(Nat(r))) => Bool(l == r).into(),
                (Equality, Lit(Int(l)), Lit(Int(r))) => Bool(l == r).into(),
                (Equality, Lit(Float(l)), Lit(Float(r))) => Bool(l == r).into(),
                (Equality, Lit(Bool(l)), Lit(Bool(r))) => Bool(l == r).into(),
                (Equality, Lit(StringLit(ref l)), Lit(StringLit(ref r))) => Bool(l == r).into(),

                (NotEqual, Lit(Nat(l)), Lit(Nat(r))) => Bool(l != r).into(),
                (NotEqual, Lit(Int(l)), Lit(Int(r))) => Bool(l != r).into(),
                (NotEqual, Lit(Float(l)), Lit(Float(r))) => Bool(l != r).into(),
                (NotEqual, Lit(Bool(l)), Lit(Bool(r))) => Bool(l != r).into(),
                (NotEqual, Lit(StringLit(ref l)), Lit(StringLit(ref r))) => Bool(l != r).into(),

                (LessThan, Lit(Nat(l)), Lit(Nat(r))) => Bool(l < r).into(),
                (LessThan, Lit(Int(l)), Lit(Int(r))) => Bool(l < r).into(),
                (LessThan, Lit(Float(l)), Lit(Float(r))) => Bool(l < r).into(),

                (LessThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Bool(l <= r).into(),
                (LessThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Bool(l <= r).into(),
                (LessThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Bool(l <= r).into(),

                (GreaterThan, Lit(Nat(l)), Lit(Nat(r))) => Bool(l > r).into(),
                (GreaterThan, Lit(Int(l)), Lit(Int(r))) => Bool(l > r).into(),
                (GreaterThan, Lit(Float(l)), Lit(Float(r))) => Bool(l > r).into(),

                (GreaterThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Bool(l >= r).into(),
                (GreaterThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Bool(l >= r).into(),
                (GreaterThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Bool(l >= r).into(),

                (binop, lhs, rhs) =>
                    return Err(format!("Invalid binop expression {:?} {:?} {:?}", lhs, binop, rhs).into()),
            },
            (prefix, &[ref arg]) => match (prefix, arg) {
                (BooleanNot, Lit(Bool(true))) => Bool(false),
                (BooleanNot, Lit(Bool(false))) => Bool(true),
                (Negate, Lit(Nat(n))) => Int(-(*n as i64)),
                (Negate, Lit(Int(n))) => Int(-(*n as i64)),
                (Negate, Lit(Float(f))) => Float(-(*f as f64)),
                (Increment, Lit(Int(n))) => Int(*n),
                (Increment, Lit(Nat(n))) => Nat(*n),
                _ => return Err("No valid prefix op".into()),
            }
            .into(),
            (x, args) => return Err(format!("bad or unimplemented builtin {:?} | {:?}", x, args).into()),
        })
    }

    fn apply_function(
        &mut self,
        body: Vec<Statement>,
        args: Vec<Expression>,
        self_expr: Option<Expression>,
    ) -> EvalResult<Primitive> {
        let self_expr = if let Some(expr) = self_expr { Some(self.expression(expr)?) } else { None };
        let mut evaluated_args: Vec<Primitive> = vec![];
        for arg in args.into_iter() {
            evaluated_args.push(self.expression(arg)?);
        }

        let mut frame_state = State { memory: self.state.memory.new_scope(None) };
        let mut evaluator = Evaluator::new(&mut frame_state, self.type_context);

        if let Some(evaled) = self_expr {
            let mem = Memory::self_param();
            evaluator.state.memory.insert(mem, MemoryValue::Primitive(evaled));
        }
        for (n, evaled) in evaluated_args.into_iter().enumerate() {
            let n = n as u8;
            let mem = n.into();
            evaluator.state.memory.insert(mem, MemoryValue::Primitive(evaled));
        }
        evaluator.block(body)
    }
}
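A note on `loop_expression` above: it snapshots `loop_control` before running the loop and restores it afterwards, so a `break` or `continue` raised inside a nested loop cannot leak into the enclosing loop's control flow. A small self-contained sketch of that save/restore pattern follows (hypothetical names, not the evaluator's actual types):

// Sketch: save/restore of loop-control state around a nested loop.
#[derive(Clone, Copy, Debug, PartialEq)]
enum LoopControlFlow { Break, Continue }

struct Eval { loop_control: Option<LoopControlFlow> }

impl Eval {
    fn run_loop(&mut self, body: impl Fn(&mut Eval)) {
        let saved = self.loop_control;        // remember the enclosing loop's state
        self.loop_control = None;
        loop {
            body(self);
            match self.loop_control {
                Some(LoopControlFlow::Break) => break,
                Some(LoopControlFlow::Continue) => self.loop_control = None,
                None => break,                // a single pass is enough for this sketch
            }
        }
        self.loop_control = saved;            // restore on the way out
    }
}

fn main() {
    let mut e = Eval { loop_control: None };
    e.run_loop(|inner| {
        inner.loop_control = Some(LoopControlFlow::Break); // a "break" inside the inner loop
    });
    assert_eq!(e.loop_control, None); // the enclosing context is unaffected
    println!("inner break did not leak outward");
}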
schala-lang/src/tree_walk_eval/mod.rs (new file, 173 lines)
@@ -0,0 +1,173 @@
use std::{convert::From, fmt::Write};

use crate::{
    reduced_ir::{Callable, Expression, FunctionDefinition, Literal, ReducedIR},
    symbol_table::DefId,
    type_inference::{TypeContext, TypeId},
    util::{delim_wrapped, ScopeStack},
};

mod evaluator;
mod test;

type EvalResult<T> = Result<T, RuntimeError>;

#[derive(Debug)]
pub struct State<'a> {
    memory: ScopeStack<'a, Memory, MemoryValue>,
}

//TODO - eh, I dunno, maybe it doesn't matter exactly how memory works in the tree-walking
//evaluator
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
enum Memory {
    Index(u32),
}

impl Memory {
    fn self_param() -> Self {
        Memory::Index(3_999_999)
    }
}

// This is for function param lookups, and is a hack
impl From<u8> for Memory {
    fn from(n: u8) -> Self {
        Memory::Index(4_000_000 + (n as u32))
    }
}

impl From<&DefId> for Memory {
    fn from(id: &DefId) -> Self {
        Self::Index(id.as_u32())
    }
}

#[derive(Debug)]
struct RuntimeError {
    msg: String,
}

impl From<String> for RuntimeError {
    fn from(msg: String) -> Self {
        Self { msg }
    }
}

impl From<&str> for RuntimeError {
    fn from(msg: &str) -> Self {
        Self { msg: msg.to_string() }
    }
}

impl RuntimeError {
    #[allow(dead_code)]
    fn get_msg(&self) -> String {
        format!("Runtime error: {}", self.msg)
    }
}

/// Anything that can be stored in memory; that is, a function definition, or a fully-evaluated
/// program value.
#[derive(Debug)]
enum MemoryValue {
    Function(FunctionDefinition),
    Primitive(Primitive),
}

impl From<Primitive> for MemoryValue {
    fn from(prim: Primitive) -> Self {
        Self::Primitive(prim)
    }
}

#[derive(Debug)]
enum RuntimeValue {
    Expression(Expression),
    Evaluated(Primitive),
}

impl From<Expression> for RuntimeValue {
    fn from(expr: Expression) -> Self {
        Self::Expression(expr)
    }
}

impl From<Primitive> for RuntimeValue {
    fn from(prim: Primitive) -> Self {
        Self::Evaluated(prim)
    }
}

/// A fully-reduced value
#[derive(Debug, Clone)]
enum Primitive {
    Tuple(Vec<Primitive>),
    List(Vec<Primitive>),
    Literal(Literal),
    Callable(Callable),
    Object { type_id: TypeId, tag: u32, ordered_fields: Option<Vec<String>>, items: Vec<Primitive> },
}

impl Primitive {
    fn to_repl(&self, type_context: &TypeContext) -> String {
        match self {
            Primitive::Object { type_id, items, tag, ordered_fields: _ } if items.is_empty() =>
                type_context.variant_local_name(type_id, *tag).unwrap().to_string(),
            Primitive::Object { type_id, items, tag, ordered_fields: None } => {
                format!(
                    "{}{}",
                    type_context.variant_local_name(type_id, *tag).unwrap(),
                    delim_wrapped('(', ')', items.iter().map(|item| item.to_repl(type_context)))
                )
            }
            Primitive::Object { type_id, items, tag, ordered_fields: Some(fields) } => {
                let mut buf = format!("{} {{ ", type_context.variant_local_name(type_id, *tag).unwrap());
                for item in fields.iter().zip(items.iter()).map(Some).intersperse(None) {
                    match item {
                        Some((name, val)) => write!(buf, "{}: {}", name, val.to_repl(type_context)).unwrap(),
                        None => write!(buf, ", ").unwrap(),
                    }
                }
                write!(buf, " }}").unwrap();
                buf
            }
            Primitive::Literal(lit) => match lit {
                Literal::Nat(n) => format!("{}", n),
                Literal::Int(i) => format!("{}", i),
                Literal::Float(f) => format!("{}", f),
                Literal::Bool(b) => format!("{}", b),
                Literal::StringLit(s) => format!("\"{}\"", s),
            },
            Primitive::Tuple(terms) => delim_wrapped('(', ')', terms.iter().map(|x| x.to_repl(type_context))),
            Primitive::List(terms) => delim_wrapped('[', ']', terms.iter().map(|x| x.to_repl(type_context))),
            Primitive::Callable(..) => "<some-callable>".to_string(),
        }
    }

    fn unit() -> Self {
        Primitive::Tuple(vec![])
    }
}

impl From<Literal> for Primitive {
    fn from(lit: Literal) -> Self {
        Primitive::Literal(lit)
    }
}

impl<'a> State<'a> {
    pub fn new() -> Self {
        Self { memory: ScopeStack::new(Some("global".to_string())) }
    }

    pub fn evaluate(
        &mut self,
        reduced: ReducedIR,
        type_context: &TypeContext,
        repl: bool,
    ) -> Vec<Result<String, String>> {
        let mut evaluator = evaluator::Evaluator::new(self, type_context);
        evaluator.evaluate(reduced, repl)
    }
}
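A note on the `Memory` type above: it is a single flat u32 index space, with DefIds stored at their raw values, the `self` parameter pinned at 3_999_999, and positional params offset from 4_000_000 (the `From<u8>` impl the comment itself calls a hack). A tiny sketch of that layout follows, with hypothetical names, under the assumption stated in the code that DefIds never reach the param range:

// Sketch: the three disjoint regions of the flat memory index space.
#[derive(Debug, PartialEq)]
enum Slot { Index(u32) }

fn from_def_id(raw: u32) -> Slot { Slot::Index(raw) }          // DefIds keep their raw value
fn self_param() -> Slot { Slot::Index(3_999_999) }             // the `self` parameter
fn from_param(n: u8) -> Slot { Slot::Index(4_000_000 + n as u32) } // positional params

fn main() {
    assert_eq!(from_param(0), Slot::Index(4_000_000));
    assert_ne!(self_param(), from_param(0));
    assert_ne!(from_def_id(42), self_param());
    println!("def(42)={:?}, self={:?}, param0={:?}", from_def_id(42), self_param(), from_param(0));
}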
schala-lang/src/tree_walk_eval/test.rs (new file, 564 lines)
@@ -0,0 +1,564 @@
#![cfg(test)]
use pretty_assertions::assert_eq;
use test_case::test_case;

use crate::{
    symbol_table::SymbolTable,
    tree_walk_eval::{evaluator::Evaluator, State},
    type_inference::TypeContext,
};

fn evaluate_input(input: &str) -> Result<String, String> {
    let ast = crate::util::quick_ast(input);
    let mut symbol_table = SymbolTable::new();
    let mut type_context = TypeContext::new();

    symbol_table.process_ast(&ast, &mut type_context).unwrap();

    let reduced_ir = crate::reduced_ir::reduce(&ast, &symbol_table, &type_context);
    reduced_ir.debug(&symbol_table);
    println!("========");
    symbol_table.debug();

    let mut state = State::new();
    let mut evaluator = Evaluator::new(&mut state, &type_context);
    let mut outputs = evaluator.evaluate(reduced_ir, true);
    outputs.pop().unwrap()
}

fn eval_assert(input: &str, expected: &str) {
    assert_eq!(evaluate_input(input), Ok(expected.to_string()));
}

fn eval_assert_failure(input: &str, expected: &str) {
    assert_eq!(evaluate_input(input), Err(expected.to_string()));
}

#[test]
fn test_basic_eval() {
    eval_assert("1 + 2", "3");
    eval_assert("let mut a = 1; a = 2", "()");
    eval_assert("let mut a = 1; a = a + 2; a", "3");
}

#[test]
fn op_eval() {
    eval_assert("-13", "-13");
    eval_assert("10 - 2", "8");
}

#[test]
fn function_eval() {
    eval_assert("fn oi(x) { x + 1 }; oi(4)", "5");
    eval_assert("fn oi(x) { x + 1 }; oi(1+2)", "4");
}

#[test]
fn scopes() {
    let scope_ok = r#"
let a = 20
fn haha() {
let something = 38
let a = 10
a
}
haha()
"#;

    eval_assert(scope_ok, "10");

    let scope_ok = r#"
let a = 20
fn queque() {
let a = 10
a
}
a
"#;
    eval_assert(scope_ok, "20");
}

#[test]
fn eval_scopes_2() {
    eval_assert(
        r#"
fn trad() {
let a = 10
fn jinner() {
let b = 20
b
}

a + jinner()
}
trad()"#,
        "30",
    );

    let err = "No symbol found for name: `a`";

    eval_assert_failure(
        r#"
fn trad() {
let a = 10
fn inner() {
let b = 20
a + b
}

inner()
}

trad()
"#,
        err,
    );
}

#[test]
fn adt_output_1() {
    let source = r#"

type Option<T> = Some(T) | None
let a = Option::None
let b = Option::Some(10)
(b, a)
"#;
    eval_assert(source, "(Some(10), None)");
}

#[test]
fn adt_output_2() {
    let source = r#"
type Gobble = Unknown | Rufus { a: Int, torrid: Nat }
let b = Gobble::Rufus { a: 3, torrid: 99 }
b
"#;
    eval_assert(source, "Rufus { a: 3, torrid: 99 }");

    let source = r#"
type Gobble = Unknown | Rufus { a: Int, torrid: Nat }
let b = Gobble::Rufus { torrid: 3, a: 84 }
b
"#;
    eval_assert(source, "Rufus { a: 84, torrid: 3 }");

    let source = r#"
type Gobble = Unknown | Rufus { a: Int, torrid: Nat }
let b = Gobble::Rufus { a: 84 }
b
"#;
    eval_assert_failure(source, "Field torrid not specified for record Gobble::Rufus");
}

#[test]
fn basic_if_statement() {
    let source = r#"
let a = 10
let b = 10
if a == b then { 69 } else { 420 }
"#;
    eval_assert(source, "69");
}

#[test]
fn basic_patterns_1() {
    let source = r#"
let x = 10
let a = if x is 10 then { 255 } else { 256 }
let b = if 23 is 99 then { 255 } else { 256 }
let c = if true is false then { 9 } else { 10 }
let d = if "xxx" is "yyy" then { 20 } else { 30 }
(a, b, c, d)
"#;
    eval_assert(source, "(255, 256, 10, 30)");
}

#[test_case("sanchez", "1")]
#[test_case("mouri", "2")]
#[test_case("hella", "3")]
#[test_case("cyrus", "4")]
fn basic_patterns_2(input: &str, expected: &str) {
    let mut source = format!(r#"let x = "{}""#, input);
    source.push_str(
        r#"
if x {
is "sanchez" then 1
is "mouri" then 2
is "hella" then 3
is _ then 4
}
"#,
    );
    eval_assert(&source, expected);
}

#[test_case(r#"(45, "panda", false, 2.2)"#, r#""yes""#)]
#[test_case(r#"(99, "panda", false, -2.45)"#, r#""maybe""#)]
fn tuple_patterns(input: &str, expected: &str) {
    let mut source = format!("let x = {}", input);
    source.push_str(
        r#"
if x {
is (45, "pablo", _, 28.4) then "no"
is (_, "panda", _, 2.2) then "yes"
is _ then "maybe"
}"#,
    );

    eval_assert(&source, expected);
}

#[test]
fn record_patterns_1() {
    let source = r#"
type Ara = Kueh { a: Int, b: String } | Morbuk

let alpha = Ara::Kueh { a: 10, b: "sanchez" }
if alpha {
is Ara::Kueh { a, b } then (b, a)
is _ then ("nooo", 8888)
}"#;
    eval_assert(source, r#"("sanchez", 10)"#);
}

#[test]
fn record_patterns_2() {
    let source = r#"
type Ara = Kueh { a: Int, b: String } | Morbuk

let alpha = Ara::Kueh { a: 10, b: "sanchez" }
if alpha {
is Ara::Kueh { a, b: le_value } then (le_value, (a*2))
is _ then ("nooo", 8888)
}"#;
    eval_assert(source, r#"("sanchez", 20)"#);
}

#[test]
fn record_patterns_3() {
    let source = r#"
type Vstsavlobs = { tkveni: Int, b: Ia }
type Ia = { sitqva: Int, ghmerts: String }
let b = Vstsavlobs { tkveni: 3, b: Ia::Ia { sitqva: 5, ghmerts: "ooo" } }
if b {
is Vstsavlobs::Vstsavlobs { tkveni: _, b: Ia::Ia { sitqva, ghmerts } } then sitqva
is _ then 5000
}"#;
    eval_assert(source, "5");
}

#[test]
fn if_is_patterns() {
    let source = r#"
type Option<T> = Some(T) | None
let q = "a string"
let x = Option::Some(9); if x is Option::Some(q) then { q } else { 0 }"#;

    eval_assert(source, "9");

    let source = r#"
type Option<T> = Some(T) | None
let q = "a string"
let outer = 2
let x = Option::None; if x is Option::Some(q) then { q } else { -2 + outer }"#;
    eval_assert(source, "0");
}

#[test]
fn full_if_matching() {
    let source = r#"
type Option<T> = Some(T) | None
let a = Option::None
if a { is Option::None then 4; is Option::Some(x) then x }
"#;
    eval_assert(source, "4");

    let source = r#"
type Option<T> = Some(T) | None
let sara = Option::Some(99)
if sara { is Option::None then 1 + 3; is Option::Some(x) then x }
"#;
    eval_assert(source, "99");

    let source = r#"
let a = 10
if a { is 10 then "x"; is 4 then "y" }
"#;
    eval_assert(source, "\"x\"");

    let source = r#"
let a = 10
if a { is 15 then "x"; is 10 then "y" }
"#;
    eval_assert(source, "\"y\"");
}

//TODO - I can probably cut down some of these
#[test]
fn string_pattern() {
    let source = r#"
let a = "foo"
if a { is "foo" then "x"; is _ then "y" }
"#;
    eval_assert(source, "\"x\"");
}

#[test]
fn boolean_pattern() {
    let source = r#"
let a = true
if a {
is true then "x"
is false then "y"
}
"#;
    eval_assert(source, "\"x\"");
}

#[test]
fn boolean_pattern_2() {
    let source = r#"
let a = false
if a { is true then "x"; is false then "y" }
"#;
    eval_assert(source, "\"y\"");
}

#[test]
fn ignore_pattern() {
    let source = r#"
type Option<T> = Some(T) | None
if Option::Some(10) {
is _ then "hella"
}
"#;
    eval_assert(source, "\"hella\"");
}

#[test]
fn tuple_pattern() {
    let source = r#"
if (1, 2) {
is (1, x) then x;
is _ then 99
}
"#;
    eval_assert(source, "2");
}

#[test]
fn tuple_pattern_2() {
    let source = r#"
if (1, 2) {
is (10, x) then x
is (y, x) then x + y
}
"#;
    eval_assert(source, "3");
}

#[test]
fn tuple_pattern_3() {
    let source = r#"
if (1, 5) {
is (10, x) then x
is (1, x) then x
}
"#;
    eval_assert(source, "5");
}

#[test]
fn tuple_pattern_4() {
    let source = r#"
if (1, 5) {
is (10, x) then x
is (1, x) then x
}
"#;
    eval_assert(source, "5");
}

#[test]
fn prim_obj_pattern() {
    let source = r#"
type Stuff = Mulch(Nat) | Jugs(Nat, String) | Mardok
let a = Stuff::Mulch(20)
let b = Stuff::Jugs(1, "haha")
let c = Stuff::Mardok

let x = if a {
is Stuff::Mulch(20) then "x"
is _ then "ERR"
}

let y = if b {
is Stuff::Mulch(n) then "ERR"
is Stuff::Jugs(2, _) then "ERR"
is Stuff::Jugs(1, s) then s
is _ then "ERR"
}

let z = if c {
is Stuff::Jugs(_, _) then "ERR"
is Stuff::Mardok then "NIGH"
is _ then "ERR"
}

(x, y, z)
"#;
    eval_assert(source, r#"("x", "haha", "NIGH")"#);
}

#[test]
fn basic_lambda_evaluation_1() {
    let source = r#"
let q = \(x, y) { x * y }
let x = q(5, 2)
let y = \(m, n, o) { m + n + o }(1,2,3)
(x, y)
"#;

    eval_assert(source, r"(10, 6)");
}

#[test]
fn basic_lambda_evaluation_2() {
    let source = r#"
fn milta() {
\(x) { x + 33 }
}
milta()(10)
"#;

    eval_assert(source, "43");
}

#[test]
fn import_all() {
    let source = r#"
type Option<T> = Some(T) | None
import Option::*
let x = Some(9); if x is Some(q) then { q } else { 0 }"#;
    eval_assert(source, "9");
}

#[test]
fn accessors() {
    let source = r#"
type Klewos = { a: Int, b: String }
let value = Klewos::Klewos { a: 50, b: "nah" }
(value.a, value.b)
"#;

    eval_assert(source, r#"(50, "nah")"#);
}

#[test]
fn early_return() {
    let source = r#"
fn chnurmek(a: Int): Int {
if a == 5 then {
return 9999;
}
return (a + 2);
}

(chnurmek(5), chnurmek(0))
"#;
    eval_assert(source, r#"(9999, 2)"#);

    let source = r#"
fn marbuk(a: Int, b: Int): (Int, Int) {
if a == 5 then {
if b == 6 then {
return (50, 50);
}

return (a, b + 1)
}
(a * 100, b * 100)
}

let x = marbuk(1, 1)
let y = marbuk(5, 1)
let z = marbuk(5, 6)

(x, y, z)
"#;
    eval_assert(source, "((100, 100), (5, 2), (50, 50))");
}

#[test]
fn loops() {
    let source = r#"
let mut a = 0
let mut count = 0
while a != 5 {
a = a + 1
count = count + 100
}

count
"#;
    eval_assert(source, "500");
}

#[test]
fn loops_2() {
    let source = r#"
let mut a = 0
let mut acc = 0
while a < 10 {
acc = acc + 1
a = a + 1

// Without this continue, the output would be 20
if a == 5 then {
continue
}

acc = acc + 1
}

acc"#;
    eval_assert(source, "19");
}

#[test]
fn list_literals() {
    eval_assert(
        r#"
let a = [7, 8, 9]
a
"#,
        "[7, 8, 9]",
    );

    eval_assert(
        r#"
let a = [7, 8, 9]
fn foo() { return 2 }
(a[0], a[foo()])
"#,
        "(7, 9)",
    );
}

#[test]
fn eval_method() {
    let src = r#"
type Thing = Thing
impl Thing {
fn a_method() {
20
}

}

let a = Thing::Thing
4 + a.a_method()
"#;
    eval_assert(src, "24");
}
227  schala-lang/src/type_inference/mod.rs  Normal file
@@ -0,0 +1,227 @@
use std::{collections::HashMap, convert::From};

use crate::{
    ast::{TypeIdentifier, AST},
    identifier::{define_id_kind, Id, IdStore},
};

define_id_kind!(TypeItem);
pub type TypeId = Id<TypeItem>;

pub struct TypeContext {
    defined_types: HashMap<TypeId, DefinedType>,
    type_id_store: IdStore<TypeItem>,
}

impl TypeContext {
    pub fn new() -> Self {
        Self { defined_types: HashMap::new(), type_id_store: IdStore::new() }
    }

    pub fn register_type(&mut self, builder: TypeBuilder) -> TypeId {
        let type_id = self.type_id_store.fresh();

        let mut pending_variants = vec![];
        for variant_builder in builder.variants.into_iter() {
            let members = variant_builder.members;
            if members.is_empty() {
                pending_variants.push(Variant { name: variant_builder.name, members: VariantMembers::Unit });
                continue;
            }
            let record_variant = matches!(members.get(0).unwrap(), VariantMemberBuilder::KeyVal(..));

            if record_variant {
                let pending_members = members.into_iter().map(|var| match var {
                    VariantMemberBuilder::KeyVal(name, ty) => (name, ty),
                    _ => panic!("Compiler internal error: variant mismatch"),
                });

                //TODO make this mapping meaningful
                let type_ids = pending_members
                    .into_iter()
                    .map(|(name, _ty_id)| (name, self.type_id_store.fresh()))
                    .collect();
                pending_variants
                    .push(Variant { name: variant_builder.name, members: VariantMembers::Record(type_ids) });
            } else {
                let pending_members = members.into_iter().map(|var| match var {
                    VariantMemberBuilder::Pending(pending_type) => pending_type,
                    _ => panic!("Compiler internal error: variant mismatch"),
                });

                //TODO make this mapping meaningful
                let type_ids = pending_members.into_iter().map(|_ty_id| self.type_id_store.fresh()).collect();

                pending_variants
                    .push(Variant { name: variant_builder.name, members: VariantMembers::Tuple(type_ids) });
            }
        }

        // Eventually, I will want to have a better way of determining which numeric tag goes with
        // which variant. For now, just sort them alphabetically.
        pending_variants.sort_unstable_by(|a, b| a.name.cmp(&b.name));

        let defined = DefinedType { name: builder.name, variants: pending_variants };

        self.defined_types.insert(type_id, defined);
        type_id
    }

    pub fn variant_local_name(&self, type_id: &TypeId, tag: u32) -> Option<&str> {
        self.defined_types
            .get(type_id)
            .and_then(|defined| defined.variants.get(tag as usize))
            .map(|variant| variant.name.as_ref())
    }

    pub fn lookup_variant_arity(&self, type_id: &TypeId, tag: u32) -> Option<u32> {
        self.defined_types.get(type_id).and_then(|defined| defined.variants.get(tag as usize)).map(
            |variant| match &variant.members {
                VariantMembers::Unit => 0,
                VariantMembers::Tuple(items) => items.len() as u32,
                VariantMembers::Record(items) => items.len() as u32,
            },
        )
    }

    pub fn lookup_record_members(&self, type_id: &TypeId, tag: u32) -> Option<&[(String, TypeId)]> {
        self.defined_types.get(type_id).and_then(|defined| defined.variants.get(tag as usize)).and_then(
            |variant| match &variant.members {
                VariantMembers::Record(items) => Some(items.as_ref()),
                _ => None,
            },
        )
    }

    pub fn lookup_type(&self, type_id: &TypeId) -> Option<&DefinedType> {
        self.defined_types.get(type_id)
    }

    //TODO return some kind of overall type later?
    pub fn typecheck(&mut self, ast: &AST) -> Result<(), TypeError> {
        Ok(())
    }
}

/// A type defined in program source code, as opposed to a builtin.
#[allow(dead_code)]
#[derive(Debug)]
pub struct DefinedType {
    pub name: String,

    // the variants are in this list according to tag order
    pub variants: Vec<Variant>,
}

#[derive(Debug)]
pub struct Variant {
    pub name: String,
    pub members: VariantMembers,
}

#[derive(Debug)]
pub enum VariantMembers {
    Unit,
    // Should be non-empty
    Tuple(Vec<TypeId>),
    Record(Vec<(String, TypeId)>),
}

/// Represents a type mentioned as a member of another type during the type registration process.
/// It may not have been registered itself in the relevant context.
#[allow(dead_code)]
#[derive(Debug)]
pub struct PendingType {
    inner: TypeIdentifier,
}

impl From<&TypeIdentifier> for PendingType {
    fn from(type_identifier: &TypeIdentifier) -> Self {
        Self { inner: type_identifier.clone() }
    }
}

#[derive(Debug)]
pub struct TypeBuilder {
    name: String,
    variants: Vec<VariantBuilder>,
}

impl TypeBuilder {
    pub fn new(name: &str) -> Self {
        Self { name: name.to_string(), variants: vec![] }
    }

    pub fn add_variant(&mut self, vb: VariantBuilder) {
        self.variants.push(vb);
    }
}

#[derive(Debug)]
pub struct VariantBuilder {
    name: String,
    members: Vec<VariantMemberBuilder>,
}

impl VariantBuilder {
    pub fn new(name: &str) -> Self {
        Self { name: name.to_string(), members: vec![] }
    }

    pub fn add_member(&mut self, member_ty: PendingType) {
        self.members.push(VariantMemberBuilder::Pending(member_ty));
    }

    // You can't call this and `add_member` on the same variant builder; there should be a
    // runtime error when that's detected.
    pub fn add_record_member(&mut self, name: &str, ty: PendingType) {
        self.members.push(VariantMemberBuilder::KeyVal(name.to_string(), ty));
    }
}

#[derive(Debug)]
enum VariantMemberBuilder {
    Pending(PendingType),
    KeyVal(String, PendingType),
}

#[derive(Debug, Clone)]
pub struct TypeError {
    pub msg: String,
}

#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TypeConst {
    Unit,
    Nat,
    Int,
    Float,
    StringT,
    Bool,
    Ordering,
}

#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum Type {
    Const(TypeConst),
    //Var(TypeVar),
    Arrow { params: Vec<Type>, ret: Box<Type> },
    Compound { ty_name: String, args: Vec<Type> },
}

macro_rules! ty {
    ($type_name:ident) => {
        Type::Const(crate::type_inference::TypeConst::$type_name)
    };
    ($t1:ident -> $t2:ident) => {
        Type::Arrow { params: vec![ty!($t1)], ret: Box::new(ty!($t2)) }
    };
    ($t1:ident -> $t2:ident -> $t3:ident) => {
        Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: Box::new(ty!($t3)) }
    };
    ($type_list:ident, $ret_type:ident) => {
        Type::Arrow { params: $type_list, ret: Box::new($ret_type) }
    };
}
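A minimal usage sketch for the new TypeContext registration API. The Color/Red/Green/Blue names here are purely illustrative and not part of this module; the point is that variants are currently tagged in alphabetical order, per the comment in register_type.

    let mut type_context = TypeContext::new();

    let mut builder = TypeBuilder::new("Color");
    builder.add_variant(VariantBuilder::new("Red"));
    builder.add_variant(VariantBuilder::new("Green"));
    builder.add_variant(VariantBuilder::new("Blue"));
    let color_id = type_context.register_type(builder);

    // Tags follow alphabetical order for now: Blue = 0, Green = 1, Red = 2.
    assert_eq!(type_context.variant_local_name(&color_id, 0), Some("Blue"));
    // All three variants are unit variants, so their arity is 0.
    assert_eq!(type_context.lookup_variant_arity(&color_id, 2), Some(0));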
@ -1,8 +1,10 @@
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use std::fmt::Write;
|
use std::convert::TryFrom;
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
use ena::unify::{UnifyKey, InPlaceUnificationTable, UnificationTable, EqUnifyValue};
|
use ena::unify::{UnifyKey, InPlaceUnificationTable, UnificationTable, EqUnifyValue};
|
||||||
|
|
||||||
|
use crate::builtin::Builtin;
|
||||||
use crate::ast::*;
|
use crate::ast::*;
|
||||||
use crate::util::ScopeStack;
|
use crate::util::ScopeStack;
|
||||||
use crate::util::deref_optional_box;
|
use crate::util::deref_optional_box;
|
||||||
@ -20,7 +22,31 @@ impl TypeData {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type TypeName = Rc<String>;
|
//TODO need to hook this into the actual typechecking system somehow
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct TypeId {
|
||||||
|
local_name: Rc<String>
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TypeId {
|
||||||
|
//TODO this is definitely incomplete
|
||||||
|
pub fn lookup_name(name: &str) -> TypeId {
|
||||||
|
TypeId {
|
||||||
|
local_name: Rc::new(name.to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn local_name(&self) -> &str {
|
||||||
|
self.local_name.as_ref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for TypeId {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(f, "TypeId:{}", self.local_name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
pub struct TypeContext<'a> {
|
pub struct TypeContext<'a> {
|
||||||
variable_map: ScopeStack<'a, Rc<String>, Type>,
|
variable_map: ScopeStack<'a, Rc<String>, Type>,
|
||||||
@ -77,18 +103,21 @@ pub enum TypeConst {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl TypeConst {
|
impl TypeConst {
|
||||||
|
/*
|
||||||
|
#[allow(dead_code)]
|
||||||
pub fn to_string(&self) -> String {
|
pub fn to_string(&self) -> String {
|
||||||
use self::TypeConst::*;
|
use self::TypeConst::*;
|
||||||
match self {
|
match self {
|
||||||
Unit => format!("()"),
|
Unit => "()".to_string(),
|
||||||
Nat => format!("Nat"),
|
Nat => "Nat".to_string(),
|
||||||
Int => format!("Int"),
|
Int => "Int".to_string(),
|
||||||
Float => format!("Float"),
|
Float => "Float".to_string(),
|
||||||
StringT => format!("String"),
|
StringT => "String".to_string(),
|
||||||
Bool => format!("Bool"),
|
Bool => "Bool".to_string(),
|
||||||
Ordering => format!("Ordering"),
|
Ordering => "Ordering".to_string(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
*/
|
||||||
}
|
}
|
||||||
|
|
||||||
impl EqUnifyValue for TypeConst { }
|
impl EqUnifyValue for TypeConst { }
|
||||||
@ -107,13 +136,15 @@ macro_rules! ty {
|
|||||||
|
|
||||||
//TODO find a better way to capture the to/from string logic
|
//TODO find a better way to capture the to/from string logic
|
||||||
impl Type {
|
impl Type {
|
||||||
|
/*
|
||||||
|
#[allow(dead_code)]
|
||||||
pub fn to_string(&self) -> String {
|
pub fn to_string(&self) -> String {
|
||||||
use self::Type::*;
|
use self::Type::*;
|
||||||
match self {
|
match self {
|
||||||
Const(c) => c.to_string(),
|
Const(c) => c.to_string(),
|
||||||
Var(v) => format!("t_{}", v.0),
|
Var(v) => format!("t_{}", v.0),
|
||||||
Arrow { params, box ref ret } => {
|
Arrow { params, box ref ret } => {
|
||||||
if params.len() == 0 {
|
if params.is_empty() {
|
||||||
format!("-> {}", ret.to_string())
|
format!("-> {}", ret.to_string())
|
||||||
} else {
|
} else {
|
||||||
let mut buf = String::new();
|
let mut buf = String::new();
|
||||||
@ -124,9 +155,10 @@ impl Type {
|
|||||||
buf
|
buf
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
Compound { .. } => format!("<some compound type>")
|
Compound { .. } => "<some compound type>".to_string()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
fn from_string(string: &str) -> Option<Type> {
|
fn from_string(string: &str) -> Option<Type> {
|
||||||
Some(match string {
|
Some(match string {
|
||||||
@ -252,7 +284,7 @@ impl<'a> TypeContext<'a> {
|
|||||||
use self::TypeIdentifier::*;
|
use self::TypeIdentifier::*;
|
||||||
Ok(match name {
|
Ok(match name {
|
||||||
Singleton(TypeSingletonName { name,.. }) => {
|
Singleton(TypeSingletonName { name,.. }) => {
|
||||||
match Type::from_string(&name) {
|
match Type::from_string(name) {
|
||||||
Some(ty) => ty,
|
Some(ty) => ty,
|
||||||
None => return TypeError::new(format!("Unknown type name: {}", name))
|
None => return TypeError::new(format!("Unknown type name: {}", name))
|
||||||
}
|
}
|
||||||
@ -266,7 +298,7 @@ impl<'a> TypeContext<'a> {
|
|||||||
/// the AST to ReducedAST
|
/// the AST to ReducedAST
|
||||||
pub fn typecheck(&mut self, ast: &AST) -> Result<Type, TypeError> {
|
pub fn typecheck(&mut self, ast: &AST) -> Result<Type, TypeError> {
|
||||||
let mut returned_type = Type::Const(TypeConst::Unit);
|
let mut returned_type = Type::Const(TypeConst::Unit);
|
||||||
for statement in ast.statements.iter() {
|
for statement in ast.statements.statements.iter() {
|
||||||
returned_type = self.statement(statement)?;
|
returned_type = self.statement(statement)?;
|
||||||
}
|
}
|
||||||
Ok(returned_type)
|
Ok(returned_type)
|
||||||
@ -275,7 +307,7 @@ impl<'a> TypeContext<'a> {
|
|||||||
fn statement(&mut self, statement: &Statement) -> InferResult<Type> {
|
fn statement(&mut self, statement: &Statement) -> InferResult<Type> {
|
||||||
match &statement.kind {
|
match &statement.kind {
|
||||||
StatementKind::Expression(e) => self.expr(e),
|
StatementKind::Expression(e) => self.expr(e),
|
||||||
StatementKind::Declaration(decl) => self.decl(&decl),
|
StatementKind::Declaration(decl) => self.decl(decl),
|
||||||
StatementKind::Import(_) => Ok(ty!(Unit)),
|
StatementKind::Import(_) => Ok(ty!(Unit)),
|
||||||
StatementKind::Module(_) => Ok(ty!(Unit)),
|
StatementKind::Module(_) => Ok(ty!(Unit)),
|
||||||
}
|
}
|
||||||
@ -283,12 +315,9 @@ impl<'a> TypeContext<'a> {
|
|||||||
|
|
||||||
fn decl(&mut self, decl: &Declaration) -> InferResult<Type> {
|
fn decl(&mut self, decl: &Declaration) -> InferResult<Type> {
|
||||||
use self::Declaration::*;
|
use self::Declaration::*;
|
||||||
match decl {
|
if let Binding { name, expr, .. } = decl {
|
||||||
Binding { name, expr, .. } => {
|
|
||||||
let ty = self.expr(expr)?;
|
let ty = self.expr(expr)?;
|
||||||
self.variable_map.insert(name.clone(), ty);
|
self.variable_map.insert(name.clone(), ty);
|
||||||
},
|
|
||||||
_ => (),
|
|
||||||
}
|
}
|
||||||
Ok(ty!(Unit))
|
Ok(ty!(Unit))
|
||||||
}
|
}
|
||||||
@ -330,7 +359,8 @@ impl<'a> TypeContext<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn prefix(&mut self, op: &PrefixOp, expr: &Expression) -> InferResult<Type> {
|
fn prefix(&mut self, op: &PrefixOp, expr: &Expression) -> InferResult<Type> {
|
||||||
let tf = match op.builtin.map(|b| b.get_type()) {
|
let builtin: Option<Builtin> = TryFrom::try_from(op).ok();
|
||||||
|
let tf = match builtin.map(|b| b.get_type()) {
|
||||||
Some(ty) => ty,
|
Some(ty) => ty,
|
||||||
None => return TypeError::new("no type found")
|
None => return TypeError::new("no type found")
|
||||||
};
|
};
|
||||||
@ -340,7 +370,8 @@ impl<'a> TypeContext<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn binexp(&mut self, op: &BinOp, lhs: &Expression, rhs: &Expression) -> InferResult<Type> {
|
fn binexp(&mut self, op: &BinOp, lhs: &Expression, rhs: &Expression) -> InferResult<Type> {
|
||||||
let tf = match op.builtin.map(|b| b.get_type()) {
|
let builtin: Option<Builtin> = TryFrom::try_from(op).ok();
|
||||||
|
let tf = match builtin.map(|b| b.get_type()) {
|
||||||
Some(ty) => ty,
|
Some(ty) => ty,
|
||||||
None => return TypeError::new("no type found"),
|
None => return TypeError::new("no type found"),
|
||||||
};
|
};
|
||||||
@ -355,10 +386,11 @@ impl<'a> TypeContext<'a> {
|
|||||||
use self::IfExpressionBody::*;
|
use self::IfExpressionBody::*;
|
||||||
match (discriminator, body) {
|
match (discriminator, body) {
|
||||||
(Some(expr), SimpleConditional{ then_case, else_case }) => self.handle_simple_if(expr, then_case, else_case),
|
(Some(expr), SimpleConditional{ then_case, else_case }) => self.handle_simple_if(expr, then_case, else_case),
|
||||||
_ => TypeError::new(format!("Complex conditionals not supported"))
|
_ => TypeError::new("Complex conditionals not supported".to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::ptr_arg)]
|
||||||
fn handle_simple_if(&mut self, expr: &Expression, then_clause: &Block, else_clause: &Option<Block>) -> InferResult<Type> {
|
fn handle_simple_if(&mut self, expr: &Expression, then_clause: &Block, else_clause: &Option<Block>) -> InferResult<Type> {
|
||||||
let t1 = self.expr(expr)?;
|
let t1 = self.expr(expr)?;
|
||||||
let t2 = self.block(then_clause)?;
|
let t2 = self.block(then_clause)?;
|
||||||
@ -371,6 +403,7 @@ impl<'a> TypeContext<'a> {
|
|||||||
self.unify(t2, t3)
|
self.unify(t2, t3)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::ptr_arg)]
|
||||||
fn lambda(&mut self, params: &Vec<FormalParam>, type_anno: &Option<TypeIdentifier>, _body: &Block) -> InferResult<Type> {
|
fn lambda(&mut self, params: &Vec<FormalParam>, type_anno: &Option<TypeIdentifier>, _body: &Block) -> InferResult<Type> {
|
||||||
let argument_types: InferResult<Vec<Type>> = params.iter().map(|param: &FormalParam| {
|
let argument_types: InferResult<Vec<Type>> = params.iter().map(|param: &FormalParam| {
|
||||||
if let FormalParam { anno: Some(type_identifier), .. } = param {
|
if let FormalParam { anno: Some(type_identifier), .. } = param {
|
||||||
@ -388,7 +421,7 @@ impl<'a> TypeContext<'a> {
|
|||||||
Ok(ty!(argument_types, ret_type))
|
Ok(ty!(argument_types, ret_type))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn call(&mut self, f: &Expression, args: &Vec<InvocationArgument>) -> InferResult<Type> {
|
fn call(&mut self, f: &Expression, args: &[ InvocationArgument ]) -> InferResult<Type> {
|
||||||
let tf = self.expr(f)?;
|
let tf = self.expr(f)?;
|
||||||
let arg_types: InferResult<Vec<Type>> = args.iter().map(|ex| self.invoc(ex)).collect();
|
let arg_types: InferResult<Vec<Type>> = args.iter().map(|ex| self.invoc(ex)).collect();
|
||||||
let arg_types = arg_types?;
|
let arg_types = arg_types?;
|
||||||
@ -404,13 +437,14 @@ impl<'a> TypeContext<'a> {
|
|||||||
t_ret.clone()
|
t_ret.clone()
|
||||||
},
|
},
|
||||||
Type::Arrow { .. } => return TypeError::new("Wrong length"),
|
Type::Arrow { .. } => return TypeError::new("Wrong length"),
|
||||||
_ => return TypeError::new(format!("Not a function"))
|
_ => return TypeError::new("Not a function".to_string())
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::ptr_arg)]
|
||||||
fn block(&mut self, block: &Block) -> InferResult<Type> {
|
fn block(&mut self, block: &Block) -> InferResult<Type> {
|
||||||
let mut output = ty!(Unit);
|
let mut output = ty!(Unit);
|
||||||
for statement in block.iter() {
|
for statement in block.statements.iter() {
|
||||||
output = self.statement(statement)?;
|
output = self.statement(statement)?;
|
||||||
}
|
}
|
||||||
Ok(output)
|
Ok(output)
|
||||||
@ -432,26 +466,26 @@ impl<'a> TypeContext<'a> {
|
|||||||
(Const(ref c1), Const(ref c2)) if c1 == c2 => Ok(Const(c1.clone())), //choice of c1 is arbitrary I *think*
|
(Const(ref c1), Const(ref c2)) if c1 == c2 => Ok(Const(c1.clone())), //choice of c1 is arbitrary I *think*
|
||||||
(a @ Var(_), b @ Const(_)) => self.unify(b, a),
|
(a @ Var(_), b @ Const(_)) => self.unify(b, a),
|
||||||
(Const(ref c1), Var(ref v2)) => {
|
(Const(ref c1), Var(ref v2)) => {
|
||||||
self.unification_table.unify_var_value(v2.clone(), Some(c1.clone()))
|
self.unification_table.unify_var_value(*v2, Some(c1.clone()))
|
||||||
.or_else(|_| TypeError::new(format!("Couldn't unify {:?} and {:?}", Const(c1.clone()), Var(*v2))))?;
|
.or_else(|_| TypeError::new(format!("Couldn't unify {:?} and {:?}", Const(c1.clone()), Var(*v2))))?;
|
||||||
Ok(Const(c1.clone()))
|
Ok(Const(c1.clone()))
|
||||||
},
|
},
|
||||||
(Var(v1), Var(v2)) => {
|
(Var(v1), Var(v2)) => {
|
||||||
//TODO add occurs check
|
//TODO add occurs check
|
||||||
self.unification_table.unify_var_var(v1.clone(), v2.clone())
|
self.unification_table.unify_var_var(v1, v2)
|
||||||
.or_else(|e| {
|
.or_else(|e| {
|
||||||
println!("Unify error: {:?}", e);
|
println!("Unify error: {:?}", e);
|
||||||
TypeError::new(format!("Two type variables {:?} and {:?} couldn't unify", v1, v2))
|
TypeError::new(format!("Two type variables {:?} and {:?} couldn't unify", v1, v2))
|
||||||
})?;
|
})?;
|
||||||
Ok(Var(v1.clone())) //arbitrary decision I think
|
Ok(Var(v1)) //arbitrary decision I think
|
||||||
},
|
},
|
||||||
(a, b) => TypeError::new(format!("{:?} and {:?} do not unify", a, b)),
|
(a, b) => TypeError::new(format!("{:?} and {:?} do not unify", a, b)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fresh_type_variable(&mut self) -> TypeVar {
|
fn fresh_type_variable(&mut self) -> TypeVar {
|
||||||
let new_type_var = self.unification_table.new_key(None);
|
|
||||||
new_type_var
|
self.unification_table.new_key(None)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -462,7 +496,7 @@ mod typechecking_tests {
|
|||||||
macro_rules! assert_type_in_fresh_context {
|
macro_rules! assert_type_in_fresh_context {
|
||||||
($string:expr, $type:expr) => {
|
($string:expr, $type:expr) => {
|
||||||
let mut tc = TypeContext::new();
|
let mut tc = TypeContext::new();
|
||||||
let (ref ast, _) = crate::util::quick_ast($string);
|
let ast = &crate::util::quick_ast($string);
|
||||||
let ty = tc.typecheck(ast).unwrap();
|
let ty = tc.typecheck(ast).unwrap();
|
||||||
assert_eq!(ty, $type)
|
assert_eq!(ty, $type)
|
||||||
}
|
}
|
85  schala-lang/src/util.rs  Normal file
@@ -0,0 +1,85 @@
use std::{cmp::Eq, collections::HashMap, fmt::Write, hash::Hash};

/// Utility function for printing a comma-delimited list of things
pub(crate) fn delim_wrapped(lhs: char, rhs: char, terms: impl Iterator<Item = String>) -> String {
    let mut buf = String::new();
    write!(buf, "{}", lhs).unwrap();
    for term in terms.map(Some).intersperse(None) {
        match term {
            Some(e) => write!(buf, "{}", e).unwrap(),
            None => write!(buf, ", ").unwrap(),
        };
    }
    write!(buf, "{}", rhs).unwrap();
    buf
}

#[derive(Default, Debug)]
pub struct ScopeStack<'a, T: 'a, V: 'a, N = String>
where T: Hash + Eq
{
    parent: Option<&'a ScopeStack<'a, T, V, N>>,
    values: HashMap<T, V>,
    scope_name: Option<N>,
}

impl<'a, T, V, N> ScopeStack<'a, T, V, N>
where T: Hash + Eq
{
    pub fn new(scope_name: Option<N>) -> Self
    where T: Hash + Eq {
        ScopeStack { parent: None, values: HashMap::new(), scope_name }
    }
    pub fn insert(&mut self, key: T, value: V)
    where T: Hash + Eq {
        self.values.insert(key, value);
    }
    pub fn lookup(&self, key: &T) -> Option<&V>
    where T: Hash + Eq {
        match (self.values.get(key), self.parent) {
            (None, None) => None,
            (None, Some(parent)) => parent.lookup(key),
            (Some(value), _) => Some(value),
        }
    }

    pub fn new_scope(&'a self, scope_name: Option<N>) -> Self
    where T: Hash + Eq {
        ScopeStack { parent: Some(self), values: HashMap::default(), scope_name }
    }

    #[allow(dead_code)]
    pub fn lookup_with_scope(&self, key: &T) -> Option<(&V, Option<&N>)>
    where T: Hash + Eq {
        match (self.values.get(key), self.parent) {
            (None, None) => None,
            (None, Some(parent)) => parent.lookup_with_scope(key),
            (Some(value), _) => Some((value, self.scope_name.as_ref())),
        }
    }

    pub fn get_name(&self) -> Option<&N> {
        self.scope_name.as_ref()
    }
}

/// Quickly create an AST from a string, with no error checking. For test use only
#[cfg(test)]
pub fn quick_ast(input: &str) -> crate::ast::AST {
    let mut parser = crate::parsing::Parser::new();
    let output = parser.parse(input);
    match output {
        Ok(output) => output,
        Err(err) => {
            println!("Parse error: {}", err.msg);
            panic!();
        }
    }
}

#[allow(unused_macros)]
macro_rules! rc {
    ($string:tt) => {
        Rc::new(stringify!($string).to_string())
    };
}
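A brief sketch of how ScopeStack shadowing behaves (the variable names below are made up for illustration): a child scope borrows its parent and falls back to it on lookup misses.

    let mut globals = ScopeStack::new(Some("global".to_string()));
    globals.insert("x".to_string(), 1);

    let mut function_scope = globals.new_scope(Some("function".to_string()));
    function_scope.insert("y".to_string(), 2);

    // Misses in the child scope fall through to the parent; local entries shadow it.
    assert_eq!(function_scope.lookup(&"x".to_string()), Some(&1));
    assert_eq!(function_scope.lookup(&"y".to_string()), Some(&2));
    assert_eq!(globals.lookup(&"y".to_string()), None);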
@@ -2,19 +2,18 @@
 name = "schala-repl"
 version = "0.1.0"
 authors = ["greg <greg.shuflin@protonmail.com>"]
-edition = "2018"
+edition = "2021"

 [dependencies]
 llvm-sys = "70.0.2"
 take_mut = "0.2.2"
-itertools = "0.5.8"
-getopts = "0.2.18"
+itertools = "0.10"
 lazy_static = "0.2.8"
 maplit = "*"
 colored = "1.8"
-serde = "1.0.91"
-serde_derive = "1.0.91"
-serde_json = "1.0.15"
+serde = "1.0"
+serde_derive = "1.0"
+serde_json = "1.0"
 phf = "0.7.12"
 includedir = "0.2.0"
 linefeed = "0.6.0"
@@ -3,8 +3,5 @@ extern crate includedir_codegen;
 use includedir_codegen::Compression;

 fn main() {
-    includedir_codegen::start("WEBFILES")
-        .dir("../static", Compression::Gzip)
-        .build("static.rs")
-        .unwrap();
+    includedir_codegen::start("WEBFILES").dir("../static", Compression::Gzip).build("static.rs").unwrap();
 }
116  schala-repl/src/command_tree.rs  Normal file
@@ -0,0 +1,116 @@
use colored::*;

use crate::{
    directive_actions::DirectiveAction, language::ProgrammingLanguageInterface, InterpreterDirectiveOutput,
    Repl,
};

/// A CommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first
/// Terminal, it will use the `DirectiveAction` found there to find an appropriate function to execute,
/// and then execute it with any remaining arguments
#[derive(Clone)]
pub enum CommandTree {
    Terminal {
        name: String,
        children: Vec<CommandTree>,
        help_msg: Option<String>,
        action: DirectiveAction,
    },
    NonTerminal {
        name: String,
        children: Vec<CommandTree>,
        help_msg: Option<String>,
        action: DirectiveAction,
    },
    Top(Vec<CommandTree>),
}

impl CommandTree {
    pub fn nonterm_no_further_tab_completions(s: &str, help: Option<&str>) -> CommandTree {
        CommandTree::NonTerminal {
            name: s.to_string(),
            help_msg: help.map(|x| x.to_string()),
            children: vec![],
            action: DirectiveAction::Null,
        }
    }

    pub fn terminal(
        s: &str,
        help: Option<&str>,
        children: Vec<CommandTree>,
        action: DirectiveAction,
    ) -> CommandTree {
        CommandTree::Terminal { name: s.to_string(), help_msg: help.map(|x| x.to_string()), children, action }
    }

    pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree {
        CommandTree::NonTerminal {
            name: s.to_string(),
            help_msg: help.map(|x| x.to_string()),
            children,
            action: DirectiveAction::Null,
        }
    }

    pub fn get_cmd(&self) -> &str {
        match self {
            CommandTree::Terminal { name, .. } => name.as_str(),
            CommandTree::NonTerminal { name, .. } => name.as_str(),
            CommandTree::Top(_) => "",
        }
    }
    pub fn get_help(&self) -> &str {
        match self {
            CommandTree::Terminal { help_msg, .. } =>
                help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
            CommandTree::NonTerminal { help_msg, .. } =>
                help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
            CommandTree::Top(_) => "",
        }
    }
    pub fn get_children(&self) -> &Vec<CommandTree> {
        use CommandTree::*;
        match self {
            Terminal { children, .. } | NonTerminal { children, .. } | Top(children) => children,
        }
    }
    pub fn get_subcommands(&self) -> Vec<&str> {
        self.get_children().iter().map(|x| x.get_cmd()).collect()
    }

    pub fn perform<L: ProgrammingLanguageInterface>(
        &self,
        repl: &mut Repl<L>,
        arguments: &[&str],
    ) -> InterpreterDirectiveOutput {
        let mut dir_pointer: &CommandTree = self;
        let mut idx = 0;

        let res: Result<(DirectiveAction, usize), String> = loop {
            match dir_pointer {
                CommandTree::Top(subcommands) | CommandTree::NonTerminal { children: subcommands, .. } => {
                    let next_command = match arguments.get(idx) {
                        Some(cmd) => cmd,
                        None => break Err("Command requires arguments".to_owned()),
                    };
                    idx += 1;
                    match subcommands.iter().find(|sc| sc.get_cmd() == *next_command) {
                        Some(command_tree) => {
                            dir_pointer = command_tree;
                        }
                        None => break Err(format!("Command {} not found", next_command)),
                    };
                }
                CommandTree::Terminal { action, .. } => {
                    break Ok((action.clone(), idx));
                }
            }
        };

        match res {
            Ok((action, idx)) => action.perform(repl, &arguments[idx..]),
            Err(err) => Some(err.red().to_string()),
        }
    }
}
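A short sketch of how such a tree is meant to be read; the debug/list-passes names mirror directives defined elsewhere in this change, and nothing new is introduced. A NonTerminal groups subcommands, while the accessors drive tab completion and help output.

    let debug = CommandTree::nonterm(
        "debug",
        Some("Configure debug information"),
        vec![CommandTree::terminal("list-passes", None, vec![], DirectiveAction::ListPasses)],
    );

    assert_eq!(debug.get_cmd(), "debug");
    assert_eq!(debug.get_subcommands(), vec!["list-passes"]);
    // A node created without help text reports a placeholder string.
    assert_eq!(debug.get_children()[0].get_help(), "<no help text provided>");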
77  schala-repl/src/directive_actions.rs  Normal file
@@ -0,0 +1,77 @@
use std::fmt::Write as FmtWrite;

use crate::{
    help::help,
    language::{LangMetaRequest, LangMetaResponse, ProgrammingLanguageInterface},
    InterpreterDirectiveOutput, Repl,
};

#[derive(Debug, Clone)]
pub enum DirectiveAction {
    Null,
    Help,
    QuitProgram,
    ListPasses,
    TotalTime(bool),
    StageTime(bool),
    Doc,
}

impl DirectiveAction {
    pub fn perform<L: ProgrammingLanguageInterface>(
        &self,
        repl: &mut Repl<L>,
        arguments: &[&str],
    ) -> InterpreterDirectiveOutput {
        use DirectiveAction::*;
        match self {
            Null => None,
            Help => help(repl, arguments),
            QuitProgram => {
                repl.save_before_exit();
                ::std::process::exit(0)
            }
            ListPasses => {
                let pass_names = match repl.language_state.request_meta(LangMetaRequest::StageNames) {
                    LangMetaResponse::StageNames(names) => names,
                    _ => vec![],
                };

                let mut buf = String::new();
                for pass in pass_names.iter().map(Some).intersperse(None) {
                    match pass {
                        Some(pass) => write!(buf, "{}", pass).unwrap(),
                        None => write!(buf, " -> ").unwrap(),
                    }
                }
                Some(buf)
            }
            TotalTime(value) => {
                repl.options.show_total_time = *value;
                None
            }
            StageTime(value) => {
                repl.options.show_stage_times = *value;
                None
            }
            Doc => doc(repl, arguments),
        }
    }
}

fn doc<L: ProgrammingLanguageInterface>(
    repl: &mut Repl<L>,
    arguments: &[&str],
) -> InterpreterDirectiveOutput {
    arguments
        .get(0)
        .map(|cmd| {
            let source = cmd.to_string();
            let meta = LangMetaRequest::Docs { source };
            match repl.language_state.request_meta(meta) {
                LangMetaResponse::Docs { doc_string } => Some(doc_string),
                _ => Some("Invalid doc response".to_owned()),
            }
        })
        .unwrap_or_else(|| Some(":docs needs an argument".to_owned()))
}
68  schala-repl/src/directives.rs  Normal file
@@ -0,0 +1,68 @@
use crate::{command_tree::CommandTree, directive_actions::DirectiveAction};

pub fn directives_from_pass_names(pass_names: &[String]) -> CommandTree {
    let passes_directives: Vec<CommandTree> = pass_names
        .iter()
        .map(|pass_name| {
            if pass_name == "parsing" {
                CommandTree::nonterm(
                    pass_name,
                    None,
                    vec![
                        CommandTree::nonterm_no_further_tab_completions("compact", None),
                        CommandTree::nonterm_no_further_tab_completions("expanded", None),
                        CommandTree::nonterm_no_further_tab_completions("trace", None),
                    ],
                )
            } else {
                CommandTree::nonterm_no_further_tab_completions(pass_name, None)
            }
        })
        .collect();
    CommandTree::Top(get_list(&passes_directives, true))
}

fn get_list(passes_directives: &[CommandTree], include_help: bool) -> Vec<CommandTree> {
    use DirectiveAction::*;

    vec![
        CommandTree::terminal("exit", Some("exit the REPL"), vec![], QuitProgram),
        //TODO there should be an alias for this
        CommandTree::terminal("quit", Some("exit the REPL"), vec![], QuitProgram),
        CommandTree::terminal(
            "help",
            Some("Print this help message"),
            if include_help { get_list(passes_directives, false) } else { vec![] },
            Help,
        ),
        CommandTree::nonterm(
            "debug",
            Some("Configure debug information"),
            vec![
                CommandTree::terminal(
                    "list-passes",
                    Some("List all registered compiler passes"),
                    vec![],
                    ListPasses,
                ),
                CommandTree::nonterm(
                    "total-time",
                    None,
                    vec![
                        CommandTree::terminal("on", None, vec![], TotalTime(true)),
                        CommandTree::terminal("off", None, vec![], TotalTime(false)),
                    ],
                ),
                CommandTree::nonterm(
                    "stage-times",
                    Some("Computation time per-stage"),
                    vec![
                        CommandTree::terminal("on", None, vec![], StageTime(true)),
                        CommandTree::terminal("off", None, vec![], StageTime(false)),
                    ],
                ),
            ],
        ),
        CommandTree::terminal("doc", Some("Get language-specific help for an item"), vec![], Doc),
    ]
}
63  schala-repl/src/help.rs  Normal file
@@ -0,0 +1,63 @@
use std::fmt::Write as FmtWrite;

use colored::*;

use crate::{
    command_tree::CommandTree, language::ProgrammingLanguageInterface, InterpreterDirectiveOutput, Repl,
};

pub fn help<L: ProgrammingLanguageInterface>(
    repl: &mut Repl<L>,
    arguments: &[&str],
) -> InterpreterDirectiveOutput {
    match arguments {
        [] => global_help(repl),
        commands => {
            let dirs = repl.get_directives();
            Some(match get_directive_from_commands(commands, &dirs) {
                None => format!("Directive `{}` not found", commands.last().unwrap()),
                Some(dir) => {
                    let mut buf = String::new();
                    let cmd = dir.get_cmd();
                    let children = dir.get_children();
                    writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
                    for sub in children.iter() {
                        writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help()).unwrap();
                    }
                    buf
                }
            })
        }
    }
}

fn get_directive_from_commands<'a>(commands: &[&str], dirs: &'a CommandTree) -> Option<&'a CommandTree> {
    let mut directive_list = dirs.get_children();
    let mut matched_directive = None;
    for cmd in commands {
        let found = directive_list.iter().find(|directive| directive.get_cmd() == *cmd);
        if let Some(dir) = found {
            directive_list = dir.get_children();
        }

        matched_directive = found;
    }
    matched_directive
}

fn global_help<L: ProgrammingLanguageInterface>(repl: &mut Repl<L>) -> InterpreterDirectiveOutput {
    let mut buf = String::new();

    writeln!(buf, "{} version {}", "Schala REPL".bright_red().bold(), crate::VERSION_STRING).unwrap();
    writeln!(buf, "-----------------------").unwrap();

    for directive in repl.get_directives().get_children() {
        writeln!(buf, "{}{} - {}", repl.sigil, directive.get_cmd(), directive.get_help()).unwrap();
    }

    writeln!(buf).unwrap();
    writeln!(buf, "Language-specific help for {}", <L as ProgrammingLanguageInterface>::language_name())
        .unwrap();
    writeln!(buf, "-----------------------").unwrap();
    Some(buf)
}
@@ -1,25 +1,20 @@
-use std::time;
-use std::collections::HashSet;
+use std::{collections::HashSet, time};

 pub trait ProgrammingLanguageInterface {
-    fn get_language_name(&self) -> String;
-    fn get_source_file_suffix(&self) -> String;
+    type Config: Default + Clone;
+    fn language_name() -> String;
+    fn source_file_suffix() -> String;

-    fn run_computation(&mut self, _request: ComputationRequest) -> ComputationResponse {
-        ComputationResponse {
-            main_output: Err(format!("Computation pipeline not implemented")),
-            global_output_stats: GlobalOutputStats::default(),
-            debug_responses: vec![],
-        }
-    }
+    fn run_computation(&mut self, _request: ComputationRequest<Self::Config>) -> ComputationResponse;

     fn request_meta(&mut self, _request: LangMetaRequest) -> LangMetaResponse {
-        LangMetaResponse::Custom { kind: format!("not-implemented"), value: format!("") }
+        LangMetaResponse::Custom { kind: "not-implemented".to_owned(), value: format!("") }
     }
 }

-pub struct ComputationRequest<'a> {
+pub struct ComputationRequest<'a, T> {
     pub source: &'a str,
+    pub config: T,
     pub debug_requests: HashSet<DebugAsk>,
 }

@@ -32,7 +27,7 @@ pub struct ComputationResponse {
 #[derive(Default, Debug)]
 pub struct GlobalOutputStats {
     pub total_duration: time::Duration,
-    pub stage_durations: Vec<(String, time::Duration)>
+    pub stage_durations: Vec<(String, time::Duration)>,
 }

 #[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize, Serialize)]

@@ -41,40 +36,21 @@ pub enum DebugAsk {
     ByStage { stage_name: String, token: Option<String> },
 }

-impl DebugAsk {
-    pub fn is_for_stage(&self, name: &str) -> bool {
-        match self {
-            DebugAsk::ByStage { stage_name, .. } if stage_name == name => true,
-            _ => false
-        }
-    }
-}
-
 pub struct DebugResponse {
     pub ask: DebugAsk,
-    pub value: String
+    pub value: String,
 }

 pub enum LangMetaRequest {
     StageNames,
-    Docs {
-        source: String,
-    },
-    Custom {
-        kind: String,
-        value: String
-    },
+    Docs { source: String },
+    Custom { kind: String, value: String },
     ImmediateDebug(DebugAsk),
 }

 pub enum LangMetaResponse {
     StageNames(Vec<String>),
-    Docs {
-        doc_string: String,
-    },
-    Custom {
-        kind: String,
-        value: String
-    },
+    Docs { doc_string: String },
+    Custom { kind: String, value: String },
     ImmediateDebug(DebugResponse),
 }
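Under the revised trait, each language now has to supply run_computation itself. A rough sketch of a trivial implementor follows, assuming the surrounding schala-repl types are in scope and that ComputationResponse's fields are the three shown in the old default body; the EchoLang type is invented purely for illustration.

    struct EchoLang;

    impl ProgrammingLanguageInterface for EchoLang {
        type Config = ();

        fn language_name() -> String {
            "echo".to_string()
        }

        fn source_file_suffix() -> String {
            "echo".to_string()
        }

        fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse {
            // Hand the source text straight back as the "result" of the computation.
            ComputationResponse {
                main_output: Ok(request.source.to_string()),
                global_output_stats: GlobalOutputStats::default(),
                debug_responses: vec![],
            }
        }
    }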
@ -1,92 +1,249 @@
|
|||||||
#![feature(link_args)]
|
#![feature(box_patterns, proc_macro_hygiene, decl_macro, iter_intersperse)]
|
||||||
#![feature(slice_patterns, box_patterns, box_syntax, proc_macro_hygiene, decl_macro)]
|
|
||||||
#![feature(plugin)]
|
|
||||||
extern crate getopts;
|
|
||||||
extern crate linefeed;
|
|
||||||
extern crate itertools;
|
|
||||||
extern crate colored;
|
|
||||||
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate serde_derive;
|
extern crate serde_derive;
|
||||||
extern crate serde_json;
|
|
||||||
extern crate includedir;
|
extern crate includedir;
|
||||||
extern crate phf;
|
extern crate phf;
|
||||||
|
extern crate serde_json;
|
||||||
|
|
||||||
use std::collections::HashSet;
|
mod command_tree;
|
||||||
use std::path::Path;
|
|
||||||
use std::fs::File;
|
|
||||||
use std::io::Read;
|
|
||||||
use std::process::exit;
|
|
||||||
|
|
||||||
mod repl;
|
|
||||||
mod language;
|
mod language;
|
||||||
|
use self::command_tree::CommandTree;
|
||||||
|
mod repl_options;
|
||||||
|
use repl_options::ReplOptions;
|
||||||
|
mod directive_actions;
|
||||||
|
mod directives;
|
||||||
|
use directives::directives_from_pass_names;
|
||||||
|
mod help;
|
||||||
|
mod response;
|
||||||
|
use std::{collections::HashSet, sync::Arc};
|
||||||
|
|
||||||
pub use language::{ProgrammingLanguageInterface,
|
use colored::*;
|
||||||
ComputationRequest, ComputationResponse,
|
pub use language::{
|
||||||
LangMetaRequest, LangMetaResponse,
|
ComputationRequest, ComputationResponse, DebugAsk, DebugResponse, GlobalOutputStats, LangMetaRequest,
|
||||||
DebugResponse, DebugAsk, GlobalOutputStats};
|
LangMetaResponse, ProgrammingLanguageInterface,
|
||||||
|
};
|
||||||
|
use response::ReplResponse;
|
||||||
|
|
||||||
include!(concat!(env!("OUT_DIR"), "/static.rs"));
|
include!(concat!(env!("OUT_DIR"), "/static.rs"));
|
||||||
const VERSION_STRING: &'static str = "0.1.0";
|
const VERSION_STRING: &str = "0.1.0";
|
||||||
|
|
||||||
pub fn start_repl(langs: Vec<Box<dyn ProgrammingLanguageInterface>>) {
|
const HISTORY_SAVE_FILE: &str = ".schala_history";
|
||||||
let options = command_line_options().parse(std::env::args()).unwrap_or_else(|e| {
|
const OPTIONS_SAVE_FILE: &str = ".schala_repl";
|
||||||
println!("{:?}", e);
|
|
||||||
exit(1);
|
type InterpreterDirectiveOutput = Option<String>;
|
||||||
|
|
||||||
|
pub struct Repl<L: ProgrammingLanguageInterface> {
|
||||||
|
/// If this is the first character typed by a user into the repl, the following
|
||||||
|
/// will be interpreted as a directive to the REPL rather than a command in the
|
||||||
|
/// running programming language.
|
||||||
|
sigil: char,
|
||||||
|
line_reader: ::linefeed::interface::Interface<::linefeed::terminal::DefaultTerminal>,
|
||||||
|
language_state: L,
|
||||||
|
options: ReplOptions,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
enum PromptStyle {
|
||||||
|
Normal,
|
||||||
|
Multiline,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<L: ProgrammingLanguageInterface> Repl<L> {
|
||||||
|
pub fn new(initial_state: L) -> Self {
|
||||||
|
use linefeed::Interface;
|
||||||
|
let line_reader = Interface::new("schala-repl").unwrap();
|
||||||
|
let sigil = ':';
|
||||||
|
|
||||||
|
Repl { sigil, line_reader, language_state: initial_state, options: ReplOptions::new() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_repl(&mut self, config: L::Config) {
|
||||||
|
println!("Schala meta-interpeter version {}", VERSION_STRING);
|
||||||
|
println!("Type {} for help with the REPL", format!("{}help", self.sigil).bright_green().bold());
|
||||||
|
self.load_options();
|
||||||
|
self.handle_repl_loop(config);
|
||||||
|
self.save_before_exit();
|
||||||
|
println!("Exiting...");
|
||||||
|
}
|
||||||
|
|
||||||
|
fn load_options(&mut self) {
|
||||||
|
self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(());
|
||||||
|
match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) {
|
||||||
|
Ok(options) => {
|
||||||
|
self.options = options;
|
||||||
|
}
|
||||||
|
Err(e) => eprintln!("{}", e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_repl_loop(&mut self, config: L::Config) {
|
||||||
|
use linefeed::ReadResult::*;
|
||||||
|
|
||||||
|
'main: loop {
|
||||||
|
macro_rules! match_or_break {
|
||||||
|
($line:expr) => {
|
||||||
|
match $line {
|
||||||
|
Err(e) => {
|
||||||
|
println!("readline IO Error: {}", e);
|
||||||
|
break 'main;
|
||||||
|
}
|
||||||
|
Ok(Eof) | Ok(Signal(_)) => break 'main,
|
||||||
|
Ok(Input(ref input)) => input,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
self.update_line_reader();
|
||||||
|
let line = self.line_reader.read_line();
|
||||||
|
let input: &str = match_or_break!(line);
|
||||||
|
|
||||||
|
self.line_reader.add_history_unique(input.to_string());
|
||||||
|
let mut chars = input.chars().peekable();
|
||||||
|
let repl_responses = match chars.next() {
|
||||||
|
Some(ch) if ch == self.sigil =>
|
||||||
|
if chars.peek() == Some(&'{') {
|
||||||
|
let mut buf = String::new();
|
||||||
|
buf.push_str(input.get(2..).unwrap());
|
||||||
|
'multiline: loop {
|
||||||
|
self.set_prompt(PromptStyle::Multiline);
|
||||||
|
let new_line = self.line_reader.read_line();
|
||||||
|
let new_input = match_or_break!(new_line);
|
||||||
|
if new_input.starts_with(":}") {
|
||||||
|
break 'multiline;
|
||||||
|
} else {
|
||||||
|
buf.push_str(new_input);
|
||||||
|
buf.push('\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.handle_input(&buf, &config)
|
||||||
|
} else {
|
||||||
|
if let Some(output) = self.handle_interpreter_directive(input.get(1..).unwrap()) {
|
||||||
|
println!("{}", output);
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
},
|
||||||
|
_ => self.handle_input(input, &config),
|
||||||
|
};
|
||||||
|
|
||||||
|
for repl_response in repl_responses.iter() {
|
||||||
|
println!("{}", repl_response);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_line_reader(&mut self) {
|
||||||
|
let tab_complete_handler = TabCompleteHandler::new(self.sigil, self.get_directives());
|
||||||
|
self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
|
||||||
|
self.set_prompt(PromptStyle::Normal);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_prompt(&mut self, prompt_style: PromptStyle) {
|
||||||
|
let prompt_str = match prompt_style {
|
||||||
|
PromptStyle::Normal => ">> ",
|
||||||
|
PromptStyle::Multiline => ">| ",
|
||||||
|
};
|
||||||
|
|
||||||
|
self.line_reader.set_prompt(prompt_str).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn save_before_exit(&self) {
|
||||||
|
self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(());
|
||||||
|
self.options.save_to_file(OPTIONS_SAVE_FILE);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_interpreter_directive(&mut self, input: &str) -> InterpreterDirectiveOutput {
|
||||||
|
let arguments: Vec<&str> = input.split_whitespace().collect();
|
||||||
|
|
||||||
|
if arguments.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let directives = self.get_directives();
|
||||||
|
directives.perform(self, &arguments)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_input(&mut self, input: &str, config: &L::Config) -> Vec<ReplResponse> {
|
||||||
|
let mut debug_requests = HashSet::new();
|
||||||
|
for ask in self.options.debug_asks.iter() {
|
||||||
|
debug_requests.insert(ask.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
let request = ComputationRequest { source: input, config: config.clone(), debug_requests };
|
||||||
|
let response = self.language_state.run_computation(request);
|
||||||
|
response::handle_computation_response(response, &self.options)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_directives(&mut self) -> CommandTree {
|
||||||
|
let pass_names = match self.language_state.request_meta(LangMetaRequest::StageNames) {
|
||||||
|
LangMetaResponse::StageNames(names) => names,
|
||||||
|
_ => vec![],
|
||||||
|
};
|
||||||
|
|
||||||
|
directives_from_pass_names(&pass_names)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct TabCompleteHandler {
|
||||||
|
sigil: char,
|
||||||
|
top_level_commands: CommandTree,
|
||||||
|
}
|
||||||
|
|
||||||
|
use linefeed::{
|
||||||
|
complete::{Completer, Completion},
|
||||||
|
terminal::Terminal,
|
||||||
|
};
|
||||||
|
|
||||||
|
impl TabCompleteHandler {
|
||||||
|
fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler {
|
||||||
|
TabCompleteHandler { top_level_commands, sigil }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: Terminal> Completer<T> for TabCompleteHandler {
|
||||||
|
fn complete(
|
||||||
|
&self,
|
||||||
|
word: &str,
|
||||||
|
prompter: &::linefeed::prompter::Prompter<T>,
|
||||||
|
start: usize,
|
||||||
|
_end: usize,
|
||||||
|
) -> Option<Vec<Completion>> {
|
||||||
|
let line = prompter.buffer();
|
||||||
|
|
||||||
|
if !line.starts_with(self.sigil) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace();
|
||||||
|
let mut completions = Vec::new();
|
||||||
|
let mut command_tree: Option<&CommandTree> = Some(&self.top_level_commands);
|
||||||
|
|
||||||
|
loop {
|
||||||
|
match words.next() {
|
||||||
|
None => {
|
||||||
|
let top = matches!(command_tree, Some(CommandTree::Top(_)));
|
||||||
|
let word = if top { word.get(1..).unwrap() } else { word };
|
||||||
|
for cmd in command_tree.map(|x| x.get_subcommands()).unwrap_or_default().into_iter() {
|
||||||
|
if cmd.starts_with(word) {
|
||||||
|
completions.push(Completion {
|
||||||
|
completion: format!("{}{}", if top { ":" } else { "" }, cmd),
|
||||||
|
display: Some(cmd.to_string()),
|
||||||
|
suffix: ::linefeed::complete::Suffix::Some(' '),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
Some(s) => {
|
||||||
|
let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
|
||||||
|
CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
|
||||||
|
CommandTree::NonTerminal { children, .. } =>
|
||||||
|
children.iter().find(|c| c.get_cmd() == s),
|
||||||
|
CommandTree::Terminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
|
||||||
});
|
});
|
||||||
|
command_tree = new_ptr;
|
||||||
if options.opt_present("help") {
|
|
||||||
println!("{}", command_line_options().usage("Schala metainterpreter"));
|
|
||||||
exit(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
match options.free[..] {
|
|
||||||
[] | [_] => {
|
|
||||||
let mut repl = repl::Repl::new(langs);
|
|
||||||
repl.run_repl();
|
|
||||||
}
|
}
|
||||||
[_, ref filename, ..] => {
|
|
||||||
run_noninteractive(filename, langs);
|
|
||||||
}
|
}
|
||||||
};
|
Some(completions)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_noninteractive(filename: &str, languages: Vec<Box<dyn ProgrammingLanguageInterface>>) {
|
|
||||||
let path = Path::new(filename);
|
|
||||||
let ext = path.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
|
|
||||||
println!("Source file lacks extension");
|
|
||||||
exit(1);
|
|
||||||
});
|
|
||||||
let mut language = Box::new(languages.into_iter().find(|lang| lang.get_source_file_suffix() == ext)
|
|
||||||
.unwrap_or_else(|| {
|
|
||||||
println!("Extension .{} not recognized", ext);
|
|
||||||
exit(1);
|
|
||||||
}));
|
|
||||||
|
|
||||||
let mut source_file = File::open(path).unwrap();
|
|
||||||
let mut buffer = String::new();
|
|
||||||
source_file.read_to_string(&mut buffer).unwrap();
|
|
||||||
|
|
||||||
let request = ComputationRequest {
|
|
||||||
source: &buffer,
|
|
||||||
debug_requests: HashSet::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let response = language.run_computation(request);
|
|
||||||
match response.main_output {
|
|
||||||
Ok(s) => println!("{}", s),
|
|
||||||
Err(s) => println!("{}", s)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
fn command_line_options() -> getopts::Options {
|
|
||||||
let mut options = getopts::Options::new();
|
|
||||||
options.optflag("h",
|
|
||||||
"help",
|
|
||||||
"Show help text");
|
|
||||||
options.optflag("w",
|
|
||||||
"webapp",
|
|
||||||
"Start up web interpreter");
|
|
||||||
options
|
|
||||||
}
|
}
|
||||||
|
@ -1,99 +0,0 @@
|
|||||||
use super::{Repl, InterpreterDirectiveOutput};
use crate::repl::directive_actions::DirectiveAction;
use colored::*;

/// A CommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first
/// Terminal, it will use the `DirectiveAction` found there to find an appropriate function to execute,
/// and then execute it with any remaining arguments
#[derive(Clone)]
pub enum CommandTree {
    Terminal {
        name: String,
        children: Vec<CommandTree>,
        help_msg: Option<String>,
        action: DirectiveAction,
    },
    NonTerminal {
        name: String,
        children: Vec<CommandTree>,
        help_msg: Option<String>,
        action: DirectiveAction,
    },
    Top(Vec<CommandTree>),
}

impl CommandTree {
    pub fn nonterm_no_further_tab_completions(s: &str, help: Option<&str>) -> CommandTree {
        CommandTree::NonTerminal { name: s.to_string(), help_msg: help.map(|x| x.to_string()), children: vec![], action: DirectiveAction::Null }
    }

    pub fn terminal(s: &str, help: Option<&str>, children: Vec<CommandTree>, action: DirectiveAction) -> CommandTree {
        CommandTree::Terminal { name: s.to_string(), help_msg: help.map(|x| x.to_string()), children, action }
    }

    pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree {
        CommandTree::NonTerminal {
            name: s.to_string(),
            help_msg: help.map(|x| x.to_string()),
            children,
            action: DirectiveAction::Null,
        }
    }

    pub fn get_cmd(&self) -> &str {
        match self {
            CommandTree::Terminal { name, .. } => name.as_str(),
            CommandTree::NonTerminal { name, .. } => name.as_str(),
            CommandTree::Top(_) => "",
        }
    }

    pub fn get_help(&self) -> &str {
        match self {
            CommandTree::Terminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
            CommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
            CommandTree::Top(_) => "",
        }
    }

    pub fn get_children(&self) -> &Vec<CommandTree> {
        use CommandTree::*;
        match self {
            Terminal { children, .. } |
            NonTerminal { children, .. } |
            Top(children) => children,
        }
    }

    pub fn get_subcommands(&self) -> Vec<&str> {
        self.get_children().iter().map(|x| x.get_cmd()).collect()
    }

    pub fn perform(&self, repl: &mut Repl, arguments: &Vec<&str>) -> InterpreterDirectiveOutput {
        let mut dir_pointer: &CommandTree = self;
        let mut idx = 0;

        let res: Result<(DirectiveAction, usize), String> = loop {
            match dir_pointer {
                CommandTree::Top(subcommands) | CommandTree::NonTerminal { children: subcommands, .. } => {
                    let next_command = match arguments.get(idx) {
                        Some(cmd) => cmd,
                        None => break Err(format!("Command requires arguments")),
                    };
                    idx += 1;
                    match subcommands.iter().find(|sc| sc.get_cmd() == *next_command) {
                        Some(command_tree) => {
                            dir_pointer = command_tree;
                        },
                        None => break Err(format!("Command {} not found", next_command)),
                    };
                },
                CommandTree::Terminal { action, .. } => {
                    break Ok((action.clone(), idx));
                },
            }
        };

        match res {
            Ok((action, idx)) => action.perform(repl, &arguments[idx..]),
            Err(err) => Some(err.red().to_string()),
        }
    }
}
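The doc comment above describes the parsing strategy: walk the tree child-by-child until the first Terminal, then hand any remaining arguments to its DirectiveAction. A minimal sketch of that walk (not part of the diff; it assumes the CommandTree and DirectiveAction items from this file are in scope):

// Sketch only: resolve a command path by matching get_cmd() against children at each level,
// the same lookup perform() does before calling action.perform(repl, &arguments[idx..]).
fn resolve<'a>(tree: &'a CommandTree, args: &[&str]) -> Option<&'a CommandTree> {
    let mut node = tree;
    for arg in args {
        node = node.get_children().iter().find(|c| c.get_cmd() == *arg)?;
    }
    Some(node)
}

fn demo() {
    let tree = CommandTree::Top(vec![CommandTree::nonterm(
        "debug",
        Some("Configure debug information"),
        vec![CommandTree::terminal("list-passes", None, vec![], DirectiveAction::Null)],
    )]);
    // ":debug list-passes" stops at the Terminal node; its action would then be performed.
    assert_eq!(resolve(&tree, &["debug", "list-passes"]).map(|t| t.get_cmd()), Some("list-passes"));
}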
@ -1,133 +0,0 @@
use super::{Repl, InterpreterDirectiveOutput};
use crate::repl::help::help;
use crate::language::{LangMetaRequest, LangMetaResponse, DebugAsk, DebugResponse};
use itertools::Itertools;
use std::fmt::Write as FmtWrite;

#[derive(Debug, Clone)]
pub enum DirectiveAction {
    Null,
    Help,
    QuitProgram,
    ListPasses,
    ShowImmediate,
    Show,
    Hide,
    TotalTimeOff,
    TotalTimeOn,
    StageTimeOff,
    StageTimeOn,
    Doc,
}

impl DirectiveAction {
    pub fn perform(&self, repl: &mut Repl, arguments: &[&str]) -> InterpreterDirectiveOutput {
        use DirectiveAction::*;
        match self {
            Null => None,
            Help => help(repl, arguments),
            QuitProgram => {
                repl.save_before_exit();
                ::std::process::exit(0)
            },
            ListPasses => {
                let language_state = repl.get_cur_language_state();
                let pass_names = match language_state.request_meta(LangMetaRequest::StageNames) {
                    LangMetaResponse::StageNames(names) => names,
                    _ => vec![],
                };

                let mut buf = String::new();
                for pass in pass_names.iter().map(|name| Some(name)).intersperse(None) {
                    match pass {
                        Some(pass) => write!(buf, "{}", pass).unwrap(),
                        None => write!(buf, " -> ").unwrap(),
                    }
                }
                Some(buf)
            },
            ShowImmediate => {
                let cur_state = repl.get_cur_language_state();
                let stage_name = match arguments.get(0) {
                    Some(s) => s.to_string(),
                    None => return Some(format!("Must specify a thing to debug")),
                };
                let meta = LangMetaRequest::ImmediateDebug(DebugAsk::ByStage { stage_name: stage_name.clone(), token: None });
                let meta_response = cur_state.request_meta(meta);

                let response = match meta_response {
                    LangMetaResponse::ImmediateDebug(DebugResponse { ask, value }) => match ask {
                        DebugAsk::ByStage { stage_name: ref this_stage_name, .. } if *this_stage_name == stage_name => value,
                        _ => return Some(format!("Wrong debug stage")),
                    },
                    _ => return Some(format!("Invalid language meta response")),
                };
                Some(response)
            },
            Show => {
                let this_stage_name = match arguments.get(0) {
                    Some(s) => s.to_string(),
                    None => return Some(format!("Must specify a stage to show")),
                };
                let token = arguments.get(1).map(|s| s.to_string());
                repl.options.debug_asks.retain(|ask| match ask {
                    DebugAsk::ByStage { stage_name, .. } if *stage_name == this_stage_name => false,
                    _ => true,
                });

                let ask = DebugAsk::ByStage { stage_name: this_stage_name, token };
                repl.options.debug_asks.insert(ask);
                None
            },
            Hide => {
                let stage_name_to_remove = match arguments.get(0) {
                    Some(s) => s.to_string(),
                    None => return Some(format!("Must specify a stage to hide")),
                };
                repl.options.debug_asks.retain(|ask| match ask {
                    DebugAsk::ByStage { stage_name, .. } if *stage_name == stage_name_to_remove => false,
                    _ => true,
                });
                None
            },
            TotalTimeOff => total_time_off(repl, arguments),
            TotalTimeOn => total_time_on(repl, arguments),
            StageTimeOff => stage_time_off(repl, arguments),
            StageTimeOn => stage_time_on(repl, arguments),
            Doc => doc(repl, arguments),
        }
    }
}

fn total_time_on(repl: &mut Repl, _: &[&str]) -> InterpreterDirectiveOutput {
    repl.options.show_total_time = true;
    None
}

fn total_time_off(repl: &mut Repl, _: &[&str]) -> InterpreterDirectiveOutput {
    repl.options.show_total_time = false;
    None
}

fn stage_time_on(repl: &mut Repl, _: &[&str]) -> InterpreterDirectiveOutput {
    repl.options.show_stage_times = true;
    None
}

fn stage_time_off(repl: &mut Repl, _: &[&str]) -> InterpreterDirectiveOutput {
    repl.options.show_stage_times = false;
    None
}

fn doc(repl: &mut Repl, arguments: &[&str]) -> InterpreterDirectiveOutput {
    arguments.get(0).map(|cmd| {
        let source = cmd.to_string();
        let meta = LangMetaRequest::Docs { source };
        let cur_state = repl.get_cur_language_state();
        match cur_state.request_meta(meta) {
            LangMetaResponse::Docs { doc_string } => Some(doc_string),
            _ => Some(format!("Invalid doc response")),
        }
    }).unwrap_or(Some(format!(":docs needs an argument")))
}
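For reference, a condensed sketch (not from the diff) of the request/response round trip that the ShowImmediate arm above performs, assuming the same language-meta types are in scope:

// Sketch only: ask the current language backend for immediate debug output from one stage.
fn show_stage_once(repl: &mut Repl, stage: &str) -> InterpreterDirectiveOutput {
    let ask = DebugAsk::ByStage { stage_name: stage.to_string(), token: None };
    let meta_response = repl.get_cur_language_state().request_meta(LangMetaRequest::ImmediateDebug(ask));
    match meta_response {
        // The real code above also checks that the echoed `ask` names the stage it requested.
        LangMetaResponse::ImmediateDebug(DebugResponse { value, .. }) => Some(value),
        _ => Some("Invalid language meta response".to_string()),
    }
}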
@ -1,55 +0,0 @@
use crate::repl::command_tree::CommandTree;
use crate::repl::directive_actions::DirectiveAction;

pub fn directives_from_pass_names(pass_names: &Vec<String>) -> CommandTree {
    let passes_directives: Vec<CommandTree> = pass_names.iter()
        .map(|pass_name| {
            if pass_name == "parsing" {
                CommandTree::nonterm(pass_name, None, vec![
                    CommandTree::nonterm_no_further_tab_completions("compact", None),
                    CommandTree::nonterm_no_further_tab_completions("expanded", None),
                    CommandTree::nonterm_no_further_tab_completions("trace", None),
                ])
            } else {
                CommandTree::nonterm_no_further_tab_completions(pass_name, None)
            }
        })
        .collect();
    CommandTree::Top(get_list(&passes_directives, true))
}

fn get_list(passes_directives: &Vec<CommandTree>, include_help: bool) -> Vec<CommandTree> {
    use DirectiveAction::*;

    vec![
        CommandTree::terminal("exit", Some("exit the REPL"), vec![], QuitProgram),
        CommandTree::terminal("quit", Some("exit the REPL"), vec![], QuitProgram),
        CommandTree::terminal("help", Some("Print this help message"), if include_help { get_list(passes_directives, false) } else { vec![] }, Help),
        CommandTree::nonterm("debug",
            Some("Configure debug information"),
            vec![
                CommandTree::terminal("list-passes", Some("List all registered compiler passes"), vec![], ListPasses),
                CommandTree::terminal("show-immediate", None, passes_directives.clone(), ShowImmediate),
                CommandTree::terminal("show", Some("Show debug output for a specific pass"), passes_directives.clone(), Show),
                CommandTree::terminal("hide", Some("Hide debug output for a specific pass"), passes_directives.clone(), Hide),
                CommandTree::nonterm("total-time", None, vec![
                    CommandTree::terminal("on", None, vec![], TotalTimeOn),
                    CommandTree::terminal("off", None, vec![], TotalTimeOff),
                ]),
                CommandTree::nonterm("stage-times", Some("Computation time per-stage"), vec![
                    CommandTree::terminal("on", None, vec![], StageTimeOn),
                    CommandTree::terminal("off", None, vec![], StageTimeOff),
                ])
            ]
        ),
        CommandTree::nonterm("lang",
            Some("switch between languages, or go directly to a language by name"),
            vec![
                CommandTree::nonterm_no_further_tab_completions("next", None),
                CommandTree::nonterm_no_further_tab_completions("prev", None),
                CommandTree::nonterm("go", None, vec![]),
            ]
        ),
        CommandTree::terminal("doc", Some("Get language-specific help for an item"), vec![], Doc),
    ]
}
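Taken together, the tree built here is what backs directives such as `:help`, `:debug show <pass>`, and `:lang next` at the prompt. A small sketch (not from the diff) of what the top level exposes:

// Sketch only: the Top node's immediate children are the completable top-level directives.
fn demo() {
    let passes = vec!["tokenizing".to_string(), "parsing".to_string()];
    let tree = directives_from_pass_names(&passes);
    // Expected, given get_list() above: ["exit", "quit", "help", "debug", "lang", "doc"]
    println!("{:?}", tree.get_subcommands());
}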
@ -1,59 +0,0 @@
use std::fmt::Write as FmtWrite;

use colored::*;
use super::command_tree::CommandTree;
use super::{Repl, InterpreterDirectiveOutput};

pub fn help(repl: &mut Repl, arguments: &[&str]) -> InterpreterDirectiveOutput {
    match arguments {
        [] => return global_help(repl),
        commands => {
            let dirs = repl.get_directives();
            Some(match get_directive_from_commands(commands, &dirs) {
                None => format!("Directive `{}` not found", commands.last().unwrap()),
                Some(dir) => {
                    let mut buf = String::new();
                    let cmd = dir.get_cmd();
                    let children = dir.get_children();
                    writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
                    for sub in children.iter() {
                        writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help()).unwrap();
                    }
                    buf
                }
            })
        }
    }
}

fn get_directive_from_commands<'a>(commands: &[&str], dirs: &'a CommandTree) -> Option<&'a CommandTree> {
    let mut directive_list = dirs.get_children();
    let mut matched_directive = None;
    for cmd in commands {
        let found = directive_list.iter().find(|directive| directive.get_cmd() == *cmd);
        if let Some(dir) = found {
            directive_list = dir.get_children();
        }

        matched_directive = found;
    }
    matched_directive
}

fn global_help(repl: &mut Repl) -> InterpreterDirectiveOutput {
    let mut buf = String::new();
    let sigil = repl.interpreter_directive_sigil;

    writeln!(buf, "{} version {}", "Schala REPL".bright_red().bold(), crate::VERSION_STRING).unwrap();
    writeln!(buf, "-----------------------").unwrap();

    for directive in repl.get_directives().get_children() {
        writeln!(buf, "{}{} - {}", sigil, directive.get_cmd(), directive.get_help()).unwrap();
    }

    let ref lang = repl.get_cur_language_state();
    writeln!(buf, "").unwrap();
    writeln!(buf, "Language-specific help for {}", lang.get_language_name()).unwrap();
    writeln!(buf, "-----------------------").unwrap();
    Some(buf)
}
@ -1,251 +0,0 @@
use std::sync::Arc;
use std::collections::HashSet;

use crate::language::{ProgrammingLanguageInterface,
    ComputationRequest, LangMetaResponse, LangMetaRequest};

mod command_tree;
use self::command_tree::CommandTree;
mod repl_options;
use repl_options::ReplOptions;
mod directive_actions;
mod directives;
use directives::directives_from_pass_names;
mod help;
mod response;
use response::ReplResponse;

const HISTORY_SAVE_FILE: &'static str = ".schala_history";
const OPTIONS_SAVE_FILE: &'static str = ".schala_repl";

type InterpreterDirectiveOutput = Option<String>;

pub struct Repl {
    pub interpreter_directive_sigil: char,
    line_reader: ::linefeed::interface::Interface<::linefeed::terminal::DefaultTerminal>,
    language_states: Vec<Box<dyn ProgrammingLanguageInterface>>,
    options: ReplOptions,
}

#[derive(Clone)]
enum PromptStyle {
    Normal,
    Multiline
}

impl Repl {
    pub fn new(initial_states: Vec<Box<dyn ProgrammingLanguageInterface>>) -> Repl {
        use linefeed::Interface;
        let line_reader = Interface::new("schala-repl").unwrap();
        let interpreter_directive_sigil = ':';

        Repl {
            interpreter_directive_sigil,
            line_reader,
            language_states: initial_states,
            options: ReplOptions::new(),
        }
    }

    pub fn run_repl(&mut self) {
        println!("Schala MetaInterpreter version {}", crate::VERSION_STRING);
        println!("Type {}help for help with the REPL", self.interpreter_directive_sigil);
        self.load_options();
        self.handle_repl_loop();
        self.save_before_exit();
        println!("Exiting...");
    }

    fn load_options(&mut self) {
        self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(());
        match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) {
            Ok(options) => {
                self.options = options;
            },
            Err(()) => ()
        };
    }

    fn handle_repl_loop(&mut self) {
        use linefeed::ReadResult::*;
        let sigil = self.interpreter_directive_sigil;

        'main: loop {
            macro_rules! match_or_break {
                ($line:expr) => {
                    match $line {
                        Err(e) => {
                            println!("readline IO Error: {}", e);
                            break 'main;
                        },
                        Ok(Eof) | Ok(Signal(_)) => break 'main,
                        Ok(Input(ref input)) => input,
                    }
                }
            }
            self.update_line_reader();
            let line = self.line_reader.read_line();
            let input: &str = match_or_break!(line);

            self.line_reader.add_history_unique(input.to_string());
            let mut chars = input.chars().peekable();
            let repl_responses = match chars.nth(0) {
                Some(ch) if ch == sigil => {
                    if chars.peek() == Some(&'{') {
                        let mut buf = String::new();
                        buf.push_str(input.get(2..).unwrap());
                        'multiline: loop {
                            self.set_prompt(PromptStyle::Multiline);
                            let new_line = self.line_reader.read_line();
                            let new_input = match_or_break!(new_line);
                            if new_input.starts_with(":}") {
                                break 'multiline;
                            } else {
                                buf.push_str(new_input);
                                buf.push_str("\n");
                            }
                        }
                        self.handle_input(&buf)
                    } else {
                        match self.handle_interpreter_directive(input) {
                            Some(directive_output) => println!("<> {}", directive_output),
                            None => (),
                        }
                        continue
                    }
                },
                _ => self.handle_input(input)
            };

            for repl_response in repl_responses.iter() {
                println!("{}", repl_response);
            }
        }
    }

    fn update_line_reader(&mut self) {
        let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, self.get_directives());
        self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
        self.set_prompt(PromptStyle::Normal);
    }

    fn set_prompt(&mut self, prompt_style: PromptStyle) {
        let prompt_str = match prompt_style {
            PromptStyle::Normal => ">> ".to_string(),
            PromptStyle::Multiline => ">| ".to_string(),
        };

        self.line_reader.set_prompt(&prompt_str).unwrap();
    }

    fn save_before_exit(&self) {
        self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(());
        self.options.save_to_file(OPTIONS_SAVE_FILE);
    }

    fn handle_interpreter_directive(&mut self, input: &str) -> InterpreterDirectiveOutput {
        let mut iter = input.chars();
        iter.next();
        let arguments: Vec<&str> = iter
            .as_str()
            .split_whitespace()
            .collect();

        if arguments.len() < 1 {
            return None;
        }

        let directives = self.get_directives();
        directives.perform(self, &arguments)
    }

    fn get_cur_language_state(&mut self) -> &mut Box<dyn ProgrammingLanguageInterface> {
        //TODO this is obviously not complete
        &mut self.language_states[0]
    }

    fn handle_input(&mut self, input: &str) -> Vec<ReplResponse> {
        let mut debug_requests = HashSet::new();
        for ask in self.options.debug_asks.iter() {
            debug_requests.insert(ask.clone());
        }

        let request = ComputationRequest { source: input, debug_requests };
        let ref mut language_state = self.get_cur_language_state();
        let response = language_state.run_computation(request);
        response::handle_computation_response(response, &self.options)
    }

    fn get_directives(&mut self) -> CommandTree {
        let language_state = self.get_cur_language_state();
        let pass_names = match language_state.request_meta(LangMetaRequest::StageNames) {
            LangMetaResponse::StageNames(names) => names,
            _ => vec![],
        };

        directives_from_pass_names(&pass_names)
    }
}


struct TabCompleteHandler {
    sigil: char,
    top_level_commands: CommandTree,
}

use linefeed::complete::{Completion, Completer};
use linefeed::terminal::Terminal;

impl TabCompleteHandler {
    fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler {
        TabCompleteHandler {
            top_level_commands,
            sigil,
        }
    }
}

impl<T: Terminal> Completer<T> for TabCompleteHandler {
    fn complete(&self, word: &str, prompter: &::linefeed::prompter::Prompter<T>, start: usize, _end: usize) -> Option<Vec<Completion>> {
        let line = prompter.buffer();

        if !line.starts_with(self.sigil) {
            return None;
        }

        let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace();
        let mut completions = Vec::new();
        let mut command_tree: Option<&CommandTree> = Some(&self.top_level_commands);

        loop {
            match words.next() {
                None => {
                    let top = match command_tree {
                        Some(CommandTree::Top(_)) => true,
                        _ => false
                    };
                    let word = if top { word.get(1..).unwrap() } else { word };
                    for cmd in command_tree.map(|x| x.get_subcommands()).unwrap_or(vec![]).into_iter() {
                        if cmd.starts_with(word) {
                            completions.push(Completion {
                                completion: format!("{}{}", if top { ":" } else { "" }, cmd),
                                display: Some(cmd.to_string()),
                                suffix: ::linefeed::complete::Suffix::Some(' ')
                            })
                        }
                    }
                    break;
                },
                Some(s) => {
                    let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
                        CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
                        CommandTree::NonTerminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
                        CommandTree::Terminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
                    });
                    command_tree = new_ptr;
                }
            }
        }
        Some(completions)
    }
}
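A small sketch (not from the diff) of what handle_interpreter_directive sees for a typical directive line, before handing the pieces to CommandTree::perform:

// Sketch only: strip the ':' sigil and split on whitespace, as handle_interpreter_directive does.
fn demo() {
    let input = ":debug show parsing";
    let mut iter = input.chars();
    iter.next(); // skip the sigil
    let arguments: Vec<&str> = iter.as_str().split_whitespace().collect();
    assert_eq!(arguments, vec!["debug", "show", "parsing"]);
    // get_directives().perform(self, &arguments) then walks the CommandTree shown earlier.
}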
@ -1,47 +0,0 @@
use crate::language::DebugAsk;

use std::io::{Read, Write};
use std::collections::HashSet;
use std::fs::File;

#[derive(Serialize, Deserialize)]
pub struct ReplOptions {
    pub debug_asks: HashSet<DebugAsk>,
    pub show_total_time: bool,
    pub show_stage_times: bool,
}

impl ReplOptions {
    pub fn new() -> ReplOptions {
        ReplOptions {
            debug_asks: HashSet::new(),
            show_total_time: true,
            show_stage_times: false,
        }
    }

    pub fn save_to_file(&self, filename: &str) {
        let res = File::create(filename)
            .and_then(|mut file| {
                let buf = crate::serde_json::to_string(self).unwrap();
                file.write_all(buf.as_bytes())
            });
        if let Err(err) = res {
            println!("Error saving {} file {}", filename, err);
        }
    }

    pub fn load_from_file(filename: &str) -> Result<ReplOptions, ()> {
        File::open(filename)
            .and_then(|mut file| {
                let mut contents = String::new();
                file.read_to_string(&mut contents)?;
                Ok(contents)
            })
            .and_then(|contents| {
                let output: ReplOptions = crate::serde_json::from_str(&contents)?;
                Ok(output)
            })
            .map_err(|_| ())
    }
}
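A sketch (not from the diff) of the save/load round trip; with the serde derive above, the defaults serialize to JSON along the lines of {"debug_asks":[],"show_total_time":true,"show_stage_times":false}:

// Sketch only: persist options to the same file the REPL uses and read them back.
fn demo() {
    let opts = ReplOptions::new();
    opts.save_to_file(".schala_repl");
    let reloaded = ReplOptions::load_from_file(".schala_repl").expect("options file should parse");
    assert!(reloaded.show_total_time && !reloaded.show_stage_times);
}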
@ -1,67 +0,0 @@
use colored::*;
use std::fmt;
use std::fmt::Write;

use super::ReplOptions;
use crate::language::{DebugAsk, ComputationResponse};

pub struct ReplResponse {
    label: Option<String>,
    text: String,
    color: Option<Color>
}

impl fmt::Display for ReplResponse {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut buf = String::new();
        if let Some(ref label) = self.label {
            write!(buf, "({})", label).unwrap();
        }
        write!(buf, "=> {}", self.text).unwrap();
        write!(f, "{}", match self.color {
            Some(c) => buf.color(c),
            None => buf.normal()
        })
    }
}


pub fn handle_computation_response(response: ComputationResponse, options: &ReplOptions) -> Vec<ReplResponse> {
    let mut responses = vec![];

    if options.show_total_time {
        responses.push(ReplResponse {
            label: Some("Total time".to_string()),
            text: format!("{:?}", response.global_output_stats.total_duration),
            color: None,
        });
    }

    if options.show_stage_times {
        responses.push(ReplResponse {
            label: Some("Stage times".to_string()),
            text: format!("{:?}", response.global_output_stats.stage_durations),
            color: None,
        });
    }

    for debug_resp in response.debug_responses {
        let stage_name = match debug_resp.ask {
            DebugAsk::ByStage { stage_name, .. } => stage_name,
            _ => continue,
        };
        responses.push(ReplResponse {
            label: Some(stage_name.to_string()),
            text: debug_resp.value,
            color: Some(Color::Red),
        });
    }

    responses.push(match response.main_output {
        Ok(s) => ReplResponse { label: None, text: s, color: None },
        Err(e) => ReplResponse { label: Some("Error".to_string()), text: e, color: Some(Color::Red) },
    });

    responses
}
schala-repl/src/repl_options.rs (new file, 43 lines)
@ -0,0 +1,43 @@
use std::{
    collections::HashSet,
    fs::File,
    io::{self, Read, Write},
};

use crate::language::DebugAsk;

#[derive(Serialize, Deserialize)]
pub struct ReplOptions {
    pub debug_asks: HashSet<DebugAsk>,
    pub show_total_time: bool,
    pub show_stage_times: bool,
}

impl ReplOptions {
    pub fn new() -> ReplOptions {
        ReplOptions { debug_asks: HashSet::new(), show_total_time: true, show_stage_times: false }
    }

    pub fn save_to_file(&self, filename: &str) {
        let res = File::create(filename).and_then(|mut file| {
            let buf = crate::serde_json::to_string(self).unwrap();
            file.write_all(buf.as_bytes())
        });
        if let Err(err) = res {
            eprintln!("Error saving {} file {}", filename, err);
        }
    }

    pub fn load_from_file(filename: &str) -> Result<ReplOptions, io::Error> {
        File::open(filename)
            .and_then(|mut file| {
                let mut contents = String::new();
                file.read_to_string(&mut contents)?;
                Ok(contents)
            })
            .and_then(|contents| {
                let output: ReplOptions = crate::serde_json::from_str(&contents)?;
                Ok(output)
            })
    }
}
schala-repl/src/response.rs (new file, 74 lines)
@ -0,0 +1,74 @@
use std::{fmt, fmt::Write};

use colored::*;

use crate::{
    language::{ComputationResponse, DebugAsk},
    ReplOptions,
};

pub struct ReplResponse {
    label: Option<String>,
    text: String,
    color: Option<Color>,
}

impl fmt::Display for ReplResponse {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut buf = String::new();
        if let Some(ref label) = self.label {
            write!(buf, "({})", label).unwrap();
        }
        write!(buf, "=> {}", self.text).unwrap();
        write!(
            f,
            "{}",
            match self.color {
                Some(c) => buf.color(c),
                None => buf.normal(),
            }
        )
    }
}

pub fn handle_computation_response(
    response: ComputationResponse,
    options: &ReplOptions,
) -> Vec<ReplResponse> {
    let mut responses = vec![];

    if options.show_total_time {
        responses.push(ReplResponse {
            label: Some("Total time".to_string()),
            text: format!("{:?}", response.global_output_stats.total_duration),
            color: None,
        });
    }

    if options.show_stage_times {
        responses.push(ReplResponse {
            label: Some("Stage times".to_string()),
            text: format!("{:?}", response.global_output_stats.stage_durations),
            color: None,
        });
    }

    for debug_resp in response.debug_responses {
        let stage_name = match debug_resp.ask {
            DebugAsk::ByStage { stage_name, .. } => stage_name,
            _ => continue,
        };
        responses.push(ReplResponse {
            label: Some(stage_name.to_string()),
            text: debug_resp.value,
            color: Some(Color::Red),
        });
    }

    responses.push(match response.main_output {
        Ok(s) => ReplResponse { label: None, text: s, color: None },
        Err(e) => ReplResponse { label: Some("Error".to_string()), text: e, color: Some(Color::Red) },
    });

    responses
}
@ -6,36 +6,37 @@ fn main() {
 }
 
-@annotations are with @-
+@annotations use the @ sigil
 
 // variable expressions
-var a: I32 = 20
-const b: String = 20
+//variable declaration works like Rust
+let a: I32 = 20
+let mut b: String = 20
 
 there(); can(); be(); multiple(); statements(); per_line();
 
 //string interpolation
-const yolo = "I have ${a + b} people in my house"
+// maybe
+let yolo = "I have ${a + b} people in my house"
 
-// let expressions ??? not sure if I want this
+// let expressions
 let a = 10, b = 20, c = 30 in a + b + c
 
 //list literal
-const q = [1,2,3,4]
+let q = [1,2,3,4]
 
-//lambda literal
-q.map({|item| item * 100 })
+//lambda literal - uses haskell-ish syntax
+q.map(\(item) { item * 100 })
 
 fn yolo(a: MyType, b: YourType): ReturnType<Param1, Param2> {
   if a == 20 {
     return "early"
   }
-  var sex = 20
-  sex
 }
 
 /* for/while loop topics */
+//TODO I can probably get away with having one of `for`, `while`
 
 //infinite loop
 while {
@ -70,13 +71,13 @@ fn main() {
 
 /* conditionals/pattern matching */
 
-// "is" operator for "does this pattern match"
+// `is` functions as an operator asking "does this pattern match"
 
 x is Some(t) // type bool
 
 if x {
   is Some(t) => {
-  },
+  }
   is None => {
 
   }
@ -86,7 +87,7 @@ if x {
 //syntax is, I guess, for <expr> <brace-block>, where <expr> is a bool, or a <arrow-expr>
 
 // type level alises
-typealias <name> = <other type> #maybe thsi should be 'alias'?
+type alias <name> = <other type> #maybe this should be 'alias'?
 
 /*
 what if type A = B meant that you could had to create A's with A(B), but when you used A's the interface was exactly like B's?
@ -94,12 +95,12 @@ what if type A = B meant that you could had to create A's with A(B), but when yo
 */
 
 //declaring types of all stripes
-type MyData = { a: i32, b: String }
+type MyData = { a: i32, b: String } // shorthand special-case for `type MyData = MyData { a: i32, b: String }`
 type MyType = MyType
 type Option<a> = None | Some(a)
 type Signal = Absence | SimplePresence(i32) | ComplexPresence {a: i32, b: MyCustomData}
 
-//traits
+//traits TODO I probably want to rename this
 
 trait Bashable { }
 trait Luggable {
@ -108,7 +109,7 @@ what if type A = B meant that you could had to create A's with A(B), but when yo
 
 }
 
-// lambdas
-// ruby-style not rust-style
-const a: X -> Y -> Z = {|x,y| }
+// lambdas - maybe I want to use ruby-style (not rust style) syntax
+// e.g.
+// Also TODO Nix uses `X: Y: Z` for in its value-level syntax, why can't I?
+let a: X -> Y -> Z = {|x,y| }
src/main.rs (76 lines)
@ -1,15 +1,71 @@
-extern crate schala_repl;
-//extern crate maaru_lang;
-//extern crate rukka_lang;
-//extern crate robo_lang;
-extern crate schala_lang;
-use schala_repl::{ProgrammingLanguageInterface, start_repl};
-
-extern { }
-
-fn main() {
-    let langs: Vec<Box<dyn ProgrammingLanguageInterface>> = vec![Box::new(schala_lang::Schala::new())];
-    start_repl(langs);
-}
+use std::{collections::HashSet, fs::File, io::Read, path::PathBuf, process::exit};
+
+use schala_lang::{Schala, SchalaConfig};
+use schala_repl::{ComputationRequest, ProgrammingLanguageInterface, Repl};
+
+//TODO specify multiple langs, and have a way to switch between them
+fn main() {
+    let args: Vec<String> = std::env::args().collect();
+    let matches = command_line_options().parse(&args[1..]).unwrap_or_else(|e| {
+        eprintln!("Error parsing options: {}", e);
+        exit(1);
+    });
+
+    if matches.opt_present("help") {
+        println!("{}", command_line_options().usage("Schala metainterpreter"));
+        exit(0);
+    }
+
+    if matches.free.is_empty() {
+        let state = Schala::new();
+        let mut repl = Repl::new(state);
+        let config = SchalaConfig { repl: true };
+        repl.run_repl(config);
+    } else {
+        let paths: Vec<PathBuf> = matches.free.iter().map(PathBuf::from).collect();
+        //TODO handle more than one file
+        let filename = &paths[0];
+        let extension = filename.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
+            eprintln!("Source file `{}` has no extension.", filename.display());
+            exit(1);
+        });
+
+        //TODO this probably should be a macro for every supported language
+        if extension == Schala::source_file_suffix() {
+            let config = SchalaConfig { repl: false };
+
+            run_noninteractive(paths, Schala::new(), config);
+        } else {
+            eprintln!("Extension .{} not recognized", extension);
+            exit(1);
+        }
+    }
+}
+
+pub fn run_noninteractive<L: ProgrammingLanguageInterface>(
+    filenames: Vec<PathBuf>,
+    mut language: L,
+    config: L::Config,
+) {
+    // for now, only do something with the first filename
+
+    let filename = &filenames[0];
+    let mut source_file = File::open(filename).unwrap();
+    let mut buffer = String::new();
+    source_file.read_to_string(&mut buffer).unwrap();
+
+    let request = ComputationRequest { source: &buffer, config, debug_requests: HashSet::new() };
+
+    let response = language.run_computation(request);
+    match response.main_output {
+        Ok(s) => println!("{}", s),
+        Err(s) => eprintln!("{}", s),
+    };
+}
+
+fn command_line_options() -> getopts::Options {
+    let mut options = getopts::Options::new();
+    options.optflag("h", "help", "Show help text");
+    //options.optflag("w", "webapp", "Start up web interpreter");
+    options
+}