Compare commits: failure_st...use_nom
508 commits
(Commit list: 508 commits in this range, 7282e9f05e through ffe7deb00a; the author and date columns of the table are empty in this capture.)
1  .gitignore  (vendored)

@@ -1,4 +1,3 @@
-Cargo.lock
 target
 .schala_repl
 .schala_history

1164  Cargo.lock  (generated, new file)
File diff suppressed because it is too large
Cargo.toml

@@ -6,12 +6,10 @@ authors = ["greg <greg.shuflin@protonmail.com>"]

[dependencies]

schala-repl = { path = "schala-repl" }
schala-repl-codegen = { path = "schala-repl-codegen" }
maaru-lang = { path = "maaru" }
rukka-lang = { path = "rukka" }
robo-lang = { path = "robo" }
schala-lang = { path = "schala-lang/language" }
schala-lang-codegen = { path = "schala-lang/codegen" }
# maaru-lang = { path = "maaru" }
# rukka-lang = { path = "rukka" }
# robo-lang = { path = "robo" }

[build-dependencies]
includedir_codegen = "0.2.0"
59  README.md

@@ -1,21 +1,24 @@

# Schala - a programming language meta-interpreter

Schala is a Rust framework written to make it easy to
create and experiment with toy programming languages. It provides
a common REPL, and a trait `ProgrammingLanguage` with provisions
for tokenizing text, parsing tokens, evaluating an abstract syntax tree,
and other tasks that are common to all programming languages.
Schala is a Rust framework written to make it easy to create and experiment
with multipl toy programming languages. It provides a cross-language REPL and
provisions for tokenizing text, parsing tokens, evaluating an abstract syntax
tree, and other tasks that are common to all programming languages, as well as sharing state
between multiple programming languages.

Schala is implemented as a Rust library `schala_lib`, which provides a
`schala_main` function. This function serves as the main loop of the REPL, if run
interactively, or otherwise reads and interprets programming language source
files. It expects as input a vector of `PLIGenerator`, which is a type representing
a closure that returns a boxed trait object that implements the `ProgrammingLanguage` trait,
and stores any persistent state relevant to that programming language. The ability
to share state between different programming languages is in the works.
Schala is implemented as a Rust library `schala-repl`, which provides a
function `start_repl`, meant to be used as entry point into a common REPL or
non-interactive environment. Clients are expected to invoke `start_repl` with a
vector of programming languages. Individual programming language
implementations are Rust types that implement the
`ProgrammingLanguageInterface` trait and store whatever persistent state is
relevant to that language.

## About
Run schala with: `cargo run`. This will drop you into a REPL environment. Type
`:help` for more information, or type in text in any supported programming
language (currently only schala-lang) to evaluate it in the REPL.

## History

Schala started out life as an experiment in writing a Javascript-like
programming language that would never encounter any kind of runtime value

@@ -33,18 +36,18 @@ creating a language name confusingly close to Scala. The naming scheme for
languages implemented with the Schala meta-interpreter is Chrono Trigger
characters.

Schala is incomplete alpha software and is not ready for public release.
Schala and languages implemented with it are incomplete alpha software and are
not ready for public release.

## Languages implemented using the meta-interpreter

* The eponymous *Schala* language is an interpreted/compiled scripting langauge,
designed to be relatively simple, but with a reasonably sophisticated type
system.
* The eponymous *Schala* language is a work-in-progress general purpose
programming language with static typing and algebraic data types. Its design
goals include having a very straightforward implemenation and being syntactically
minimal.

* *Maaru* was the original Schala (since renamed to free up the name *Schala*
for the above language), a very simple dynamically-typed scripting language
such that all possible runtime errors result in null rather than program
failure.
* *Maaru* is a very simple dynamically-typed scripting language, with the semantics
that all runtime errors return a `null` value rather than fail.

* *Robo* is an experiment in creating a lazy, functional, strongly-typed language
much like Haskell

@@ -56,10 +59,21 @@ much like Haskell
Here's a partial list of resources I've made use of in the process
of learning how to write a programming language.

### General

http://thume.ca/2019/04/18/writing-a-compiler-in-rust/

### Type-checking
https://skillsmatter.com/skillscasts/10868-inside-the-rust-compiler
https://www.youtube.com/watch?v=il3gD7XMdmA
http://dev.stephendiehl.com/fun/006_hindley_milner.html
https://rust-lang-nursery.github.io/rustc-guide/type-inference.html

https://eli.thegreenplace.net/2018/unification/
https://eli.thegreenplace.net/2018/type-inference/
http://smallcultfollowing.com/babysteps/blog/2017/03/25/unification-in-chalk-part-1/
http://reasonableapproximation.net/2019/05/05/hindley-milner.html
https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html

### Evaluation
*Understanding Computation*, Tom Stuart, O'Reilly 2013

@@ -77,4 +91,5 @@ http://blog.ulysse.io/2016/07/03/llvm-getting-started.html
###Rust resources
https://thefullsnack.com/en/rust-for-the-web.html

https://rocket.rs/guide/getting-started/
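The `ProgrammingLanguageInterface` trait mentioned in the new README text is the integration point for each language. Based on the Robo and Rukka implementations visible further down this diff, a minimal client might look roughly like the sketch below; the exact set of required trait methods and the signature of `start_repl` are assumptions, not taken from this compare.

```rust
// Hypothetical example language. Only the methods that appear elsewhere in
// this diff are implemented; any other required trait items are assumed away.
extern crate schala_repl;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions,
                  UnfinishedComputation, FinishedComputation};

pub struct Echo;

impl ProgrammingLanguageInterface for Echo {
    fn get_language_name(&self) -> String {
        "Echo".to_string()
    }
    fn get_source_file_suffix(&self) -> String {
        "echo".to_string()
    }
    fn execute_pipeline(&mut self, input: &str, _options: &EvalOptions) -> FinishedComputation {
        // Mirror the Robo/Rukka pattern: build an UnfinishedComputation,
        // then finish it with either Ok(output) or Err(message).
        let output = UnfinishedComputation::default();
        output.finish(Ok(input.to_string()))
    }
}
```

Per the README, a vector of such language values would then be handed to `start_repl`.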
204  TODO.md

@@ -1,19 +1,146 @@

# Plan of attack

# TODO Items
-ONLY two types of statement, Expressoin and Declaration
-modules and imports are just types of Declarables

1. modify visitor so it can handle scopes
-this is needed both to handle import scope correctly
-and also to support making FQSNs aware of function parameters

*A neat idea for pattern matching optimization would be if you could match on one of several things in a list
2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away

# TODO items

-use 'let' sigil in patterns for variables :

```
q is MyStruct(let a, Chrono::Trigga) then {

}
```

-idea: what if there was something like React jsx syntas built in? i.e. a way to automatically transform some kind of markup
into a function call, cf. `<h1 prop="arg">` -> h1(prop=arg)

## General code cleanup
- I think I can restructure the parser to get rid of most instances of expect!, at least at the beginning of a rule
DONE -experiment with storing metadata via ItemIds on AST nodes (cf. https://rust-lang.github.io/rustc-guide/hir.html, https://github.com/rust-lang/rust/blob/master/src/librustc/hir/mod.rs )
-implement and test open/use statements
-implement field access
- standardize on an error type that isn't String
-implement a visitor pattern for the use of scope_resolver
- maybe implement this twice: 1) the value-returning, no-default one in the haoyi blogpost,
-look at https://gitlab.haskell.org/ghc/ghc/wikis/pattern-synonyms
2) the non-value-returning, default one like in rustc (cf. https://github.com/rust-unofficial/patterns/blob/master/patterns/visitor.md)

-parser error - should report subset of AST parsed *so far*
- what if you used python 'def' syntax to define a function? what error message makes sense here?

## Reduction
- make a good type for actual language builtins to avoid string comparisons

## Typechecking

- make a type to represent types rather than relying on string comparisons

- look at https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html

- cf. the notation mentioned in the cardelli paper, the debug information for the `typechecking` pass should
print the generated type variable for every subexpression in an expression

- think about idris-related ideas of multiple implementations of a type for an interface (+ vs * impl for monoids, for preorder/inorder/postorder for Foldable)

-should have an Idris-like `cast To From` function

## Schala-lang syntax

-idea: the `type` declaration should have some kind of GADT-like syntax

- Idea: if you have a pattern-match where one variant has a variable and the other lacks it
instead of treating this as a type error, promote the bound variable to an option type

- Include extensible scala-style html"string ${var}" string interpolations

- A neat idea for pattern matching optimization would be if you could match on one of several things in a list
ex:
if x {
```if x {
is (comp, LHSPat, RHSPat) if comp in ["==, "<"] -> ...
}
}```

- Schala should have both currying *and* default arguments!
```fn a(b: Int, c:Int, d:Int = 1) -> Int
a(1,2) : Int
a(1,2,d=2): Int
a(_,1,3) : Int -> Int
a(1,2, c=_): Int -> Int
a(_,_,_) : Int -> Int -> Int -> Int
```

- scoped types - be able to define a quick enum type scoped to a function or other type for
something, that only is meant to be used as a quick bespoke interface between
two other things

ex.
```type enum {
type enum MySubVariant {
SubVariant1, SubVariant2, etc.
}
Variant1(MySubVariant),
Variant2(...),
}```

- inclusive/exclusive range syntax like .. vs ..=

## Compilation
-look into Inkwell for rust LLVM bindings

- https://nshipster.com/never/
-https://cranelift.readthedocs.io/en/latest/?badge=latest<Paste>

## Other links of note

- https://nshipster.com/never/
-consult http://gluon-lang.org/book/embedding-api.html

## Trying if-syntax again

//simple if expr
if x == 10 then "a" else "z"

//complex if expr
if x == 10 then {
let a = 1
let b = 2
a + b
} else {
55
}

// different comparison ops
if x {
== 1 then "a"
.isPrime() then "b"
else "c"
}

/* for now disallow `if x == { 1 then ... }`, b/c hard to parse

//simple pattern-matching
if x is Person("Ivan", age) then age else 0

//match-block equivalent
if x {
is Person("Ivan", _) then "Ivan"
is Person(_, age) if age > 13 then "barmitzvah'd"
else "foo"
}

## (OLD) Playing around with conditional syntax ideas

- if/match playground

simple if
@@ -52,70 +179,3 @@ if the only two guard patterns are true and false, then the abbreviated syntax:
`'if' discriminator 'then' block_or_expr 'else' block_or_expr`
can replace `'if' discriminator '{' 'true' 'then' block_or_expr; 'false' 'then' block_or_expr '}'`

- Next priorities: - get ADTs working, get matches working

- inclusive/exclusive range syntax like .. vs ..=

- sketch of an idea for the REPL:
-each compiler pass should be a (procedural?) macro like
compiler_pass!("parse", dataproducts: ["ast", "parse_tree"], {
match parsing::parse(INPUT) {
Ok(
PASS.add_artifact(
}

-should have an Idris-like `cast To From` function

- REPL:
- want to be able to do things like `:doc Identifier`, and have the language load up these definitions to the REPL

* change 'trait' to 'interface'
-think about idris-related ideas of multiple implementations of a type for an interface (+ vs * impl for monoids, for preorder/inorder/postorder for Foldable)

* Share state between programming languages

* idea for Schala - scoped types - be able to define a quick enum type scoped to a function ro something, that only is meant to be used as a quick bespoke interface between two other things

* another idea, allow:
type enum {
type enum MySubVariant {
SubVariant1, SubVariant2, etc.
}
Variant1(MySubVariant),
Variant2(...),
}

* idea for Schala: both currying *and* default arguments!
ex. fn a(b: Int, c:Int, d:Int = 1) -> Int
a(1,2) : Int
a(1,2,d=2): Int
a(_,1,3) : Int -> Int
a(1,2, c=_): Int -> Int
a(_,_,_) : Int -> Int -> Int -> Int

- AST : maybe replace the Expression type with "Ascription(TypeName, Box<Expression>) nodes??
- parser: add a "debug" field to the Parser struct for all debug-related things

-scala-style html"dfasfsadf${}" string interpolations!

*Compiler passes architecture

-ProgrammingLanguageInterface defines a evaluate_in_repl() and evaluate_no_repl() functions
-these take in a vec of CompilerPasses

struct CompilerPass {
name: String,
run: fn(PrevPass) -> NextPass
}

-change "Type...." names in parser.rs to "Anno..." for non-collision with names in typechecking.rs

-get rid of code pertaining to compilation specifically, have a more generation notion of "execution type"
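The "Compiler passes architecture" note in the TODO above sketches a `CompilerPass { name, run: fn(PrevPass) -> NextPass }` shape. One way that idea could be prototyped in Rust, with a single string-typed artifact standing in for the per-pass types; the artifact type and the `run_pipeline` driver are assumptions for illustration, not project code:

```rust
// A minimal interpretation of the CompilerPass sketch from the TODO above.
struct CompilerPass {
    name: &'static str,
    // Each pass consumes the previous pass's output, as in fn(PrevPass) -> NextPass.
    run: fn(String) -> Result<String, String>,
}

fn run_pipeline(passes: &[CompilerPass], input: String) -> Result<String, String> {
    let mut artifact = input;
    for pass in passes {
        artifact = (pass.run)(artifact)
            .map_err(|e| format!("{} failed: {}", pass.name, e))?;
    }
    Ok(artifact)
}

fn main() {
    // Two toy passes standing in for tokenizing and parsing.
    let passes = [
        CompilerPass { name: "tokenize", run: |s| Ok(s.to_uppercase()) },
        CompilerPass { name: "parse", run: |s| Ok(format!("({})", s)) },
    ];
    println!("{:?}", run_pipeline(&passes, "1 + 2".to_string()));
}
```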
@@ -1,279 +0,0 @@
|
||||
extern crate llvm_sys;
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use self::llvm_sys::prelude::*;
|
||||
use self::llvm_sys::{LLVMIntPredicate};
|
||||
|
||||
use parser::{AST, Statement, Function, Prototype, Expression, BinOp};
|
||||
use schala_repl::LLVMCodeString;
|
||||
|
||||
use schala_repl::llvm_wrap as LLVMWrap;
|
||||
|
||||
type VariableMap = HashMap<String, LLVMValueRef>;
|
||||
|
||||
struct CompilationData {
|
||||
context: LLVMContextRef,
|
||||
module: LLVMModuleRef,
|
||||
builder: LLVMBuilderRef,
|
||||
variables: VariableMap,
|
||||
main_function: LLVMValueRef,
|
||||
current_function: Option<LLVMValueRef>,
|
||||
}
|
||||
|
||||
pub fn compile_ast(ast: AST) -> LLVMCodeString {
|
||||
println!("Compiling!");
|
||||
let names: VariableMap = HashMap::new();
|
||||
|
||||
let context = LLVMWrap::create_context();
|
||||
let module = LLVMWrap::module_create_with_name("example module");
|
||||
let builder = LLVMWrap::CreateBuilderInContext(context);
|
||||
|
||||
let program_return_type = LLVMWrap::Int64TypeInContext(context);
|
||||
let main_function_type = LLVMWrap::FunctionType(program_return_type, Vec::new(), false);
|
||||
let main_function: LLVMValueRef = LLVMWrap::AddFunction(module, "main", main_function_type);
|
||||
|
||||
let mut data = CompilationData {
|
||||
context: context,
|
||||
builder: builder,
|
||||
module: module,
|
||||
variables: names,
|
||||
main_function: main_function,
|
||||
current_function: None,
|
||||
};
|
||||
|
||||
let bb = LLVMWrap::AppendBasicBlockInContext(data.context, data.main_function, "entry");
|
||||
LLVMWrap::PositionBuilderAtEnd(builder, bb);
|
||||
|
||||
let value = ast.codegen(&mut data);
|
||||
|
||||
LLVMWrap::BuildRet(builder, value);
|
||||
|
||||
let ret = LLVMWrap::PrintModuleToString(module);
|
||||
|
||||
// Clean up. Values created in the context mostly get cleaned up there.
|
||||
LLVMWrap::DisposeBuilder(builder);
|
||||
LLVMWrap::DisposeModule(module);
|
||||
LLVMWrap::ContextDispose(context);
|
||||
LLVMCodeString(ret)
|
||||
}
|
||||
|
||||
trait CodeGen {
|
||||
fn codegen(&self, &mut CompilationData) -> LLVMValueRef;
|
||||
}
|
||||
|
||||
impl CodeGen for AST {
|
||||
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
|
||||
|
||||
let int_type = LLVMWrap::Int64TypeInContext(data.context);
|
||||
let mut ret = LLVMWrap::ConstInt(int_type, 0, false);
|
||||
|
||||
for statement in self {
|
||||
ret = statement.codegen(data);
|
||||
}
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
impl CodeGen for Statement {
|
||||
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
|
||||
use self::Statement::*;
|
||||
match self {
|
||||
&ExprNode(ref expr) => expr.codegen(data),
|
||||
&FuncDefNode(ref func) => func.codegen(data),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CodeGen for Function {
|
||||
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
|
||||
|
||||
/* should have a check here for function already being defined */
|
||||
let function = self.prototype.codegen(data);
|
||||
let ref body = self.body;
|
||||
|
||||
data.current_function = Some(function);
|
||||
|
||||
let return_type = LLVMWrap::Int64TypeInContext(data.context);
|
||||
let mut ret = LLVMWrap::ConstInt(return_type, 0, false);
|
||||
|
||||
let block = LLVMWrap::AppendBasicBlockInContext(data.context, function, "entry");
|
||||
LLVMWrap::PositionBuilderAtEnd(data.builder, block);
|
||||
|
||||
//insert function params into variables
|
||||
for value in LLVMWrap::GetParams(function) {
|
||||
let name = LLVMWrap::GetValueName(value);
|
||||
data.variables.insert(name, value);
|
||||
}
|
||||
|
||||
for expr in body {
|
||||
ret = expr.codegen(data);
|
||||
}
|
||||
|
||||
LLVMWrap::BuildRet(data.builder, ret);
|
||||
|
||||
// get basic block of main
|
||||
let main_bb = LLVMWrap::GetBasicBlocks(data.main_function).get(0).expect("Couldn't get first block of main").clone();
|
||||
LLVMWrap::PositionBuilderAtEnd(data.builder, main_bb);
|
||||
|
||||
data.current_function = None;
|
||||
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
impl CodeGen for Prototype {
|
||||
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
|
||||
let num_args = self.parameters.len();
|
||||
let return_type = LLVMWrap::Int64TypeInContext(data.context);
|
||||
let mut arguments: Vec<LLVMTypeRef> = vec![];
|
||||
|
||||
for _ in 0..num_args {
|
||||
arguments.push(LLVMWrap::Int64TypeInContext(data.context));
|
||||
}
|
||||
|
||||
let function_type =
|
||||
LLVMWrap::FunctionType(return_type,
|
||||
arguments,
|
||||
false);
|
||||
|
||||
let function = LLVMWrap::AddFunction(data.module,
|
||||
&*self.name,
|
||||
function_type);
|
||||
|
||||
let function_params = LLVMWrap::GetParams(function);
|
||||
for (index, param) in function_params.iter().enumerate() {
|
||||
let name = self.parameters.get(index).expect(&format!("Failed this check at index {}", index));
|
||||
let new = *param;
|
||||
|
||||
LLVMWrap::SetValueName(new, name);
|
||||
}
|
||||
|
||||
function
|
||||
}
|
||||
}
|
||||
|
||||
impl CodeGen for Expression {
|
||||
fn codegen(&self, data: &mut CompilationData) -> LLVMValueRef {
|
||||
use self::BinOp::*;
|
||||
use self::Expression::*;
|
||||
|
||||
let int_type = LLVMWrap::Int64TypeInContext(data.context);
|
||||
let zero = LLVMWrap::ConstInt(int_type, 0, false);
|
||||
|
||||
match *self {
|
||||
Variable(ref name) => *data.variables.get(&**name).expect(&format!("Can't find variable {}", name)),
|
||||
BinExp(Assign, ref left, ref right) => {
|
||||
if let Variable(ref name) = **left {
|
||||
let new_value = right.codegen(data);
|
||||
data.variables.insert((**name).clone(), new_value);
|
||||
new_value
|
||||
} else {
|
||||
panic!("Bad variable assignment")
|
||||
}
|
||||
}
|
||||
BinExp(ref op, ref left, ref right) => {
|
||||
let lhs = left.codegen(data);
|
||||
let rhs = right.codegen(data);
|
||||
op.codegen_with_ops(data, lhs, rhs)
|
||||
}
|
||||
Number(ref n) => {
|
||||
let native_val = *n as u64;
|
||||
let int_value: LLVMValueRef = LLVMWrap::ConstInt(int_type, native_val, false);
|
||||
int_value
|
||||
}
|
||||
Conditional(ref test, ref then_expr, ref else_expr) => {
|
||||
let condition_value = test.codegen(data);
|
||||
let is_nonzero =
|
||||
LLVMWrap::BuildICmp(data.builder,
|
||||
LLVMIntPredicate::LLVMIntNE,
|
||||
condition_value,
|
||||
zero,
|
||||
"ifcond");
|
||||
|
||||
let func = LLVMWrap::GetBasicBlockParent(LLVMWrap::GetInsertBlock(data.builder));
|
||||
|
||||
let mut then_block =
|
||||
LLVMWrap::AppendBasicBlockInContext(data.context, func, "then_block");
|
||||
let mut else_block =
|
||||
LLVMWrap::AppendBasicBlockInContext(data.context, func, "else_block");
|
||||
let merge_block =
|
||||
LLVMWrap::AppendBasicBlockInContext(data.context, func, "ifcont");
|
||||
|
||||
// add conditional branch to ifcond block
|
||||
LLVMWrap::BuildCondBr(data.builder, is_nonzero, then_block, else_block);
|
||||
|
||||
// start inserting into then block
|
||||
LLVMWrap::PositionBuilderAtEnd(data.builder, then_block);
|
||||
|
||||
// then-block codegen
|
||||
let then_return = then_expr.codegen(data);
|
||||
LLVMWrap::BuildBr(data.builder, merge_block);
|
||||
|
||||
// update then block b/c recursive codegen() call may have changed the notion of
|
||||
// the current block
|
||||
then_block = LLVMWrap::GetInsertBlock(data.builder);
|
||||
|
||||
// then do the same stuff again for the else branch
|
||||
//
|
||||
LLVMWrap::PositionBuilderAtEnd(data.builder, else_block);
|
||||
let else_return = match *else_expr {
|
||||
Some(ref e) => e.codegen(data),
|
||||
None => zero,
|
||||
};
|
||||
LLVMWrap::BuildBr(data.builder, merge_block);
|
||||
else_block = LLVMWrap::GetInsertBlock(data.builder);
|
||||
|
||||
LLVMWrap::PositionBuilderAtEnd(data.builder, merge_block);
|
||||
|
||||
let phi = LLVMWrap::BuildPhi(data.builder, int_type, "phinode");
|
||||
let values = vec![then_return, else_return];
|
||||
let blocks = vec![then_block, else_block];
|
||||
LLVMWrap::AddIncoming(phi, values, blocks);
|
||||
phi
|
||||
}
|
||||
Block(ref exprs) => {
|
||||
let mut ret = zero;
|
||||
for e in exprs.iter() {
|
||||
ret = e.codegen(data);
|
||||
}
|
||||
ret
|
||||
}
|
||||
ref e => {
|
||||
println!("Unimplemented {:?}", e);
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BinOp {
|
||||
fn codegen_with_ops(&self, data: &CompilationData, lhs: LLVMValueRef, rhs: LLVMValueRef) -> LLVMValueRef {
|
||||
use self::BinOp::*;
|
||||
macro_rules! simple_binop {
|
||||
($fnname: expr, $name: expr) => {
|
||||
$fnname(data.builder, lhs, rhs, $name)
|
||||
}
|
||||
}
|
||||
let int_type = LLVMWrap::Int64TypeInContext(data.context);
|
||||
match *self {
|
||||
Add => simple_binop!(LLVMWrap::BuildAdd, "addtemp"),
|
||||
Sub => simple_binop!(LLVMWrap::BuildSub, "subtemp"),
|
||||
Mul => simple_binop!(LLVMWrap::BuildMul, "multemp"),
|
||||
Div => simple_binop!(LLVMWrap::BuildUDiv, "divtemp"),
|
||||
Mod => simple_binop!(LLVMWrap::BuildSRem, "remtemp"),
|
||||
Less => {
|
||||
let pred: LLVMValueRef =
|
||||
LLVMWrap::BuildICmp(data.builder, LLVMIntPredicate::LLVMIntULT, lhs, rhs, "tmp");
|
||||
LLVMWrap::BuildZExt(data.builder, pred, int_type, "temp")
|
||||
}
|
||||
Greater => {
|
||||
let pred: LLVMValueRef =
|
||||
LLVMWrap::BuildICmp(data.builder, LLVMIntPredicate::LLVMIntUGT, lhs, rhs, "tmp");
|
||||
LLVMWrap::BuildZExt(data.builder, pred, int_type, "temp")
|
||||
}
|
||||
ref unknown => panic!("Bad operator {:?}", unknown),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,9 +5,6 @@ extern crate schala_repl;
|
||||
mod tokenizer;
|
||||
mod parser;
|
||||
mod eval;
|
||||
mod compilation;
|
||||
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, UnfinishedComputation, FinishedComputation, TraceArtifact};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TokenError {
|
||||
@@ -34,6 +31,42 @@ impl<'a> Maaru<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> Result<String, String> {
|
||||
let mut output = UnfinishedComputation::default();
|
||||
|
||||
let tokens = match tokenizer::tokenize(input) {
|
||||
Ok(tokens) => {
|
||||
if let Some(_) = options.debug_passes.get("tokens") {
|
||||
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
|
||||
}
|
||||
tokens
|
||||
},
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Tokenization error: {:?}\n", err.msg)))
|
||||
}
|
||||
};
|
||||
|
||||
let ast = match parser::parse(&tokens, &[]) {
|
||||
Ok(ast) => {
|
||||
if let Some(_) = options.debug_passes.get("ast") {
|
||||
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
|
||||
}
|
||||
ast
|
||||
},
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Parse error: {:?}\n", err.msg)))
|
||||
}
|
||||
};
|
||||
let mut evaluation_output = String::new();
|
||||
for s in self.evaluator.run(ast).iter() {
|
||||
evaluation_output.push_str(s);
|
||||
}
|
||||
Ok(evaluation_output)
|
||||
}
|
||||
*/
|
||||
|
||||
/*
|
||||
impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
|
||||
fn get_language_name(&self) -> String {
|
||||
"Maaru".to_string()
|
||||
@@ -41,63 +74,5 @@ impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
|
||||
fn get_source_file_suffix(&self) -> String {
|
||||
format!("maaru")
|
||||
}
|
||||
|
||||
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
|
||||
let mut output = UnfinishedComputation::default();
|
||||
|
||||
let tokens = match tokenizer::tokenize(input) {
|
||||
Ok(tokens) => {
|
||||
if let Some(_) = options.debug_passes.get("tokens") {
|
||||
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
|
||||
}
|
||||
tokens
|
||||
},
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Tokenization error: {:?}\n", err.msg)))
|
||||
}
|
||||
};
|
||||
|
||||
let ast = match parser::parse(&tokens, &[]) {
|
||||
Ok(ast) => {
|
||||
if let Some(_) = options.debug_passes.get("ast") {
|
||||
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
|
||||
}
|
||||
ast
|
||||
},
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Parse error: {:?}\n", err.msg)))
|
||||
}
|
||||
};
|
||||
let mut evaluation_output = String::new();
|
||||
for s in self.evaluator.run(ast).iter() {
|
||||
evaluation_output.push_str(s);
|
||||
}
|
||||
output.finish(Ok(evaluation_output))
|
||||
}
|
||||
|
||||
/* TODO make this work with new framework */
|
||||
/*
|
||||
fn can_compile(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn compile(&mut self, input: &str) -> LLVMCodeString {
|
||||
let tokens = match tokenizer::tokenize(input) {
|
||||
Ok(tokens) => tokens,
|
||||
Err(err) => {
|
||||
let msg = format!("Tokenization error: {:?}\n", err.msg);
|
||||
panic!("{}", msg);
|
||||
}
|
||||
};
|
||||
|
||||
let ast = match parser::parse(&tokens, &[]) {
|
||||
Ok(ast) => ast,
|
||||
Err(err) => {
|
||||
let msg = format!("Parse error: {:?}\n", err.msg);
|
||||
panic!("{}", msg);
|
||||
}
|
||||
};
|
||||
compilation::compile_ast(ast)
|
||||
}
|
||||
*/
|
||||
}
|
||||
*/
|
||||
|
||||
@@ -4,7 +4,7 @@ extern crate itertools;
|
||||
extern crate schala_repl;
|
||||
|
||||
use itertools::Itertools;
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, FinishedComputation, UnfinishedComputation};
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions};
|
||||
|
||||
pub struct Robo {
|
||||
}
|
||||
@@ -154,17 +154,5 @@ impl ProgrammingLanguageInterface for Robo {
|
||||
fn get_source_file_suffix(&self) -> String {
|
||||
format!("robo")
|
||||
}
|
||||
|
||||
fn execute_pipeline(&mut self, input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
|
||||
let output = UnfinishedComputation::default();
|
||||
let tokens = match tokenize(input) {
|
||||
Ok(tokens) => tokens,
|
||||
Err(e) => {
|
||||
return output.finish(Err(format!("Tokenize error: {:?}", e)));
|
||||
}
|
||||
};
|
||||
|
||||
output.finish(Ok(format!("{:?}", tokens)))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ extern crate itertools;
|
||||
extern crate schala_repl;
|
||||
|
||||
use itertools::Itertools;
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, UnfinishedComputation, FinishedComputation};
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions};
|
||||
use std::iter::Peekable;
|
||||
use std::vec::IntoIter;
|
||||
use std::str::Chars;
|
||||
@@ -72,24 +72,6 @@ impl ProgrammingLanguageInterface for Rukka {
|
||||
fn get_source_file_suffix(&self) -> String {
|
||||
format!("rukka")
|
||||
}
|
||||
|
||||
fn execute_pipeline(&mut self, input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
|
||||
let output = UnfinishedComputation::default();
|
||||
let sexps = match read(input) {
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Error: {}", err)));
|
||||
},
|
||||
Ok(sexps) => sexps
|
||||
};
|
||||
|
||||
let output_str: String = sexps.into_iter().enumerate().map(|(i, sexp)| {
|
||||
match self.state.eval(sexp) {
|
||||
Ok(result) => format!("{}: {}", i, result.print()),
|
||||
Err(err) => format!("{} Error: {}", i, err),
|
||||
}
|
||||
}).intersperse(format!("\n")).collect();
|
||||
output.finish(Ok(output_str))
|
||||
}
|
||||
}
|
||||
|
||||
impl EvaluatorState {
|
||||
|
||||
@@ -19,10 +19,10 @@ impl Fold for RecursiveDescentFn {
|
||||
|
||||
let new_block: syn::Block = parse_quote! {
|
||||
{
|
||||
let next_token = self.peek_with_token_offset();
|
||||
let next_token_before_parse = self.token_handler.peek();
|
||||
let record = ParseRecord {
|
||||
production_name: stringify!(#ident).to_string(),
|
||||
next_token: format!("{}", next_token.to_string_with_metadata()),
|
||||
next_token: format!("{}", next_token_before_parse.to_string_with_metadata()),
|
||||
level: self.parse_level,
|
||||
};
|
||||
self.parse_level += 1;
|
||||
@@ -32,11 +32,11 @@ impl Fold for RecursiveDescentFn {
|
||||
if self.parse_level != 0 {
|
||||
self.parse_level -= 1;
|
||||
}
|
||||
match result {
|
||||
Err(ParseError { token: None, msg }) =>
|
||||
Err(ParseError { token: Some(next_token), msg }),
|
||||
_ => result
|
||||
}
|
||||
|
||||
result.map_err(|mut parse_error: ParseError| {
|
||||
parse_error.production_name = Some(stringify!(#ident).to_string());
|
||||
parse_error
|
||||
})
|
||||
}
|
||||
};
|
||||
i.block = Box::new(new_block);
|
||||
|
||||
schala-lang/language/Cargo.toml

@@ -2,15 +2,20 @@
name = "schala-lang"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2018"

[dependencies]
itertools = "0.5.8"
take_mut = "0.1.3"
maplit = "*"
lazy_static = "0.2.8"
failure = "0.1.2"

itertools = "0.8.0"
take_mut = "0.2.2"
maplit = "1.0.1"
lazy_static = "1.3.0"
failure = "0.1.5"
ena = "0.11.0"
stopwatch = "0.0.7"
derivative = "1.0.3"
colored = "1.8"
radix_trie = "0.1.5"
nom = "5.1.0"

schala-lang-codegen = { path = "../codegen" }
schala-repl = { path = "../../schala-repl" }
schala-repl-codegen = { path = "../../schala-repl-codegen" }
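One of the newly added dependencies is `nom = "5.1.0"`, which lines up with the `use_nom` side of this compare. For orientation, a minimal nom 5 parser function looks roughly like this; the example is illustrative only and not taken from the repository (tying it to a `NatLiteral` is just a plausible use):

```rust
use nom::{
    IResult,
    character::complete::digit1,
    combinator::map_res,
};

// Parse a decimal natural number, e.g. as the payload of a NatLiteral.
fn nat_literal(input: &str) -> IResult<&str, u64> {
    map_res(digit1, |s: &str| s.parse::<u64>())(input)
}

fn main() {
    // The parser returns the unconsumed remainder alongside the parsed value.
    assert_eq!(nat_literal("42 rest"), Ok((" rest", 42)));
}
```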
@@ -1,20 +1,90 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use builtin::{BinOp, PrefixOp};
|
||||
use crate::derivative::Derivative;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct AST(pub Vec<Statement>);
|
||||
mod walker;
|
||||
mod visitor;
|
||||
mod visitor_test;
|
||||
mod operators;
|
||||
pub use operators::*;
|
||||
pub use visitor::ASTVisitor;
|
||||
pub use walker::walk_ast;
|
||||
|
||||
/// An abstract identifier for an AST node
|
||||
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
|
||||
pub struct ItemId {
|
||||
idx: u32,
|
||||
}
|
||||
|
||||
impl ItemId {
|
||||
pub fn new(n: u32) -> ItemId {
|
||||
ItemId { idx: n }
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ItemIdStore {
|
||||
last_idx: u32
|
||||
}
|
||||
|
||||
impl ItemIdStore {
|
||||
pub fn new() -> ItemIdStore {
|
||||
ItemIdStore { last_idx: 0 }
|
||||
}
|
||||
/// Always returns an ItemId with internal value zero
|
||||
#[cfg(test)]
|
||||
pub fn new_id() -> ItemId {
|
||||
ItemId { idx: 0 }
|
||||
}
|
||||
|
||||
/// This limits the size of the AST to 2^32 tree elements
|
||||
pub fn fresh(&mut self) -> ItemId {
|
||||
let idx = self.last_idx;
|
||||
self.last_idx += 1;
|
||||
ItemId::new(idx)
|
||||
}
|
||||
}
|
||||
|
||||
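A small illustration of how the `ItemIdStore` above hands out ids; a test-style sketch that assumes only the definitions in this file:

```rust
#[test]
fn item_id_store_hands_out_sequential_ids() {
    let mut store = ItemIdStore::new();
    let first = store.fresh();
    let second = store.fresh();
    // ItemId derives PartialEq on `idx`, so successive ids compare unequal.
    assert_ne!(first, second);
    assert_eq!(second, ItemId::new(1));
}
```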
#[derive(Derivative, Debug)]
|
||||
#[derivative(PartialEq)]
|
||||
pub struct AST {
|
||||
#[derivative(PartialEq="ignore")]
|
||||
pub id: ItemId,
|
||||
pub statements: Vec<Statement>
|
||||
}
|
||||
|
||||
#[derive(Derivative, Debug, Clone)]
|
||||
#[derivative(PartialEq)]
|
||||
pub struct Statement {
|
||||
#[derivative(PartialEq="ignore")]
|
||||
pub id: ItemId,
|
||||
pub kind: StatementKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum Statement {
|
||||
ExpressionStatement(Expression),
|
||||
pub enum StatementKind {
|
||||
Expression(Expression),
|
||||
Declaration(Declaration),
|
||||
Import(ImportSpecifier),
|
||||
Module(ModuleSpecifier),
|
||||
}
|
||||
|
||||
pub type Block = Vec<Statement>;
|
||||
pub type ParamName = Rc<String>;
|
||||
pub type InterfaceName = Rc<String>; //should be a singleton I think??
|
||||
pub type FormalParam = (ParamName, Option<TypeIdentifier>);
|
||||
|
||||
#[derive(Debug, Derivative, Clone)]
|
||||
#[derivative(PartialEq)]
|
||||
pub struct QualifiedName {
|
||||
#[derivative(PartialEq="ignore")]
|
||||
pub id: ItemId,
|
||||
pub components: Vec<Rc<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct FormalParam {
|
||||
pub name: ParamName,
|
||||
pub default: Option<Expression>,
|
||||
pub anno: Option<TypeIdentifier>
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum Declaration {
|
||||
@@ -25,15 +95,20 @@ pub enum Declaration {
|
||||
body: TypeBody,
|
||||
mutable: bool
|
||||
},
|
||||
TypeAlias(Rc<String>, Rc<String>), //should have TypeSingletonName in it, or maybe just String, not sure
|
||||
//TODO this needs to be more sophisticated
|
||||
TypeAlias {
|
||||
alias: Rc<String>,
|
||||
original: Rc<String>,
|
||||
},
|
||||
Binding {
|
||||
name: Rc<String>,
|
||||
constant: bool,
|
||||
type_anno: Option<TypeIdentifier>,
|
||||
expr: Expression,
|
||||
},
|
||||
Impl {
|
||||
type_name: TypeIdentifier,
|
||||
interface_name: Option<InterfaceName>,
|
||||
interface_name: Option<TypeSingletonName>,
|
||||
block: Vec<Declaration>,
|
||||
},
|
||||
Interface {
|
||||
@@ -57,11 +132,30 @@ pub struct TypeBody(pub Vec<Variant>);
|
||||
pub enum Variant {
|
||||
UnitStruct(Rc<String>),
|
||||
TupleStruct(Rc<String>, Vec<TypeIdentifier>),
|
||||
Record(Rc<String>, Vec<(Rc<String>, TypeIdentifier)>),
|
||||
Record {
|
||||
name: Rc<String>,
|
||||
members: Vec<(Rc<String>, TypeIdentifier)>,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct Expression(pub ExpressionType, pub Option<TypeIdentifier>);
|
||||
#[derive(Debug, Derivative, Clone)]
|
||||
#[derivative(PartialEq)]
|
||||
pub struct Expression {
|
||||
#[derivative(PartialEq="ignore")]
|
||||
pub id: ItemId,
|
||||
pub kind: ExpressionKind,
|
||||
pub type_anno: Option<TypeIdentifier>
|
||||
}
|
||||
|
||||
impl Expression {
|
||||
pub fn new(id: ItemId, kind: ExpressionKind) -> Expression {
|
||||
Expression { id, kind, type_anno: None }
|
||||
}
|
||||
|
||||
pub fn with_anno(id: ItemId, kind: ExpressionKind, type_anno: TypeIdentifier) -> Expression {
|
||||
Expression { id, kind, type_anno: Some(type_anno) }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum TypeIdentifier {
|
||||
@@ -76,7 +170,7 @@ pub struct TypeSingletonName {
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum ExpressionType {
|
||||
pub enum ExpressionKind {
|
||||
NatLiteral(u64),
|
||||
FloatLiteral(f64),
|
||||
StringLiteral(Rc<String>),
|
||||
@@ -84,21 +178,21 @@ pub enum ExpressionType {
|
||||
BinExp(BinOp, Box<Expression>, Box<Expression>),
|
||||
PrefixExp(PrefixOp, Box<Expression>),
|
||||
TupleLiteral(Vec<Expression>),
|
||||
Value(Rc<String>),
|
||||
Value(QualifiedName),
|
||||
NamedStruct {
|
||||
name: Rc<String>,
|
||||
name: QualifiedName,
|
||||
fields: Vec<(Rc<String>, Expression)>,
|
||||
},
|
||||
Call {
|
||||
f: Box<Expression>,
|
||||
arguments: Vec<Expression>,
|
||||
arguments: Vec<InvocationArgument>,
|
||||
},
|
||||
Index {
|
||||
indexee: Box<Expression>,
|
||||
indexers: Vec<Expression>,
|
||||
},
|
||||
IfExpression {
|
||||
discriminator: Box<Discriminator>,
|
||||
discriminator: Option<Box<Expression>>,
|
||||
body: Box<IfExpressionBody>,
|
||||
},
|
||||
WhileExpression {
|
||||
@@ -111,39 +205,49 @@ pub enum ExpressionType {
|
||||
},
|
||||
Lambda {
|
||||
params: Vec<FormalParam>,
|
||||
type_anno: Option<TypeIdentifier>,
|
||||
body: Block,
|
||||
},
|
||||
ListLiteral(Vec<Expression>),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum Discriminator {
|
||||
Simple(Expression),
|
||||
BinOp(Expression, BinOp)
|
||||
pub enum InvocationArgument {
|
||||
Positional(Expression),
|
||||
Keyword {
|
||||
name: Rc<String>,
|
||||
expr: Expression,
|
||||
},
|
||||
Ignored
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum IfExpressionBody {
|
||||
SimpleConditional(Block, Option<Block>),
|
||||
SimplePatternMatch(Pattern, Block, Option<Block>),
|
||||
GuardList(Vec<GuardArm>)
|
||||
SimpleConditional {
|
||||
then_case: Block,
|
||||
else_case: Option<Block>
|
||||
},
|
||||
SimplePatternMatch {
|
||||
pattern: Pattern,
|
||||
then_case: Block,
|
||||
else_case: Option<Block>
|
||||
},
|
||||
CondList(Vec<ConditionArm>)
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct GuardArm {
|
||||
pub guard: Guard,
|
||||
pub struct ConditionArm {
|
||||
pub condition: Condition,
|
||||
pub guard: Option<Expression>,
|
||||
pub body: Block,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum Guard {
|
||||
Pat(Pattern),
|
||||
HalfExpr(HalfExpr)
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct HalfExpr {
|
||||
pub op: Option<BinOp>,
|
||||
pub expr: ExpressionType,
|
||||
pub enum Condition {
|
||||
Pattern(Pattern),
|
||||
TruncatedOp(BinOp, Expression),
|
||||
Expression(Expression),
|
||||
Else,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
@@ -151,19 +255,19 @@ pub enum Pattern {
|
||||
Ignored,
|
||||
TuplePattern(Vec<Pattern>),
|
||||
Literal(PatternLiteral),
|
||||
TupleStruct(Rc<String>, Vec<Pattern>),
|
||||
Record(Rc<String>, Vec<(Rc<String>, Pattern)>),
|
||||
TupleStruct(QualifiedName, Vec<Pattern>),
|
||||
Record(QualifiedName, Vec<(Rc<String>, Pattern)>),
|
||||
VarOrName(QualifiedName),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum PatternLiteral {
|
||||
NumPattern {
|
||||
neg: bool,
|
||||
num: ExpressionType,
|
||||
num: ExpressionKind,
|
||||
},
|
||||
StringPattern(Rc<String>),
|
||||
BoolPattern(bool),
|
||||
VarPattern(Rc<String>)
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
@@ -177,3 +281,27 @@ pub enum ForBody {
|
||||
MonadicReturn(Expression),
|
||||
StatementBlock(Block),
|
||||
}
|
||||
|
||||
#[derive(Debug, Derivative, Clone)]
|
||||
#[derivative(PartialEq)]
|
||||
pub struct ImportSpecifier {
|
||||
#[derivative(PartialEq="ignore")]
|
||||
pub id: ItemId,
|
||||
pub path_components: Vec<Rc<String>>,
|
||||
pub imported_names: ImportedNames
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum ImportedNames {
|
||||
All,
|
||||
LastOfPath,
|
||||
List(Vec<Rc<String>>)
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct ModuleSpecifier {
|
||||
pub name: Rc<String>,
|
||||
pub contents: Vec<Statement>,
|
||||
}
|
||||
|
||||
|
||||
112  schala-lang/language/src/ast/operators.rs  (new file)
@@ -0,0 +1,112 @@
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::tokenizing::TokenKind;
|
||||
use crate::builtin::Builtin;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct PrefixOp {
|
||||
sigil: Rc<String>,
|
||||
pub builtin: Option<Builtin>,
|
||||
}
|
||||
|
||||
impl PrefixOp {
|
||||
#[allow(dead_code)]
|
||||
pub fn sigil(&self) -> &Rc<String> {
|
||||
&self.sigil
|
||||
}
|
||||
pub fn is_prefix(op: &str) -> bool {
|
||||
match op {
|
||||
"+" => true,
|
||||
"-" => true,
|
||||
"!" => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for PrefixOp {
|
||||
type Err = ();
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
use Builtin::*;
|
||||
|
||||
let builtin = match s {
|
||||
"+" => Ok(Increment),
|
||||
"-" => Ok(Negate),
|
||||
"!" => Ok(BooleanNot),
|
||||
_ => Err(())
|
||||
};
|
||||
|
||||
builtin.map(|builtin| PrefixOp { sigil: Rc::new(s.to_string()), builtin: Some(builtin) })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct BinOp {
|
||||
sigil: Rc<String>,
|
||||
pub builtin: Option<Builtin>,
|
||||
}
|
||||
|
||||
impl BinOp {
|
||||
pub fn from_sigil(sigil: &str) -> BinOp {
|
||||
let builtin = Builtin::from_str(sigil).ok();
|
||||
BinOp { sigil: Rc::new(sigil.to_string()), builtin }
|
||||
}
|
||||
pub fn sigil(&self) -> &Rc<String> {
|
||||
&self.sigil
|
||||
}
|
||||
pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
|
||||
let s = token_kind_to_sigil(tok)?;
|
||||
Some(BinOp::from_sigil(s))
|
||||
}
|
||||
|
||||
pub fn min_precedence() -> i32 {
|
||||
i32::min_value()
|
||||
}
|
||||
pub fn get_precedence_from_token(op_tok: &TokenKind) -> Option<i32> {
|
||||
let s = token_kind_to_sigil(op_tok)?;
|
||||
Some(binop_precedences(s))
|
||||
}
|
||||
|
||||
pub fn precedence(&self) -> i32 {
|
||||
binop_precedences(self.sigil.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
fn token_kind_to_sigil<'a>(tok: &'a TokenKind) -> Option<&'a str> {
|
||||
use self::TokenKind::*;
|
||||
Some(match tok {
|
||||
Operator(op) => op.as_str(),
|
||||
Period => ".",
|
||||
Pipe => "|",
|
||||
Slash => "/",
|
||||
LAngleBracket => "<",
|
||||
RAngleBracket => ">",
|
||||
Equals => "=",
|
||||
_ => return None
|
||||
})
|
||||
}
|
||||
|
||||
fn binop_precedences(s: &str) -> i32 {
|
||||
let default = 10_000_000;
|
||||
match s {
|
||||
"+" => 10,
|
||||
"-" => 10,
|
||||
"*" => 20,
|
||||
"/" => 20,
|
||||
"%" => 20,
|
||||
"++" => 30,
|
||||
"^" => 30,
|
||||
"&" => 20,
|
||||
"|" => 20,
|
||||
">" => 20,
|
||||
">=" => 20,
|
||||
"<" => 20,
|
||||
"<=" => 20,
|
||||
"==" => 40,
|
||||
"=" => 10,
|
||||
"<=>" => 30,
|
||||
_ => default,
|
||||
}
|
||||
}
|
||||
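Given the precedence table in `binop_precedences` above, `BinOp` values built with `from_sigil` compare as expected; a small test-style sketch assuming only this module:

```rust
#[test]
fn binop_precedence_follows_the_table() {
    let plus = BinOp::from_sigil("+");
    let times = BinOp::from_sigil("*");
    // "+" maps to 10 and "*" to 20, so multiplication binds tighter.
    assert!(times.precedence() > plus.precedence());
    assert_eq!(plus.sigil().as_str(), "+");
}
```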
41  schala-lang/language/src/ast/visitor.rs  (new file)
@@ -0,0 +1,41 @@
|
||||
use std::rc::Rc;
|
||||
use crate::ast::*;
|
||||
|
||||
//TODO maybe these functions should take closures that return a KeepRecursing | StopHere type,
|
||||
//or a tuple of (T, <that type>)
|
||||
|
||||
pub trait ASTVisitor: Sized {
|
||||
fn ast(&mut self, _ast: &AST) {}
|
||||
fn block(&mut self, _statements: &Vec<Statement>) {}
|
||||
fn statement(&mut self, _statement: &Statement) {}
|
||||
fn declaration(&mut self, _declaration: &Declaration) {}
|
||||
fn signature(&mut self, _signature: &Signature) {}
|
||||
fn type_declaration(&mut self, _name: &TypeSingletonName, _body: &TypeBody, _mutable: bool) {}
|
||||
fn type_alias(&mut self, _alias: &Rc<String>, _original: &Rc<String>) {}
|
||||
fn binding(&mut self, _name: &Rc<String>, _constant: bool, _type_anno: Option<&TypeIdentifier>, _expr: &Expression) {}
|
||||
fn implemention(&mut self, _type_name: &TypeIdentifier, _interface_name: Option<&TypeSingletonName>, _block: &Vec<Declaration>) {}
|
||||
fn interface(&mut self, _name: &Rc<String>, _signatures: &Vec<Signature>) {}
|
||||
fn expression(&mut self, _expression: &Expression) {}
|
||||
fn expression_kind(&mut self, _kind: &ExpressionKind) {}
|
||||
fn type_annotation(&mut self, _type_anno: Option<&TypeIdentifier>) {}
|
||||
fn named_struct(&mut self, _name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {}
|
||||
fn call(&mut self, _f: &Expression, _arguments: &Vec<InvocationArgument>) {}
|
||||
fn index(&mut self, _indexee: &Expression, _indexers: &Vec<Expression>) {}
|
||||
fn if_expression(&mut self, _discrim: Option<&Expression>, _body: &IfExpressionBody) {}
|
||||
fn condition_arm(&mut self, _arm: &ConditionArm) {}
|
||||
fn while_expression(&mut self, _condition: Option<&Expression>, _body: &Block) {}
|
||||
fn for_expression(&mut self, _enumerators: &Vec<Enumerator>, _body: &ForBody) {}
|
||||
fn lambda(&mut self, _params: &Vec<FormalParam>, _type_anno: Option<&TypeIdentifier>, _body: &Block) {}
|
||||
fn invocation_argument(&mut self, _arg: &InvocationArgument) {}
|
||||
fn formal_param(&mut self, _param: &FormalParam) {}
|
||||
fn import(&mut self, _import: &ImportSpecifier) {}
|
||||
fn module(&mut self, _module: &ModuleSpecifier) {}
|
||||
fn qualified_name(&mut self, _name: &QualifiedName) {}
|
||||
fn nat_literal(&mut self, _n: u64) {}
|
||||
fn float_literal(&mut self, _f: f64) {}
|
||||
fn string_literal(&mut self, _s: &Rc<String>) {}
|
||||
fn bool_literal(&mut self, _b: bool) {}
|
||||
fn binexp(&mut self, _op: &BinOp, _lhs: &Expression, _rhs: &Expression) {}
|
||||
fn prefix_exp(&mut self, _op: &PrefixOp, _arg: &Expression) {}
|
||||
fn pattern(&mut self, _pat: &Pattern) {}
|
||||
}
|
||||
41  schala-lang/language/src/ast/visitor_test.rs  (new file)
@@ -0,0 +1,41 @@
|
||||
#![cfg(test)]
|
||||
|
||||
use crate::ast::visitor::ASTVisitor;
|
||||
use crate::ast::walker;
|
||||
use crate::util::quick_ast;
|
||||
|
||||
struct Tester {
|
||||
count: u64,
|
||||
float_count: u64
|
||||
}
|
||||
|
||||
impl ASTVisitor for Tester {
|
||||
fn nat_literal(&mut self, _n: u64) {
|
||||
self.count += 1;
|
||||
}
|
||||
fn float_literal(&mut self, _f: f64) {
|
||||
self.float_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn foo() {
|
||||
let mut tester = Tester { count: 0, float_count: 0 };
|
||||
let (ast, _) = quick_ast(r#"
|
||||
import gragh
|
||||
|
||||
let a = 20 + 84
|
||||
let b = 28 + 1 + 2 + 2.0
|
||||
fn heh() {
|
||||
let m = 9
|
||||
|
||||
}
|
||||
|
||||
"#);
|
||||
|
||||
walker::walk_ast(&mut tester, &ast);
|
||||
|
||||
assert_eq!(tester.count, 6);
|
||||
assert_eq!(tester.float_count, 1);
|
||||
}
|
||||
schala-lang/language/src/ast/walker.rs (new file, 269 lines)
@@ -0,0 +1,269 @@
|
||||
#![allow(dead_code)]
|
||||
use std::rc::Rc;
|
||||
use crate::ast::*;
|
||||
use crate::ast::visitor::ASTVisitor;
|
||||
use crate::util::deref_optional_box;
|
||||
|
||||
pub fn walk_ast<V: ASTVisitor>(v: &mut V, ast: &AST) {
|
||||
v.ast(ast);
|
||||
walk_block(v, &ast.statements);
|
||||
}
|
||||
|
||||
fn walk_block<V: ASTVisitor>(v: &mut V, block: &Vec<Statement>) {
|
||||
for s in block {
|
||||
v.statement(s);
|
||||
statement(v, s);
|
||||
}
|
||||
}
|
||||
|
||||
fn statement<V: ASTVisitor>(v: &mut V, statement: &Statement) {
|
||||
use StatementKind::*;
|
||||
match statement.kind {
|
||||
Expression(ref expr) => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
Declaration(ref decl) => {
|
||||
v.declaration(decl);
|
||||
declaration(v, decl);
|
||||
},
|
||||
Import(ref import_spec) => v.import(import_spec),
|
||||
Module(ref module_spec) => {
|
||||
v.module(module_spec);
|
||||
walk_block(v, &module_spec.contents);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn declaration<V: ASTVisitor>(v: &mut V, decl: &Declaration) {
|
||||
use Declaration::*;
|
||||
match decl {
|
||||
FuncSig(sig) => {
|
||||
v.signature(&sig);
|
||||
signature(v, &sig);
|
||||
},
|
||||
FuncDecl(sig, block) => {
|
||||
v.signature(&sig);
|
||||
v.block(&block);
|
||||
walk_block(v, block);
|
||||
},
|
||||
TypeDecl { name, body, mutable } => v.type_declaration(name, body, *mutable),
|
||||
TypeAlias { alias, original} => v.type_alias(alias, original),
|
||||
Binding { name, constant, type_anno, expr } => {
|
||||
v.binding(name, *constant, type_anno.as_ref(), expr);
|
||||
v.type_annotation(type_anno.as_ref());
|
||||
v.expression(&expr);
|
||||
expression(v, &expr);
|
||||
},
|
||||
Impl { type_name, interface_name, block } => {
|
||||
v.implemention(type_name, interface_name.as_ref(), block);
|
||||
}
|
||||
Interface { name, signatures } => v.interface(name, signatures),
|
||||
}
|
||||
}
|
||||
|
||||
fn signature<V: ASTVisitor>(v: &mut V, signature: &Signature) {
|
||||
for p in signature.params.iter() {
|
||||
v.formal_param(p);
|
||||
}
|
||||
v.type_annotation(signature.type_anno.as_ref());
|
||||
for p in signature.params.iter() {
|
||||
formal_param(v, p);
|
||||
}
|
||||
}
|
||||
|
||||
fn expression<V: ASTVisitor>(v: &mut V, expression: &Expression) {
|
||||
v.expression_kind(&expression.kind);
|
||||
v.type_annotation(expression.type_anno.as_ref());
|
||||
expression_kind(v, &expression.kind);
|
||||
}
|
||||
|
||||
|
||||
fn call<V: ASTVisitor>(v: &mut V, f: &Expression, args: &Vec<InvocationArgument>) {
|
||||
v.expression(f);
|
||||
expression(v, f);
|
||||
for arg in args.iter() {
|
||||
v.invocation_argument(arg);
|
||||
invocation_argument(v, arg);
|
||||
}
|
||||
}
|
||||
|
||||
fn invocation_argument<V: ASTVisitor>(v: &mut V, arg: &InvocationArgument) {
|
||||
use InvocationArgument::*;
|
||||
match arg {
|
||||
Positional(expr) => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
Keyword { expr, .. } => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
Ignored => (),
|
||||
}
|
||||
}
|
||||
|
||||
fn index<V: ASTVisitor>(v: &mut V, indexee: &Expression, indexers: &Vec<Expression>) {
|
||||
v.expression(indexee);
|
||||
for i in indexers.iter() {
|
||||
v.expression(i);
|
||||
}
|
||||
}
|
||||
|
||||
fn named_struct<V: ASTVisitor>(v: &mut V, n: &QualifiedName, fields: &Vec<(Rc<String>, Expression)>) {
|
||||
v.qualified_name(n);
|
||||
for (_, expr) in fields.iter() {
|
||||
v.expression(expr);
|
||||
}
|
||||
}
|
||||
|
||||
fn lambda<V: ASTVisitor>(v: &mut V, params: &Vec<FormalParam>, type_anno: Option<&TypeIdentifier>, body: &Block) {
|
||||
for param in params {
|
||||
v.formal_param(param);
|
||||
formal_param(v, param);
|
||||
}
|
||||
v.type_annotation(type_anno);
|
||||
v.block(body);
|
||||
walk_block(v, body);
|
||||
}
|
||||
|
||||
fn formal_param<V: ASTVisitor>(v: &mut V, param: &FormalParam) {
|
||||
param.default.as_ref().map(|p| {
|
||||
v.expression(p);
|
||||
expression(v, p);
|
||||
});
|
||||
v.type_annotation(param.anno.as_ref());
|
||||
}
|
||||
|
||||
fn expression_kind<V: ASTVisitor>(v: &mut V, expression_kind: &ExpressionKind) {
|
||||
use ExpressionKind::*;
|
||||
match expression_kind {
|
||||
NatLiteral(n) => v.nat_literal(*n),
|
||||
FloatLiteral(f) => v.float_literal(*f),
|
||||
StringLiteral(s) => v.string_literal(s),
|
||||
BoolLiteral(b) => v.bool_literal(*b),
|
||||
BinExp(op, lhs, rhs) => {
|
||||
v.binexp(op, lhs, rhs);
|
||||
expression(v, lhs);
|
||||
expression(v, rhs);
|
||||
},
|
||||
PrefixExp(op, arg) => {
|
||||
v.prefix_exp(op, arg);
|
||||
expression(v, arg);
|
||||
}
|
||||
TupleLiteral(exprs) => {
|
||||
for expr in exprs {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
}
|
||||
},
|
||||
Value(name) => v.qualified_name(name),
|
||||
NamedStruct { name, fields } => {
|
||||
v.named_struct(name, fields);
|
||||
named_struct(v, name, fields);
|
||||
}
|
||||
Call { f, arguments } => {
|
||||
v.call(f, arguments);
|
||||
call(v, f, arguments);
|
||||
},
|
||||
Index { indexee, indexers } => {
|
||||
v.index(indexee, indexers);
|
||||
index(v, indexee, indexers);
|
||||
},
|
||||
IfExpression { discriminator, body } => {
|
||||
v.if_expression(deref_optional_box(discriminator), body);
|
||||
discriminator.as_ref().map(|d| expression(v, d));
|
||||
if_expression_body(v, body);
|
||||
},
|
||||
WhileExpression { condition, body } => v.while_expression(deref_optional_box(condition), body),
|
||||
ForExpression { enumerators, body } => v.for_expression(enumerators, body),
|
||||
Lambda { params , type_anno, body } => {
|
||||
v.lambda(params, type_anno.as_ref(), body);
|
||||
lambda(v, params, type_anno.as_ref(), body);
|
||||
},
|
||||
ListLiteral(exprs) => {
|
||||
for expr in exprs {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn if_expression_body<V: ASTVisitor>(v: &mut V, body: &IfExpressionBody) {
|
||||
use IfExpressionBody::*;
|
||||
match body {
|
||||
SimpleConditional { then_case, else_case } => {
|
||||
walk_block(v, then_case);
|
||||
else_case.as_ref().map(|block| walk_block(v, block));
|
||||
},
|
||||
SimplePatternMatch { pattern, then_case, else_case } => {
|
||||
v.pattern(pattern);
|
||||
walk_pattern(v, pattern);
|
||||
walk_block(v, then_case);
|
||||
else_case.as_ref().map(|block| walk_block(v, block));
|
||||
},
|
||||
CondList(arms) => {
|
||||
for arm in arms {
|
||||
v.condition_arm(arm);
|
||||
condition_arm(v, arm);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn condition_arm<V: ASTVisitor>(v: &mut V, arm: &ConditionArm) {
|
||||
use Condition::*;
|
||||
v.condition_arm(arm);
|
||||
match arm.condition {
|
||||
Pattern(ref pat) => {
|
||||
v.pattern(pat);
|
||||
walk_pattern(v, pat);
|
||||
},
|
||||
TruncatedOp(ref _binop, ref expr) => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
Expression(ref expr) => {
|
||||
v.expression(expr);
|
||||
expression(v, expr);
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
arm.guard.as_ref().map(|guard| {
|
||||
v.expression(guard);
|
||||
expression(v, guard);
|
||||
});
|
||||
v.block(&arm.body);
|
||||
walk_block(v, &arm.body);
|
||||
}
|
||||
|
||||
fn walk_pattern<V: ASTVisitor>(v: &mut V, pat: &Pattern) {
|
||||
use Pattern::*;
|
||||
match pat {
|
||||
TuplePattern(patterns) => {
|
||||
for pat in patterns {
|
||||
v.pattern(pat);
|
||||
walk_pattern(v, pat);
|
||||
}
|
||||
},
|
||||
TupleStruct(qualified_name, patterns) => {
|
||||
v.qualified_name(qualified_name);
|
||||
for pat in patterns {
|
||||
v.pattern(pat);
|
||||
walk_pattern(v, pat);
|
||||
}
|
||||
},
|
||||
Record(qualified_name, name_and_patterns) => {
|
||||
v.qualified_name(qualified_name);
|
||||
for (_, pat) in name_and_patterns {
|
||||
v.pattern(pat);
|
||||
walk_pattern(v, pat);
|
||||
}
|
||||
},
|
||||
VarOrName(qualified_name) => {
|
||||
v.qualified_name(qualified_name);
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
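Together, visitor.rs and walker.rs give a read-only traversal API: implement only the hooks you need and hand the visitor to walk_ast. A minimal usage sketch (not part of this diff) that counts every QualifiedName in a program:

use crate::ast::{AST, QualifiedName};
use crate::ast::visitor::ASTVisitor;
use crate::ast::walker;

struct NameCounter { names: usize }

impl ASTVisitor for NameCounter {
    // Only override the hook we care about; every other node type falls
    // through to the empty default methods on the trait.
    fn qualified_name(&mut self, _name: &QualifiedName) {
        self.names += 1;
    }
}

fn count_names(ast: &AST) -> usize {
    let mut counter = NameCounter { names: 0 };
    walker::walk_ast(&mut counter, ast);
    counter.names
}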
@@ -1,147 +1,102 @@
|
||||
use std::rc::Rc;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
use std::str::FromStr;
|
||||
|
||||
use tokenizing::TokenType;
|
||||
use self::BuiltinTypeSpecifier::*;
|
||||
use self::BuiltinTConst::*;
|
||||
use crate::typechecking::{TypeConst, Type};
|
||||
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum BuiltinTypeSpecifier {
|
||||
Const(BuiltinTConst),
|
||||
Func(Box<BuiltinTypeSpecifier>, Box<BuiltinTypeSpecifier>),
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum Builtin {
|
||||
Add,
|
||||
Increment,
|
||||
Subtract,
|
||||
Negate,
|
||||
Multiply,
|
||||
Divide,
|
||||
Quotient,
|
||||
Modulo,
|
||||
Exponentiation,
|
||||
BitwiseAnd,
|
||||
BitwiseOr,
|
||||
BooleanAnd,
|
||||
BooleanOr,
|
||||
BooleanNot,
|
||||
Equality,
|
||||
LessThan,
|
||||
LessThanOrEqual,
|
||||
GreaterThan,
|
||||
GreaterThanOrEqual,
|
||||
Comparison,
|
||||
FieldAccess,
|
||||
IOPrint,
|
||||
IOPrintLn,
|
||||
IOGetLine,
|
||||
Assignment,
|
||||
Concatenate,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum BuiltinTConst {
|
||||
Nat,
|
||||
Int,
|
||||
Float,
|
||||
StringT,
|
||||
Bool,
|
||||
}
|
||||
|
||||
impl fmt::Display for BuiltinTypeSpecifier {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
impl Builtin {
|
||||
pub fn get_type(&self) -> Type {
|
||||
use Builtin::*;
|
||||
match self {
|
||||
Add => ty!(Nat -> Nat -> Nat),
|
||||
Subtract => ty!(Nat -> Nat -> Nat),
|
||||
Multiply => ty!(Nat -> Nat -> Nat),
|
||||
Divide => ty!(Nat -> Nat -> Float),
|
||||
Quotient => ty!(Nat -> Nat -> Nat),
|
||||
Modulo => ty!(Nat -> Nat -> Nat),
|
||||
Exponentiation => ty!(Nat -> Nat -> Nat),
|
||||
BitwiseAnd => ty!(Nat -> Nat -> Nat),
|
||||
BitwiseOr => ty!(Nat -> Nat -> Nat),
|
||||
BooleanAnd => ty!(Bool -> Bool -> Bool),
|
||||
BooleanOr => ty!(Bool -> Bool -> Bool),
|
||||
BooleanNot => ty!(Bool -> Bool),
|
||||
Equality => ty!(Nat -> Nat -> Bool),
|
||||
LessThan => ty!(Nat -> Nat -> Bool),
|
||||
LessThanOrEqual => ty!(Nat -> Nat -> Bool),
|
||||
GreaterThan => ty!(Nat -> Nat -> Bool),
|
||||
GreaterThanOrEqual => ty!(Nat -> Nat -> Bool),
|
||||
Comparison => ty!(Nat -> Nat -> Ordering),
|
||||
FieldAccess => ty!(Unit),
|
||||
IOPrint => ty!(Unit),
|
||||
IOPrintLn => ty!(Unit),
|
||||
IOGetLine => ty!(StringT),
|
||||
Assignment => ty!(Unit),
|
||||
Concatenate => ty!(StringT -> StringT -> StringT),
|
||||
Increment => ty!(Nat -> Int),
|
||||
Negate => ty!(Nat -> Int)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct BinOp {
|
||||
sigil: Rc<String>
|
||||
}
|
||||
impl FromStr for Builtin {
|
||||
type Err = ();
|
||||
|
||||
impl BinOp {
|
||||
pub fn from_sigil(sigil: &str) -> BinOp {
|
||||
BinOp { sigil: Rc::new(sigil.to_string()) }
|
||||
}
|
||||
pub fn sigil(&self) -> &Rc<String> {
|
||||
&self.sigil
|
||||
}
|
||||
pub fn from_sigil_token(tok: &TokenType) -> Option<BinOp> {
|
||||
use self::TokenType::*;
|
||||
let s = match tok {
|
||||
Operator(op) => op,
|
||||
Period => ".",
|
||||
Pipe => "|",
|
||||
Slash => "/",
|
||||
LAngleBracket => "<",
|
||||
RAngleBracket => ">",
|
||||
_ => return None
|
||||
};
|
||||
Some(BinOp::from_sigil(s))
|
||||
}
|
||||
/*
|
||||
pub fn get_type(&self) -> Result<Type, String> {
|
||||
let s = self.sigil.as_str();
|
||||
BINOPS.get(s).map(|x| x.0.clone()).ok_or(format!("Binop {} not found", s))
|
||||
}
|
||||
*/
|
||||
pub fn min_precedence() -> i32 {
|
||||
i32::min_value()
|
||||
}
|
||||
pub fn get_precedence_from_token(op: &TokenType) -> Option<i32> {
|
||||
use self::TokenType::*;
|
||||
let s = match op {
|
||||
Operator(op) => op,
|
||||
Period => ".",
|
||||
Pipe => "|",
|
||||
Slash => "/",
|
||||
LAngleBracket => "<",
|
||||
RAngleBracket => ">",
|
||||
_ => return None
|
||||
};
|
||||
let default = 10_000_000;
|
||||
Some(BINOPS.get(s).map(|x| x.2.clone()).unwrap_or_else(|| {
|
||||
println!("Warning: operator {} not defined", s);
|
||||
default
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn get_precedence(&self) -> i32 {
|
||||
let s: &str = &self.sigil;
|
||||
let default = 10_000_000;
|
||||
BINOPS.get(s).map(|x| x.2.clone()).unwrap_or_else(|| {
|
||||
println!("Warning: operator {} not defined", s);
|
||||
default
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
use Builtin::*;
|
||||
Ok(match s {
|
||||
"+" => Add,
|
||||
"-" => Subtract,
|
||||
"*" => Multiply,
|
||||
"/" => Divide,
|
||||
"quot" => Quotient,
|
||||
"%" => Modulo,
|
||||
"++" => Concatenate,
|
||||
"^" => Exponentiation,
|
||||
"&" => BitwiseAnd,
|
||||
"&&" => BooleanAnd,
|
||||
"|" => BitwiseOr,
|
||||
"||" => BooleanOr,
|
||||
"!" => BooleanNot,
|
||||
">" => GreaterThan,
|
||||
">=" => GreaterThanOrEqual,
|
||||
"<" => LessThan,
|
||||
"<=" => LessThanOrEqual,
|
||||
"==" => Equality,
|
||||
"=" => Assignment,
|
||||
"<=>" => Comparison,
|
||||
"." => FieldAccess,
|
||||
"print" => IOPrint,
|
||||
"println" => IOPrintLn,
|
||||
"getline" => IOGetLine,
|
||||
_ => return Err(())
|
||||
})
|
||||
}
|
||||
}
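With FromStr in place, operator sigils resolve directly to Builtin values; a small usage sketch (illustrative, not from the diff):

use std::str::FromStr;
use crate::builtin::Builtin;

fn resolve(sigil: &str) -> Option<Builtin> {
    // from_str returns Err(()) for unrecognized operators, which maps to None.
    Builtin::from_str(sigil).ok()
}

// resolve("<=") == Some(Builtin::LessThanOrEqual); resolve("<~>") == None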
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct PrefixOp {
|
||||
sigil: Rc<String>
|
||||
}
|
||||
|
||||
impl PrefixOp {
|
||||
pub fn from_sigil(sigil: &str) -> PrefixOp {
|
||||
PrefixOp { sigil: Rc::new(sigil.to_string()) }
|
||||
}
|
||||
pub fn sigil(&self) -> &Rc<String> {
|
||||
&self.sigil
|
||||
}
|
||||
pub fn is_prefix(op: &str) -> bool {
|
||||
PREFIX_OPS.get(op).is_some()
|
||||
}
|
||||
/*
|
||||
pub fn get_type(&self) -> Result<Type, String> {
|
||||
let s = self.sigil.as_str();
|
||||
PREFIX_OPS.get(s).map(|x| x.0.clone()).ok_or(format!("Prefix op {} not found", s))
|
||||
}
|
||||
*/
|
||||
}
|
||||
lazy_static! {
|
||||
static ref PREFIX_OPS: HashMap<&'static str, (BuiltinTypeSpecifier, ())> =
|
||||
hashmap! {
|
||||
"+" => (Func(bx!(Const(Int)), bx!(Const(Int))), ()),
|
||||
"-" => (Func(bx!(Const(Int)), bx!(Const(Int))), ()),
|
||||
"!" => (Func(bx!(Const(Bool)), bx!(Const(Bool))), ()),
|
||||
};
|
||||
}
|
||||
|
||||
/* the second tuple member is a placeholder for when I want to make evaluation rules tied to the
|
||||
* binop definition */
|
||||
lazy_static! {
|
||||
static ref BINOPS: HashMap<&'static str, (BuiltinTypeSpecifier, (), i32)> =
|
||||
hashmap! {
|
||||
"+" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 10),
|
||||
"-" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 10),
|
||||
"*" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"/" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Float))))), (), 20),
|
||||
"//" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20), //TODO change this to `quot`
|
||||
"%" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"++" => (Func(bx!(Const(StringT)), bx!(Func(bx!(Const(StringT)), bx!(Const(StringT))))), (), 30),
|
||||
"^" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"&" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"|" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
">" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
">=" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"<" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"<=" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"==" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"=" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"<=>" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
};
|
||||
}
|
||||
|
||||
schala-lang/language/src/debugging.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
use crate::ast::*;

impl AST {
    pub fn compact_debug(&self) -> String {
        format!("{:?}", self)
    }
    pub fn expanded_debug(&self) -> String {
        format!("{:#?}", self)
    }
}
@@ -1,39 +1,42 @@
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
use std::fmt::Write;
|
||||
use std::io;
|
||||
|
||||
use itertools::Itertools;
|
||||
|
||||
use util::ScopeStack;
|
||||
use reduced_ast::{ReducedAST, Stmt, Expr, Lit, Func, Alternative};
|
||||
use symbol_table::{SymbolSpec, Symbol, SymbolTable};
|
||||
use crate::schala::SymbolTableHandle;
|
||||
use crate::util::ScopeStack;
|
||||
use crate::reduced_ast::{BoundVars, ReducedAST, Stmt, Expr, Lit, Func, Alternative, Subpattern};
|
||||
use crate::symbol_table::{SymbolSpec, Symbol, SymbolTable, FullyQualifiedSymbolName};
|
||||
use crate::builtin::Builtin;
|
||||
|
||||
mod test;
|
||||
|
||||
pub struct State<'a> {
|
||||
values: ScopeStack<'a, Rc<String>, ValueEntry>,
|
||||
symbol_table_handle: Rc<RefCell<SymbolTable>>,
|
||||
}
|
||||
|
||||
macro_rules! builtin_binding {
|
||||
($name:expr, $values:expr) => {
|
||||
$values.insert(Rc::new(format!($name)), ValueEntry::Binding { constant: true, val: Node::Expr(Expr::Func(Func::BuiltIn(Rc::new(format!($name))))) });
|
||||
}
|
||||
}
|
||||
|
||||
//TODO add a more concise way of getting a new frame
|
||||
impl<'a> State<'a> {
|
||||
pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> State<'a> {
|
||||
let mut values = ScopeStack::new(Some(format!("global")));
|
||||
builtin_binding!("print", values);
|
||||
builtin_binding!("println", values);
|
||||
builtin_binding!("getline", values);
|
||||
State { values, symbol_table_handle }
|
||||
pub fn new() -> State<'a> {
|
||||
let values = ScopeStack::new(Some(format!("global")));
|
||||
State { values }
|
||||
}
|
||||
|
||||
pub fn debug_print(&self) -> String {
|
||||
format!("Values: {:?}", self.values)
|
||||
}
|
||||
|
||||
fn new_frame(&'a self, items: &'a Vec<Node>, bound_vars: &BoundVars) -> State<'a> {
|
||||
let mut inner_state = State {
|
||||
values: self.values.new_scope(None),
|
||||
};
|
||||
for (bound_var, val) in bound_vars.iter().zip(items.iter()) {
|
||||
if let Some(bv) = bound_var.as_ref() {
|
||||
inner_state.values.insert(bv.clone(), ValueEntry::Binding { constant: true, val: val.clone() });
|
||||
}
|
||||
}
|
||||
inner_state
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -72,6 +75,12 @@ impl Node {
|
||||
Node::PrimTuple { items } => format!("{}", paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
|
||||
}
|
||||
}
|
||||
fn is_true(&self) -> bool {
|
||||
match self {
|
||||
Node::Expr(Expr::Lit(crate::reduced_ast::Lit::Bool(true))) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -101,21 +110,34 @@ impl Expr {
|
||||
StringLit(s) => format!("\"{}\"", s),
|
||||
},
|
||||
Expr::Func(f) => match f {
|
||||
BuiltIn(name) => format!("<built-in function '{}'>", name),
|
||||
BuiltIn(builtin) => format!("<built-in function '{:?}'>", builtin),
|
||||
UserDefined { name: None, .. } => format!("<function>"),
|
||||
UserDefined { name: Some(name), .. } => format!("<function '{}'>", name),
|
||||
},
|
||||
Expr::Constructor {
|
||||
type_name: _, name, arity, ..
|
||||
} => if *arity == 0 {
|
||||
format!("{}", name)
|
||||
} else {
|
||||
format!("<data constructor '{}'>", name)
|
||||
Expr::Constructor { type_name, arity, .. } => {
|
||||
format!("<constructor for `{}` arity {}>", type_name, arity)
|
||||
},
|
||||
Expr::Tuple(exprs) => paren_wrapped_vec(exprs.iter().map(|x| x.to_repl())),
|
||||
_ => format!("{:?}", self),
|
||||
}
|
||||
}
|
||||
|
||||
fn replace_conditional_target_sigil(self, replacement: &Expr) -> Expr {
|
||||
use self::Expr::*;
|
||||
|
||||
match self {
|
||||
ConditionalTargetSigilValue => replacement.clone(),
|
||||
Unit | Lit(_) | Func(_) | Sym(_) | Constructor { .. } |
|
||||
CaseMatch { .. } | UnimplementedSigilValue | ReductionError(_) => self,
|
||||
Tuple(exprs) => Tuple(exprs.into_iter().map(|e| e.replace_conditional_target_sigil(replacement)).collect()),
|
||||
Call { f, args } => {
|
||||
let new_args = args.into_iter().map(|e| e.replace_conditional_target_sigil(replacement)).collect();
|
||||
Call { f, args: new_args }
|
||||
},
|
||||
Conditional { .. } => panic!("Dunno if I need this, but if so implement"),
|
||||
Assign { .. } => panic!("I'm pretty sure I don't need this"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> State<'a> {
|
||||
@@ -129,7 +151,9 @@ impl<'a> State<'a> {
|
||||
|
||||
for statement in ast.0 {
|
||||
match self.statement(statement) {
|
||||
Ok(Some(ref output)) if repl => acc.push(Ok(output.to_repl())),
|
||||
Ok(Some(ref output)) if repl => {
|
||||
acc.push(Ok(output.to_repl()))
|
||||
},
|
||||
Ok(_) => (),
|
||||
Err(error) => {
|
||||
acc.push(Err(format!("Runtime error: {}", error)));
|
||||
@@ -182,7 +206,10 @@ impl<'a> State<'a> {
|
||||
Node::Expr(expr) => match expr {
|
||||
literal @ Lit(_) => Ok(Node::Expr(literal)),
|
||||
Call { box f, args } => self.call_expression(f, args),
|
||||
Val(v) => self.value(v),
|
||||
Sym(name) => Ok(match self.values.lookup(&name) {
|
||||
Some(ValueEntry::Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Could not look up symbol {}", name))
|
||||
}),
|
||||
Constructor { arity, ref name, tag, .. } if arity == 0 => Ok(Node::PrimObject { name: name.clone(), tag, items: vec![] }),
|
||||
constructor @ Constructor { .. } => Ok(Node::Expr(constructor)),
|
||||
func @ Func(_) => Ok(Node::Expr(func)),
|
||||
@@ -194,7 +221,9 @@ impl<'a> State<'a> {
|
||||
Assign { box val, box expr } => self.assign_expression(val, expr),
|
||||
Unit => Ok(Node::Expr(Unit)),
|
||||
CaseMatch { box cond, alternatives } => self.case_match_expression(cond, alternatives),
|
||||
UnimplementedSigilValue => Err(format!("Sigil value eval not implemented"))
|
||||
ConditionalTargetSigilValue => Ok(Node::Expr(ConditionalTargetSigilValue)),
|
||||
UnimplementedSigilValue => Err(format!("Sigil value eval not implemented")),
|
||||
ReductionError(err) => Err(format!("Reduction error: {}", err)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -210,7 +239,7 @@ impl<'a> State<'a> {
|
||||
|
||||
fn apply_data_constructor(&mut self, _type_name: Rc<String>, name: Rc<String>, tag: usize, arity: usize, args: Vec<Expr>) -> EvalResult<Node> {
|
||||
if arity != args.len() {
|
||||
return Err(format!("Data constructor {} requires {} args", name, arity));
|
||||
return Err(format!("Data constructor {} requires {} arg(s)", name, arity));
|
||||
}
|
||||
|
||||
let evaled_args = args.into_iter().map(|expr| self.expression(Node::Expr(expr))).collect::<Result<Vec<Node>,_>>()?;
|
||||
@@ -224,7 +253,7 @@ impl<'a> State<'a> {
|
||||
|
||||
fn apply_function(&mut self, f: Func, args: Vec<Expr>) -> EvalResult<Node> {
|
||||
match f {
|
||||
Func::BuiltIn(sigil) => Ok(Node::Expr(self.apply_builtin(sigil, args)?)),
|
||||
Func::BuiltIn(builtin) => Ok(self.apply_builtin(builtin, args)?),
|
||||
Func::UserDefined { params, body, name } => {
|
||||
|
||||
if params.len() != args.len() {
|
||||
@@ -232,7 +261,6 @@ impl<'a> State<'a> {
|
||||
}
|
||||
let mut func_state = State {
|
||||
values: self.values.new_scope(name.map(|n| format!("{}", n))),
|
||||
symbol_table_handle: self.symbol_table_handle.clone(),
|
||||
};
|
||||
for (param, val) in params.into_iter().zip(args.into_iter()) {
|
||||
let val = func_state.expression(Node::Expr(val))?;
|
||||
@@ -244,81 +272,84 @@ impl<'a> State<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_builtin(&mut self, name: Rc<String>, args: Vec<Expr>) -> EvalResult<Expr> {
|
||||
fn apply_builtin(&mut self, builtin: Builtin, args: Vec<Expr>) -> EvalResult<Node> {
|
||||
use self::Expr::*;
|
||||
use self::Lit::*;
|
||||
let evaled_args: Result<Vec<Expr>, String> = args.into_iter().map(|arg| {
|
||||
match self.expression(Node::Expr(arg)) {
|
||||
Ok(Node::Expr(e)) => Ok(e),
|
||||
Ok(Node::PrimTuple { .. }) => Err(format!("Trying to apply a builtin to a tuple")),
|
||||
Ok(Node::PrimObject { .. }) => Err(format!("Trying to apply a builtin to a primitive object")),
|
||||
Err(e) => Err(e)
|
||||
}
|
||||
}).collect();
|
||||
use Builtin::*;
|
||||
|
||||
let evaled_args: Result<Vec<Node>, String> = args.into_iter().map(|arg| self.expression(arg.to_node()))
|
||||
.collect();
|
||||
let evaled_args = evaled_args?;
|
||||
|
||||
Ok(match (name.as_str(), evaled_args.as_slice()) {
|
||||
/* binops */
|
||||
("+", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l + r)),
|
||||
("++", &[Lit(StringLit(ref s1)), Lit(StringLit(ref s2))]) => Lit(StringLit(Rc::new(format!("{}{}", s1, s2)))),
|
||||
("-", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l - r)),
|
||||
("*", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l * r)),
|
||||
("/", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Float((l as f64)/ (r as f64))),
|
||||
("//", &[Lit(Nat(l)), Lit(Nat(r))]) => if r == 0 {
|
||||
return Err(format!("divide by zero"));
|
||||
} else {
|
||||
Lit(Nat(l / r))
|
||||
Ok(match (builtin, evaled_args.as_slice()) {
|
||||
(FieldAccess, &[Node::PrimObject { .. }]) => {
|
||||
//TODO implement field access
|
||||
unimplemented!()
|
||||
},
|
||||
("%", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l % r)),
|
||||
("^", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l ^ r)),
|
||||
("&", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l & r)),
|
||||
("|", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l | r)),
|
||||
(binop, &[Node::Expr(ref lhs), Node::Expr(ref rhs)]) => match (binop, lhs, rhs) {
|
||||
/* binops */
|
||||
(Add, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l + r)),
|
||||
(Concatenate, Lit(StringLit(ref s1)), Lit(StringLit(ref s2))) => Lit(StringLit(Rc::new(format!("{}{}", s1, s2)))),
|
||||
(Subtract, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l - r)),
|
||||
(Multiply, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l * r)),
|
||||
(Divide, Lit(Nat(l)), Lit(Nat(r))) => Lit(Float((*l as f64)/ (*r as f64))),
|
||||
(Quotient, Lit(Nat(l)), Lit(Nat(r))) => if *r == 0 {
|
||||
return Err(format!("divide by zero"));
|
||||
} else {
|
||||
Lit(Nat(l / r))
|
||||
},
|
||||
(Modulo, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l % r)),
|
||||
(Exponentiation, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l ^ r)),
|
||||
(BitwiseAnd, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l & r)),
|
||||
(BitwiseOr, Lit(Nat(l)), Lit(Nat(r))) => Lit(Nat(l | r)),
|
||||
|
||||
/* comparisons */
|
||||
("==", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l == r)),
|
||||
("==", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l == r)),
|
||||
("==", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l == r)),
|
||||
("==", &[Lit(Bool(l)), Lit(Bool(r))]) => Lit(Bool(l == r)),
|
||||
("==", &[Lit(StringLit(ref l)), Lit(StringLit(ref r))]) => Lit(Bool(l == r)),
|
||||
/* comparisons */
|
||||
(Equality, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l == r)),
|
||||
(Equality, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l == r)),
|
||||
(Equality, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l == r)),
|
||||
(Equality, Lit(Bool(l)), Lit(Bool(r))) => Lit(Bool(l == r)),
|
||||
(Equality, Lit(StringLit(ref l)), Lit(StringLit(ref r))) => Lit(Bool(l == r)),
|
||||
|
||||
("<", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l < r)),
|
||||
("<", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l < r)),
|
||||
("<", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l < r)),
|
||||
(LessThan, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l < r)),
|
||||
(LessThan, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l < r)),
|
||||
(LessThan, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l < r)),
|
||||
|
||||
("<=", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l <= r)),
|
||||
("<=", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l <= r)),
|
||||
("<=", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l <= r)),
|
||||
(LessThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l <= r)),
|
||||
(LessThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l <= r)),
|
||||
(LessThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l <= r)),
|
||||
|
||||
(">", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l > r)),
|
||||
(">", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l > r)),
|
||||
(">", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l > r)),
|
||||
|
||||
(">=", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l >= r)),
|
||||
(">=", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l >= r)),
|
||||
(">=", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l >= r)),
|
||||
|
||||
/* prefix ops */
|
||||
("!", &[Lit(Bool(true))]) => Lit(Bool(false)),
|
||||
("!", &[Lit(Bool(false))]) => Lit(Bool(true)),
|
||||
("-", &[Lit(Nat(n))]) => Lit(Int(-1*(n as i64))),
|
||||
("-", &[Lit(Int(n))]) => Lit(Int(-1*(n as i64))),
|
||||
("+", &[Lit(Int(n))]) => Lit(Int(n)),
|
||||
("+", &[Lit(Nat(n))]) => Lit(Nat(n)),
|
||||
(GreaterThan, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l > r)),
|
||||
(GreaterThan, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l > r)),
|
||||
(GreaterThan, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l > r)),
|
||||
|
||||
(GreaterThanOrEqual, Lit(Nat(l)), Lit(Nat(r))) => Lit(Bool(l >= r)),
|
||||
(GreaterThanOrEqual, Lit(Int(l)), Lit(Int(r))) => Lit(Bool(l >= r)),
|
||||
(GreaterThanOrEqual, Lit(Float(l)), Lit(Float(r))) => Lit(Bool(l >= r)),
|
||||
_ => return Err("No valid binop".to_string())
|
||||
}.to_node(),
|
||||
(prefix, &[Node::Expr(ref arg)]) => match (prefix, arg) {
|
||||
(BooleanNot, Lit(Bool(true))) => Lit(Bool(false)),
|
||||
(BooleanNot, Lit(Bool(false))) => Lit(Bool(true)),
|
||||
(Negate, Lit(Nat(n))) => Lit(Int(-1*(*n as i64))),
|
||||
(Negate, Lit(Int(n))) => Lit(Int(-1*(*n as i64))),
|
||||
(Increment, Lit(Int(n))) => Lit(Int(*n)),
|
||||
(Increment, Lit(Nat(n))) => Lit(Nat(*n)),
|
||||
_ => return Err("No valid prefix op".to_string())
|
||||
}.to_node(),
|
||||
|
||||
/* builtin functions */
|
||||
("print", &[ref anything]) => {
|
||||
(IOPrint, &[ref anything]) => {
|
||||
print!("{}", anything.to_repl());
|
||||
Expr::Unit
|
||||
Expr::Unit.to_node()
|
||||
},
|
||||
("println", &[ref anything]) => {
|
||||
(IOPrintLn, &[ref anything]) => {
|
||||
println!("{}", anything.to_repl());
|
||||
Expr::Unit
|
||||
Expr::Unit.to_node()
|
||||
},
|
||||
("getline", &[]) => {
|
||||
(IOGetLine, &[]) => {
|
||||
let mut buf = String::new();
|
||||
io::stdin().read_line(&mut buf).expect("Error reading line in 'getline'");
|
||||
Lit(StringLit(Rc::new(buf.trim().to_string())))
|
||||
Lit(StringLit(Rc::new(buf.trim().to_string()))).to_node()
|
||||
},
|
||||
(x, args) => return Err(format!("bad or unimplemented builtin {:?} | {:?}", x, args)),
|
||||
})
|
||||
@@ -335,7 +366,7 @@ impl<'a> State<'a> {
|
||||
|
||||
fn assign_expression(&mut self, val: Expr, expr: Expr) -> EvalResult<Node> {
|
||||
let name = match val {
|
||||
Expr::Val(name) => name,
|
||||
Expr::Sym(name) => name,
|
||||
_ => return Err(format!("Trying to assign to a non-value")),
|
||||
};
|
||||
|
||||
@@ -351,234 +382,74 @@ impl<'a> State<'a> {
|
||||
Ok(Node::Expr(Expr::Unit))
|
||||
}
|
||||
|
||||
fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>) -> EvalResult<Node> {
|
||||
match self.expression(Node::Expr(cond))? {
|
||||
Node::PrimObject { tag, items, .. } => {
|
||||
for alt in alternatives {
|
||||
if alt.tag.map(|t| t == tag).unwrap_or(true) {
|
||||
let mut inner_state = State {
|
||||
values: self.values.new_scope(None),
|
||||
symbol_table_handle: self.symbol_table_handle.clone(),
|
||||
};
|
||||
for (bound_var, val) in alt.bound_vars.iter().zip(items.iter()) {
|
||||
if let Some(bv) = bound_var.as_ref() {
|
||||
inner_state.values.insert(bv.clone(), ValueEntry::Binding { constant: true, val: val.clone() });
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(guard_expr) = alt.guard {
|
||||
let evaled_guard = inner_state.expression(guard_expr.to_node());
|
||||
println!("EVALED GUARD: {:?}", evaled_guard);
|
||||
//continue
|
||||
}
|
||||
|
||||
return inner_state.block(alt.item)
|
||||
}
|
||||
}
|
||||
return Err(format!("PrimObject failed pattern match"));
|
||||
},
|
||||
Node::PrimTuple { .. } => Err(format!("Tuples not implemented")), //TODO make a distinction between not yet implemented and an actual runtime error
|
||||
Node::Expr(_e) => {
|
||||
for alt in alternatives {
|
||||
match (alt.guard, alt.tag) {
|
||||
(Some(ref guard_expr), None) => {
|
||||
match self.expression(guard_expr.clone().to_node())? {
|
||||
Node::Expr(Expr::Lit(::reduced_ast::Lit::Bool(true))) =>
|
||||
return self.block(alt.item),
|
||||
_ => continue,
|
||||
}
|
||||
},
|
||||
(None, None) => return self.block(alt.item),
|
||||
_ => return Err(format!("Shouldn't match an expr against a pattern"))
|
||||
}
|
||||
}
|
||||
return Err(format!("Expr Failed pattern match"));
|
||||
fn guard_passes(&mut self, guard: &Option<Expr>, cond: &Node) -> EvalResult<bool> {
|
||||
if let Some(ref guard_expr) = guard {
|
||||
let guard_expr = match cond {
|
||||
Node::Expr(ref e) => guard_expr.clone().replace_conditional_target_sigil(e),
|
||||
_ => guard_expr.clone()
|
||||
};
|
||||
Ok(self.expression(guard_expr.to_node())?.is_true())
|
||||
} else {
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
fn value(&mut self, name: Rc<String>) -> EvalResult<Node> {
|
||||
use self::ValueEntry::*;
|
||||
use self::Func::*;
|
||||
//TODO add a layer of indirection here to talk to the symbol table first, and only then look up
|
||||
//in the values table
|
||||
fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>) -> EvalResult<Node> {
|
||||
|
||||
let symbol_table = self.symbol_table_handle.borrow();
|
||||
let value = symbol_table.lookup_by_name(&name);
|
||||
Ok(match value {
|
||||
Some(Symbol { name, spec }) => match spec {
|
||||
//TODO I'll need this type_name later to do a table lookup
|
||||
SymbolSpec::DataConstructor { type_name: _type_name, type_args, .. } => {
|
||||
if type_args.len() == 0 {
|
||||
Node::PrimObject { name: name.clone(), tag: 0, items: vec![] }
|
||||
//TODO need to handle recursive subpatterns
|
||||
let all_subpatterns_pass = |state: &mut State, subpatterns: &Vec<Option<Subpattern>>, items: &Vec<Node>| -> EvalResult<bool> {
|
||||
|
||||
if subpatterns.len() == 0 {
|
||||
return Ok(true)
|
||||
}
|
||||
|
||||
if items.len() != subpatterns.len() {
|
||||
return Err(format!("Subpattern length isn't correct items {} subpatterns {}", items.len(), subpatterns.len()));
|
||||
}
|
||||
|
||||
for (maybe_subp, cond) in subpatterns.iter().zip(items.iter()) {
|
||||
if let Some(subp) = maybe_subp {
|
||||
if !state.guard_passes(&subp.guard, &cond)? {
|
||||
return Ok(false)
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
};
|
||||
|
||||
let cond = self.expression(Node::Expr(cond))?;
|
||||
for alt in alternatives {
|
||||
// no matter what type of condition we have, ignore alternative if the guard evaluates false
|
||||
if !self.guard_passes(&alt.matchable.guard, &cond)? {
|
||||
continue;
|
||||
}
|
||||
|
||||
match cond {
|
||||
Node::PrimObject { ref tag, ref items, .. } => {
|
||||
if alt.matchable.tag.map(|t| t == *tag).unwrap_or(true) {
|
||||
let mut inner_state = self.new_frame(items, &alt.matchable.bound_vars);
|
||||
if all_subpatterns_pass(&mut inner_state, &alt.matchable.subpatterns, items)? {
|
||||
return inner_state.block(alt.item);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
},
|
||||
Node::PrimTuple { ref items } => {
|
||||
let mut inner_state = self.new_frame(items, &alt.matchable.bound_vars);
|
||||
if all_subpatterns_pass(&mut inner_state, &alt.matchable.subpatterns, items)? {
|
||||
return inner_state.block(alt.item);
|
||||
} else {
|
||||
return Err(format!("This data constructor thing not done"))
|
||||
continue;
|
||||
}
|
||||
},
|
||||
SymbolSpec::Func(_) => match self.values.lookup(&name) {
|
||||
Some(Binding { val: Node::Expr(Expr::Func(UserDefined { name, params, body })), .. }) => {
|
||||
Node::Expr(Expr::Func(UserDefined { name: name.clone(), params: params.clone(), body: body.clone() }))
|
||||
},
|
||||
_ => unreachable!(),
|
||||
},
|
||||
},
|
||||
/* see if it's an ordinary variable TODO make variables go in symbol table */
|
||||
None => match self.values.lookup(&name) {
|
||||
Some(Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Couldn't find value {}", name)),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod eval_tests {
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
use tokenizing::{Token, tokenize};
|
||||
use ::parsing::ParseResult;
|
||||
use ::ast::AST;
|
||||
use symbol_table::SymbolTable;
|
||||
use eval::State;
|
||||
|
||||
fn parse(tokens: Vec<Token>) -> ParseResult<AST> {
|
||||
let mut parser = ::parsing::Parser::new(tokens);
|
||||
parser.parse()
|
||||
}
|
||||
|
||||
macro_rules! all_output {
|
||||
($string:expr) => {
|
||||
{
|
||||
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
|
||||
let mut state = State::new(symbol_table);
|
||||
let ast = parse(tokenize($string)).unwrap();
|
||||
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
|
||||
let reduced = ast.reduce(&state.symbol_table_handle.borrow());
|
||||
let all_output = state.evaluate(reduced, true);
|
||||
all_output
|
||||
Node::Expr(ref _e) => {
|
||||
if let None = alt.matchable.tag {
|
||||
return self.block(alt.item)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! test_in_fresh_env {
|
||||
($string:expr, $correct:expr) => {
|
||||
{
|
||||
let all_output = all_output!($string);
|
||||
let ref output = all_output.last().unwrap();
|
||||
assert_eq!(**output, Ok($correct.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_basic_eval() {
|
||||
test_in_fresh_env!("1 + 2", "3");
|
||||
test_in_fresh_env!("let mut a = 1; a = 2", "Unit");
|
||||
test_in_fresh_env!("let mut a = 1; a = 2; a", "2");
|
||||
test_in_fresh_env!(r#"("a", 1 + 2)"#, r#"("a", 3)"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_eval() {
|
||||
test_in_fresh_env!("fn oi(x) { x + 1 }; oi(4)", "5");
|
||||
test_in_fresh_env!("fn oi(x) { x + 1 }; oi(1+2)", "4");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scopes() {
|
||||
let scope_ok = r#"
|
||||
let a = 20
|
||||
fn haha() {
|
||||
let a = 10
|
||||
a
|
||||
}
|
||||
haha()
|
||||
"#;
|
||||
test_in_fresh_env!(scope_ok, "10");
|
||||
let scope_ok = r#"
|
||||
let a = 20
|
||||
fn haha() {
|
||||
let a = 10
|
||||
a
|
||||
}
|
||||
a
|
||||
"#;
|
||||
test_in_fresh_env!(scope_ok, "20");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn if_is_patterns() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let x = Some(9); if x is Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "9");
|
||||
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let x = None; if x is Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn full_if_matching() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let a = None
|
||||
if a { is None -> 4, is Some(x) -> x }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "4");
|
||||
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let a = Some(99)
|
||||
if a { is None -> 4, is Some(x) -> x }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "99");
|
||||
|
||||
let source = r#"
|
||||
let a = 10
|
||||
if a { is 10 -> "x", is 4 -> "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
|
||||
let source = r#"
|
||||
let a = 10
|
||||
if a { is 15 -> "x", is 10 -> "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"y\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn boolean_pattern() {
|
||||
let source = r#"
|
||||
let a = true
|
||||
if a {
|
||||
is true -> "x",
|
||||
is false -> "y"
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn boolean_pattern_2() {
|
||||
let source = r#"
|
||||
let a = false
|
||||
if a { is true -> "x", is false -> "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"y\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_pattern() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
if Some(10) {
|
||||
is _ -> "hella"
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"hella\"");
|
||||
Err(format!("{:?} failed pattern match", cond))
|
||||
}
|
||||
}
|
||||
|
||||
schala-lang/language/src/eval/test.rs (new file, 269 lines)
@@ -0,0 +1,269 @@
|
||||
#![cfg(test)]
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::symbol_table::SymbolTable;
|
||||
use crate::scope_resolution::ScopeResolver;
|
||||
use crate::reduced_ast::reduce;
|
||||
use crate::eval::State;
|
||||
|
||||
fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
|
||||
let (mut ast, source_map) = crate::util::quick_ast(input);
|
||||
let source_map = Rc::new(RefCell::new(source_map));
|
||||
let symbol_table = Rc::new(RefCell::new(SymbolTable::new(source_map)));
|
||||
symbol_table.borrow_mut().add_top_level_symbols(&ast).unwrap();
|
||||
{
|
||||
let mut scope_resolver = ScopeResolver::new(symbol_table.clone());
|
||||
let _ = scope_resolver.resolve(&mut ast);
|
||||
}
|
||||
|
||||
let reduced = reduce(&ast, &symbol_table.borrow());
|
||||
let mut state = State::new();
|
||||
let all_output = state.evaluate(reduced, true);
|
||||
all_output
|
||||
}
|
||||
|
||||
macro_rules! test_in_fresh_env {
|
||||
($string:expr, $correct:expr) => {
|
||||
{
|
||||
let all_output = evaluate_all_outputs($string);
|
||||
let ref output = all_output.last().unwrap();
|
||||
assert_eq!(**output, Ok($correct.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_basic_eval() {
|
||||
test_in_fresh_env!("1 + 2", "3");
|
||||
test_in_fresh_env!("let mut a = 1; a = 2", "Unit");
|
||||
/*
|
||||
test_in_fresh_env!("let mut a = 1; a = 2; a", "2");
|
||||
test_in_fresh_env!(r#"("a", 1 + 2)"#, r#"("a", 3)"#);
|
||||
*/
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_eval() {
|
||||
test_in_fresh_env!("- 13", "-13");
|
||||
test_in_fresh_env!("10 - 2", "8");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_eval() {
|
||||
test_in_fresh_env!("fn oi(x) { x + 1 }; oi(4)", "5");
|
||||
test_in_fresh_env!("fn oi(x) { x + 1 }; oi(1+2)", "4");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scopes() {
|
||||
let scope_ok = r#"
|
||||
let a = 20
|
||||
fn haha() {
|
||||
let a = 10
|
||||
a
|
||||
}
|
||||
haha()
|
||||
"#;
|
||||
test_in_fresh_env!(scope_ok, "10");
|
||||
let scope_ok = r#"
|
||||
let a = 20
|
||||
fn queque() {
|
||||
let a = 10
|
||||
a
|
||||
}
|
||||
a
|
||||
"#;
|
||||
test_in_fresh_env!(scope_ok, "20");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn if_is_patterns() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let x = Option::Some(9); if x is Option::Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "9");
|
||||
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let x = Option::None; if x is Option::Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn full_if_matching() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let a = Option::None
|
||||
if a { is Option::None then 4, is Option::Some(x) then x }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "4");
|
||||
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
let a = Option::Some(99)
|
||||
if a { is Option::None then 4, is Option::Some(x) then x }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "99");
|
||||
|
||||
let source = r#"
|
||||
let a = 10
|
||||
if a { is 10 then "x", is 4 then "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
|
||||
let source = r#"
|
||||
let a = 10
|
||||
if a { is 15 then "x", is 10 then "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"y\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_pattern() {
|
||||
let source = r#"
|
||||
let a = "foo"
|
||||
if a { is "foo" then "x", is _ then "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn boolean_pattern() {
|
||||
let source = r#"
|
||||
let a = true
|
||||
if a {
|
||||
is true then "x",
|
||||
is false then "y"
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"x\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn boolean_pattern_2() {
|
||||
let source = r#"
|
||||
let a = false
|
||||
if a { is true then "x", is false then "y" }
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"y\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_pattern() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
if Option::Some(10) {
|
||||
is _ then "hella"
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, "\"hella\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern() {
|
||||
let source = r#"
|
||||
if (1, 2) {
|
||||
is (1, x) then x,
|
||||
is _ then 99
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 2);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_2() {
|
||||
let source = r#"
|
||||
if (1, 2) {
|
||||
is (10, x) then x,
|
||||
is (y, x) then x + y
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_3() {
|
||||
let source = r#"
|
||||
if (1, 5) {
|
||||
is (10, x) then x,
|
||||
is (1, x) then x
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tuple_pattern_4() {
|
||||
let source = r#"
|
||||
if (1, 5) {
|
||||
is (10, x) then x,
|
||||
is (1, x) then x,
|
||||
}
|
||||
"#;
|
||||
test_in_fresh_env!(source, 5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prim_obj_pattern() {
|
||||
let source = r#"
|
||||
type Stuff = Mulch(Nat) | Jugs(Nat, String) | Mardok
|
||||
let a = Stuff::Mulch(20)
|
||||
let b = Stuff::Jugs(1, "haha")
|
||||
let c = Stuff::Mardok
|
||||
|
||||
let x = if a {
|
||||
is Stuff::Mulch(20) then "x",
|
||||
is _ then "ERR"
|
||||
}
|
||||
|
||||
let y = if b {
|
||||
is Stuff::Mulch(n) then "ERR",
|
||||
is Stuff::Jugs(2, _) then "ERR",
|
||||
is Stuff::Jugs(1, s) then s,
|
||||
is _ then "ERR",
|
||||
}
|
||||
|
||||
let z = if c {
|
||||
is Stuff::Jugs(_, _) then "ERR",
|
||||
is Stuff::Mardok then "NIGH",
|
||||
is _ then "ERR",
|
||||
}
|
||||
|
||||
(x, y, z)
|
||||
"#;
|
||||
test_in_fresh_env!(source, r#"("x", "haha", "NIGH")"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_lambda_syntax() {
|
||||
let source = r#"
|
||||
let q = \(x, y) { x * y }
|
||||
let x = q(5,2)
|
||||
let y = \(m, n, o) { m + n + o }(1,2,3)
|
||||
(x, y)
|
||||
"#;
|
||||
test_in_fresh_env!(source, r"(10, 6)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lambda_syntax_2() {
|
||||
let source = r#"
|
||||
fn milta() {
|
||||
\(x) { x + 33 }
|
||||
}
|
||||
milta()(10)
|
||||
"#;
|
||||
test_in_fresh_env!(source, "43");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import_all() {
|
||||
let source = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
import Option::*
|
||||
let x = Some(9); if x is Some(q) then { q } else { 0 }"#;
|
||||
test_in_fresh_env!(source, "9");
|
||||
}
|
||||
@@ -1,156 +1,45 @@
|
||||
#![feature(trace_macros)]
|
||||
#![feature(custom_attribute)]
|
||||
#![feature(unrestricted_attribute_tokens)]
|
||||
#![feature(slice_patterns, box_patterns, box_syntax)]
|
||||
#![feature(box_patterns, box_syntax, trace_macros, or_patterns)]
|
||||
|
||||
//! `schala-lang` is where the Schala programming language is actually implemented.
|
||||
//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
|
||||
//! `ProgrammingLanguageInterface` and the chain of compiler passes for it.
|
||||
|
||||
extern crate itertools;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
#[macro_use]
|
||||
extern crate maplit;
|
||||
#[macro_use]
|
||||
extern crate schala_repl;
|
||||
#[macro_use]
|
||||
extern crate schala_repl_codegen;
|
||||
#[macro_use]
|
||||
extern crate schala_lang_codegen;
|
||||
extern crate ena;
|
||||
extern crate derivative;
|
||||
extern crate colored;
|
||||
extern crate radix_trie;
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
use itertools::Itertools;
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, UnfinishedComputation, FinishedComputation};
|
||||
|
||||
macro_rules! bx {
|
||||
($e:expr) => { Box::new($e) }
|
||||
}
|
||||
|
||||
#[macro_use]
|
||||
mod util;
|
||||
mod builtin;
|
||||
#[macro_use]
|
||||
mod typechecking;
|
||||
mod debugging;
|
||||
|
||||
mod tokenizing;
|
||||
mod ast;
|
||||
mod parser;
|
||||
mod parsing;
|
||||
#[macro_use]
|
||||
mod symbol_table;
|
||||
mod typechecking;
|
||||
mod scope_resolution;
|
||||
mod builtin;
|
||||
mod reduced_ast;
|
||||
mod eval;
|
||||
mod source_map;
|
||||
|
||||
//trace_macros!(true);
|
||||
#[derive(ProgrammingLanguageInterface)]
|
||||
#[LanguageName = "Schala"]
|
||||
#[SourceFileExtension = "schala"]
|
||||
#[PipelineSteps(tokenizing, parsing(compact,expanded,trace), symbol_table, typechecking, ast_reducing, eval)]
|
||||
#[DocMethod = get_doc]
|
||||
#[HandleCustomInterpreterDirectives = handle_custom_interpreter_directives]
|
||||
pub struct Schala {
|
||||
state: eval::State<'static>,
|
||||
symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
|
||||
active_parser: Option<parsing::Parser>,
|
||||
}
|
||||
|
||||
impl Schala {
|
||||
fn get_doc(&self, commands: &Vec<&str>) -> Option<String> {
|
||||
Some(format!("Documentation on commands: {:?}", commands))
|
||||
}
|
||||
|
||||
fn handle_custom_interpreter_directives(&mut self, commands: &Vec<&str>) -> Option<String> {
|
||||
Some(format!("Schala-lang command: {:?} not supported", commands.get(0)))
|
||||
}
|
||||
}
|
||||
|
||||
impl Schala {
|
||||
fn new_blank_env() -> Schala {
|
||||
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
|
||||
Schala {
|
||||
symbol_table: symbols.clone(),
|
||||
state: eval::State::new(symbols),
|
||||
active_parser: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new() -> Schala {
|
||||
let prelude = r#"
|
||||
type Option<T> = Some(T) | None
|
||||
type Color = Red | Green | Blue
|
||||
type Ord = LT | EQ | GT
|
||||
"#;
|
||||
let mut s = Schala::new_blank_env();
|
||||
s.execute_pipeline(prelude, &EvalOptions::default());
|
||||
s
|
||||
}
|
||||
}
|
||||
|
||||
fn tokenizing(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
|
||||
let tokens = tokenizing::tokenize(input);
|
||||
comp.map(|comp| {
|
||||
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
|
||||
comp.add_artifact(TraceArtifact::new("tokens", token_string));
|
||||
});
|
||||
|
||||
let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
|
||||
if errors.len() == 0 {
|
||||
Ok(tokens)
|
||||
} else {
|
||||
Err(format!("{:?}", errors))
|
||||
}
|
||||
}
|
||||
|
||||
fn parsing(handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
|
||||
use parsing::Parser;
|
||||
|
||||
let mut parser = match handle.active_parser.take() {
|
||||
None => Parser::new(input),
|
||||
Some(parser) => parser
|
||||
};
|
||||
|
||||
let ast = parser.parse();
|
||||
let trace = parser.format_parse_trace();
|
||||
|
||||
comp.map(|comp| {
|
||||
//TODO need to control which of these debug stages get added
|
||||
let opt = comp.cur_debug_options.get(0).map(|s| s.clone());
|
||||
match opt {
|
||||
None => comp.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast))),
|
||||
Some(ref s) if s == "compact" => comp.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast))),
|
||||
Some(ref s) if s == "expanded" => comp.add_artifact(TraceArtifact::new("ast", format!("{:#?}", ast))),
|
||||
Some(ref s) if s == "trace" => comp.add_artifact(TraceArtifact::new_parse_trace(trace)),
|
||||
Some(ref x) => println!("Bad parsing debug option: {}", x),
|
||||
};
|
||||
});
|
||||
ast.map_err(|err| err.msg)
|
||||
}
|
||||
|
||||
fn symbol_table(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
|
||||
let add = handle.symbol_table.borrow_mut().add_top_level_symbols(&input);
|
||||
match add {
|
||||
Ok(()) => {
|
||||
let artifact = TraceArtifact::new("symbol_table", handle.symbol_table.borrow().debug_symbol_table());
|
||||
comp.map(|comp| comp.add_artifact(artifact));
|
||||
Ok(input)
|
||||
},
|
||||
Err(msg) => Err(msg)
|
||||
}
|
||||
}
|
||||
|
||||
fn typechecking(_handle: &mut Schala, input: ast::AST, _comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn ast_reducing(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<reduced_ast::ReducedAST, String> {
|
||||
let ref symbol_table = handle.symbol_table.borrow();
|
||||
let output = input.reduce(symbol_table);
|
||||
comp.map(|comp| comp.add_artifact(TraceArtifact::new("ast_reducing", format!("{:?}", output))));
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn eval(handle: &mut Schala, input: reduced_ast::ReducedAST, comp: Option<&mut UnfinishedComputation>) -> Result<String, String> {
|
||||
comp.map(|comp| comp.add_artifact(TraceArtifact::new("value_state", handle.state.debug_print())));
|
||||
let evaluation_outputs = handle.state.evaluate(input, true);
|
||||
let text_output: Result<Vec<String>, String> = evaluation_outputs
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let eval_output: Result<String, String> = text_output
|
||||
.map(|v| { v.into_iter().intersperse(format!("\n")).collect() });
|
||||
eval_output
|
||||
}
|
||||
mod schala;
|
||||
|
||||
pub use schala::Schala;
|
||||
|
||||
schala-lang/language/src/parser.rs (new file, 598 lines)
@@ -0,0 +1,598 @@
|
||||
extern crate nom;
|
||||
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use nom::IResult;
|
||||
use nom::character::complete::{one_of, space0, alphanumeric0};
|
||||
use nom::bytes::complete::{tag, take, take_while, take_while1, take_until};
|
||||
use nom::combinator::{cut, cond, map, map_res, value, opt, verify};
|
||||
use nom::multi::{separated_list, separated_nonempty_list, many1, many0};
|
||||
use nom::error::{context, ParseError, VerboseError, ErrorKind, make_error};
|
||||
use nom::branch::alt;
|
||||
use nom::sequence::{pair, tuple, delimited, preceded};
|
||||
|
||||
use crate::ast::*;
|
||||
use crate::builtin::Builtin;
|
||||
|
||||
type ParseResult<'a, T> = IResult<&'a str, T, VerboseError<&'a str>>;
|
||||
|
||||
pub fn ws<'a, O, E: ParseError<&'a str>, F>(parser: F) -> impl Fn(&'a str) -> IResult<&'a str, O, E>
|
||||
where
|
||||
F: Fn(&'a str) -> IResult<&'a str, O, E>,
|
||||
{
|
||||
delimited(space0, parser, space0)
|
||||
}
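ws lets any sub-parser ignore surrounding spaces; a quick illustrative check (assumes the nom 5 combinators imported above, written as a test for self-containment):

#[test]
fn ws_strips_surrounding_spaces() {
    use nom::bytes::complete::tag;
    // Wrap a keyword parser so incidental spacing around it is consumed.
    let result: ParseResult<&str> = ws(tag("let"))("  let   x = 1");
    assert_eq!(result, Ok(("x = 1", "let")));
}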
|
||||
|
||||
fn statement_sep(text: &str) -> ParseResult<()> {
|
||||
value((), one_of("\n;"))(text)
|
||||
}
|
||||
|
||||
fn single_alphabetic_character(text: &str) -> ParseResult<char> {
|
||||
let p = verify(take(1usize), |s: &str| s.chars().nth(0).map(|c| c.is_alphabetic()).unwrap_or(false));
|
||||
map(p, |s: &str| s.chars().nth(0).unwrap())(text)
|
||||
}
|
||||
|
||||
fn single_alphanumeric_character(text: &str) -> ParseResult<char> {
|
||||
let p = verify(take(1usize), |s: &str| s.chars().nth(0).map(|c| c.is_alphanumeric() || c == '_').unwrap_or(false));
|
||||
map(p, |s: &str| s.chars().nth(0).unwrap())(text)
|
||||
}
|
||||
|
||||
fn identifier(text: &str) -> ParseResult<Rc<String>> {
|
||||
use nom::character::complete::char;
|
||||
map(alt((
|
||||
pair(char('_'), many1(single_alphanumeric_character)),
|
||||
pair(single_alphabetic_character, many0(single_alphanumeric_character))
|
||||
)),
|
||||
|(first, rest): (char, Vec<char>)| Rc::new(format!("{}{}", first, rest.into_iter().collect::<String>()))
|
||||
)(text)
|
||||
}
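identifier accepts a leading underscore or letter followed by alphanumerics and underscores; an illustrative check (not part of the file):

#[test]
fn identifier_stops_at_non_word_characters() {
    // "foo_bar" is consumed; the space and everything after it is left over.
    let (rest, name) = identifier("foo_bar + 1").unwrap();
    assert_eq!(name.as_str(), "foo_bar");
    assert_eq!(rest, " + 1");
}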
|
||||
|
||||
const OPERATOR_CHARS: &'static str = "~`!@#$%^&*-+=<>?/|";
|
||||
fn operator(text: &str) -> ParseResult<Vec<char>> {
|
||||
many1(one_of(OPERATOR_CHARS))(text)
|
||||
}
|
||||
|
||||
fn binop(text: &str) -> ParseResult<BinOp> {
|
||||
context("Binop", map(
|
||||
operator,
|
||||
|op| BinOp::from_sigil(&op.into_iter().collect::<String>())
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn bool_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = alt((
|
||||
value(true, tag("true")),
|
||||
value(false, tag("false"))
|
||||
));
|
||||
context("Bool literal", map(p, ExpressionKind::BoolLiteral))(text)
|
||||
}
|
||||
|
||||
fn number_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let num_lit = many1(alt((
|
||||
map(one_of("1234567890"), |s: char| Some(s)),
|
||||
value(None, nom::character::complete::char('_')),
|
||||
)));
|
||||
|
||||
let (text, n) = map_res(num_lit,
|
||||
|digits: Vec<Option<char>>| {
|
||||
let num_str: String = digits.into_iter().filter_map(|x| x).collect();
|
||||
u64::from_str_radix(&num_str, 10)
|
||||
})(text)?;
|
||||
|
||||
Ok((text, ExpressionKind::NatLiteral(n)))
|
||||
}
|
||||
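For clarity, here is a standalone sketch (assuming nom 5.x, not part of the diff) of the underscore-skipping idea that `number_literal` uses above: digits map to `Some(char)`, `_` maps to `None` and is filtered out before the radix conversion. The function name `nat` is illustrative only.

extern crate nom;

use nom::branch::alt;
use nom::character::complete::{char, one_of};
use nom::combinator::{map, map_res, value};
use nom::multi::many1;
use nom::IResult;

fn nat(input: &str) -> IResult<&str, u64> {
    // Each accepted character becomes Some(digit); underscores become None.
    let digit_or_sep = alt((
        map(one_of("0123456789"), Some),
        value(None, char('_')),
    ));
    map_res(many1(digit_or_sep), |chunks: Vec<Option<char>>| {
        // Drop the Nones (underscores) and parse the remaining digits as base-10.
        let digits: String = chunks.into_iter().flatten().collect();
        u64::from_str_radix(&digits, 10)
    })(input)
}

fn main() {
    assert_eq!(nat("1_000_000 rest"), Ok((" rest", 1_000_000)));
}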
|
||||
fn binary_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(tag("0b"), cut(take_while1(|c: char| c == '0' || c == '1')));
|
||||
let (rest, n): (&str, u64) = map_res(
|
||||
p, |hex_str: &str| u64::from_str_radix(hex_str, 2)
|
||||
)(text)?;
|
||||
let expr = ExpressionKind::NatLiteral(n);
|
||||
Ok((rest, expr))
|
||||
}
|
||||
|
||||
fn hex_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(tag("0x"), cut(take_while1(|c: char| c.is_digit(16))));
|
||||
let (rest, n): (&str, u64) = map_res(
|
||||
p, |hex_str: &str| u64::from_str_radix(hex_str, 16)
|
||||
)(text)?;
|
||||
let expr = ExpressionKind::NatLiteral(n);
|
||||
Ok((rest, expr))
|
||||
}
|
||||
|
||||
fn string_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
use nom::character::complete::char;
|
||||
let (text, string_output) = delimited(
|
||||
char('"'), take_until("\""), char('"')
|
||||
)(text)?;
|
||||
let expr = ExpressionKind::StringLiteral(Rc::new(string_output.to_string()));
|
||||
Ok((text, expr))
|
||||
}
|
||||
|
||||
fn literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
alt((
|
||||
string_literal,
|
||||
hex_literal,
|
||||
binary_literal,
|
||||
number_literal,
|
||||
bool_literal,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn paren_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
context("Paren expression", delimited(tag("("), ws(expression_kind), tag(")")))(text)
|
||||
}
|
||||
|
||||
fn prefix_op(text: &str) -> ParseResult<PrefixOp> {
|
||||
use nom::character::complete::char;
|
||||
let p = alt((char('+'), char('-'), char('!')));
|
||||
map(p, |sigil| PrefixOp::from_str(&sigil.to_string()).unwrap())(text)
|
||||
}
|
||||
|
||||
fn qualified_name(text: &str) -> ParseResult<QualifiedName> {
|
||||
map(
|
||||
separated_nonempty_list(tag("::"), identifier),
|
||||
|components| QualifiedName { id: ItemId::new(0), components }
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn identifier_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
map(qualified_name, ExpressionKind::Value)(text)
|
||||
}
|
||||
|
||||
fn primary_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
// primary := literal | paren_expr | if_expr | for_expr | while_expr | identifier_expr | lambda_expr | anonymous_struct | list_expr
|
||||
|
||||
alt((
|
||||
if_expr,
|
||||
for_expr,
|
||||
while_expr,
|
||||
literal,
|
||||
paren_expr,
|
||||
lambda_expr,
|
||||
identifier_expr,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn lambda_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(ws(tag("\\")),
|
||||
tuple((ws(lambda_param_list), ws(opt(type_anno)), ws(block))));
|
||||
context("Lambda expression",
|
||||
map(p, |(params, type_anno, body)| ExpressionKind::Lambda { params, type_anno, body })
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn lambda_param_list(text: &str) -> ParseResult<Vec<FormalParam>> {
|
||||
alt((
|
||||
map(formal_param, |x| vec![x]),
|
||||
formal_params
|
||||
))(text)
|
||||
}
|
||||
|
||||
|
||||
fn while_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(tag("while"), tuple((ws(while_cond), ws(block))));
|
||||
let m = map(p, |(condition, body)| {
|
||||
let condition = condition.map(Box::new);
|
||||
ExpressionKind::WhileExpression {condition, body}
|
||||
});
|
||||
context("While expression", m)(text)
|
||||
}
|
||||
|
||||
fn while_cond(text: &str) -> ParseResult<Option<Expression>> {
|
||||
//TODO support is constructs?
|
||||
context("While condition",
|
||||
map(opt(ws(expression_kind)),
|
||||
|maybe_expr_kind| maybe_expr_kind.map(|kind| Expression::new(ItemId::new(0), kind)))
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn for_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
//TODO do I need something like no struct literal here?
|
||||
let en = alt((
|
||||
map(enumerator, |e| vec![e]),
|
||||
delimited(tag("{"), enumerators, tag("}"))
|
||||
));
|
||||
context("For expression",
|
||||
preceded(tag("for"),
|
||||
cut(
|
||||
map(tuple((ws(en), for_expr_body)),
|
||||
|(enumerators, body)| ExpressionKind::ForExpression { enumerators, body: Box::new(body) }
|
||||
))))(text)
|
||||
}
|
||||
|
||||
|
||||
fn enumerators(text: &str) -> ParseResult<Vec<Enumerator>> {
|
||||
separated_nonempty_list(alt((value((), tag(",")), statement_sep)),
|
||||
enumerator)(text)
|
||||
}
|
||||
|
||||
fn enumerator(text: &str) -> ParseResult<Enumerator> {
|
||||
map(
|
||||
tuple((ws(identifier), ws(tag("<-")), ws(expression))),
|
||||
|(id, _, generator)| Enumerator { id, generator }
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn for_expr_body(text: &str) -> ParseResult<ForBody> {
|
||||
context("For expression body",
|
||||
alt((
|
||||
map(preceded(ws(tag("return")), expression), ForBody::MonadicReturn),
|
||||
map(block, ForBody::StatementBlock),
|
||||
)))(text)
|
||||
}
|
||||
|
||||
fn invocation_argument(text: &str) -> ParseResult<InvocationArgument> {
|
||||
use nom::character::complete::char;
|
||||
alt((
|
||||
value(InvocationArgument::Ignored, pair(char('_'), alphanumeric0)),
|
||||
map(expression_kind, |kind: ExpressionKind| InvocationArgument::Positional(
|
||||
Expression { id: ItemId::new(0), kind, type_anno: None }))
|
||||
//map(identifier, |id: Rc<String>|
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn if_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(tag("if"), pair(ws(discriminator), ws(if_expr_body)));
|
||||
map(p, |(discriminator, body)| {
|
||||
let discriminator = discriminator.map(Box::new);
|
||||
let body = Box::new(body);
|
||||
ExpressionKind::IfExpression { discriminator, body }
|
||||
}) (text)
|
||||
}
|
||||
|
||||
fn discriminator(text: &str) -> ParseResult<Option<Expression>> {
|
||||
use nom::combinator::verify;
|
||||
cond(text.chars().next().map(|c| c != '{').unwrap_or(true),
|
||||
expression
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn if_expr_body(text: &str) -> ParseResult<IfExpressionBody> {
|
||||
alt((
|
||||
preceded(tag("then"), simple_conditional),
|
||||
preceded(tag("is"), simple_pattern_match),
|
||||
cond_block,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn simple_conditional(text: &str) -> ParseResult<IfExpressionBody> {
|
||||
map(
|
||||
pair(expr_or_block, else_case),
|
||||
|(then_case, else_case)| IfExpressionBody::SimpleConditional { then_case, else_case }
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn else_case(text: &str) -> ParseResult<Option<Block>> {
|
||||
opt(preceded(tag("else"), expr_or_block))(text)
|
||||
}
|
||||
|
||||
fn simple_pattern_match(text: &str) -> ParseResult<IfExpressionBody> {
|
||||
let p = tuple((pattern, tag("then"), expr_or_block, else_case));
|
||||
map(p, |(pattern, _, then_case, else_case)|
|
||||
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case }
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn pattern(text: &str) -> ParseResult<Pattern> {
|
||||
use nom::character::complete::char;
|
||||
|
||||
let t = delimited(char('('),
|
||||
separated_nonempty_list(char(','), pattern),
|
||||
char(')')
|
||||
);
|
||||
|
||||
alt((
|
||||
map(t, |patterns| Pattern::TuplePattern(patterns)),
|
||||
simple_pattern,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn simple_pattern(text: &str) -> ParseResult<Pattern> {
|
||||
alt((
|
||||
value(Pattern::Ignored, tag("_")),
|
||||
tuple_struct_pattern,
|
||||
record_pattern,
|
||||
map(pattern_literal, Pattern::Literal),
|
||||
map(qualified_name, Pattern::VarOrName),
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn tuple_struct_pattern(text: &str) -> ParseResult<Pattern> {
|
||||
let p = tuple((
|
||||
qualified_name,
|
||||
delimited(ws(tag("(")),
|
||||
separated_nonempty_list(ws(tag(",")), ws(pattern)),
|
||||
ws(tag(")"))
|
||||
)
|
||||
));
|
||||
map(p, |(name, patterns)| Pattern::TupleStruct(name, patterns))(text)
|
||||
}
|
||||
|
||||
fn record_pattern(text: &str) -> ParseResult<Pattern> {
|
||||
let p = tuple((
|
||||
qualified_name,
|
||||
delimited(ws(tag("{")),
|
||||
separated_nonempty_list(ws(tag(",")), ws(record_pattern_entry)), //TODO support newlines?
|
||||
ws(tag("}")))
|
||||
));
|
||||
map(p, |(name, members)| Pattern::Record(name, members))(text)
|
||||
}
|
||||
|
||||
fn record_pattern_entry(text: &str) -> ParseResult<(Rc<String>, Pattern)> {
|
||||
alt((
|
||||
map(tuple((ws(identifier), ws(tag(":")), ws(pattern))),
|
||||
|(name, _, pattern)| (name, pattern)),
|
||||
map(identifier, |name|
|
||||
(name.clone(), Pattern::Literal(PatternLiteral::StringPattern(name.clone())))
|
||||
)
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn pattern_literal(text: &str) -> ParseResult<PatternLiteral> {
|
||||
use PatternLiteral::*;
|
||||
use nom::character::complete::char;
|
||||
alt((
|
||||
value(BoolPattern(true), tag("true")),
|
||||
value(BoolPattern(false), tag("false")),
|
||||
map(delimited(char('"'), take_until("\""), char('"')), |s: &str| StringPattern(Rc::new(s.to_string()))),
|
||||
))(text)
|
||||
//TODO handle signed_number_literal
|
||||
}
|
||||
|
||||
fn cond_block(text: &str) -> ParseResult<IfExpressionBody> {
|
||||
use nom::character::complete::char;
|
||||
//TODO maybe change this bit of syntax
|
||||
let comma_or_delimitor = alt((value((), char(',')), statement_sep));
|
||||
let p = delimited(char('{'),
|
||||
separated_nonempty_list(comma_or_delimitor, cond_arm),
|
||||
char('}'));
|
||||
map(p, IfExpressionBody::CondList)(text)
|
||||
}
|
||||
|
||||
fn cond_arm(text: &str) -> ParseResult<ConditionArm> {
|
||||
let variant_1 = map(
|
||||
tuple((condition, guard, tag("then"), expr_or_block)),
|
||||
|(condition, guard, _, body)| ConditionArm { condition, guard, body }
|
||||
);
|
||||
let variant_2 = map(
|
||||
preceded(tag("else"), expr_or_block),
|
||||
|body| ConditionArm { condition: Condition::Else, guard: None, body }
|
||||
);
|
||||
alt((variant_1, variant_2))(text)
|
||||
}
|
||||
|
||||
fn condition(text: &str) -> ParseResult<Condition> {
|
||||
alt((
|
||||
map(preceded(tag("is"), pattern), Condition::Pattern),
|
||||
map(tuple((binop, expression)), |(op, expr)|
|
||||
Condition::TruncatedOp(op, expr)),
|
||||
map(expression, Condition::Expression),
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn guard(text: &str) -> ParseResult<Option<Expression>> {
|
||||
opt(preceded(tag("if"), expression))(text)
|
||||
}
|
||||
|
||||
fn expr_or_block(text: &str) -> ParseResult<Block> {
|
||||
//TODO fix
|
||||
alt((block, map(expression, |expr| vec![Statement { id: ItemId::new(0), kind: StatementKind::Expression(expr)}])))(text)
|
||||
}
|
||||
|
||||
fn block(text: &str) -> ParseResult<Block> {
|
||||
//TODO fix this so it can handle nested statements
|
||||
let make_expr = |e| Statement { id: ItemId::new(0), kind: StatementKind::Expression(e) };
|
||||
delimited(ws(tag("{")),
|
||||
delimited(opt(many0(statement_sep)),
|
||||
separated_list(many1(statement_sep),
|
||||
map(expression, make_expr)
|
||||
),
|
||||
opt(many0(statement_sep))
|
||||
),
|
||||
ws(tag("}")))(text)
|
||||
}
|
||||
|
||||
fn call_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
use nom::character::complete::char;
|
||||
let parse_call = opt(
|
||||
delimited(char('('), separated_list(char(','), invocation_argument), char(')'))
|
||||
);
|
||||
let p = pair(primary_expr, parse_call);
|
||||
map(p, |(expr, call_part)| if let Some(arguments) = call_part {
|
||||
let f = bx!(Expression { id: ItemId::new(0), kind: expr, type_anno: None });
|
||||
ExpressionKind::Call { f, arguments }
|
||||
} else {
|
||||
expr
|
||||
})(text)
|
||||
}
|
||||
|
||||
fn prefix_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let (text, pfx) = ws(opt(prefix_op))(text)?;
|
||||
let (text, result) = call_expr(text)?;
|
||||
match pfx {
|
||||
None => Ok((text, result)),
|
||||
Some(pfx) => {
|
||||
let exp = Expression { id: ItemId::new(0), kind: result, type_anno: None };
|
||||
Ok((text, ExpressionKind::PrefixExp(pfx, Box::new(exp))))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// this implements Pratt parsing, see http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
|
||||
fn precedence_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
fn inner_precedence_expr(input: &str, precedence: i32) -> ParseResult<ExpressionKind> {
|
||||
let (mut outer_rest, mut lhs) = prefix_expr(input)?;
|
||||
loop {
|
||||
let (rest, _) = space0(outer_rest)?;
|
||||
let (rest, maybe_binop) = opt(binop)(rest)?;
|
||||
let (new_precedence, binop) = match maybe_binop {
|
||||
Some(binop) => (binop.precedence(), binop),
|
||||
None => break,
|
||||
};
|
||||
|
||||
if precedence >= new_precedence {
|
||||
break;
|
||||
}
|
||||
let (rest, _) = space0(rest)?;
|
||||
let (rest, rhs) = inner_precedence_expr(rest, new_precedence)?;
|
||||
outer_rest = rest;
|
||||
lhs = ExpressionKind::BinExp(binop,
|
||||
bx!(Expression::new(ItemId::new(0), lhs)),
|
||||
bx!(Expression::new(ItemId::new(0), rhs))
|
||||
);
|
||||
}
|
||||
Ok((outer_rest, lhs))
|
||||
}
|
||||
context("Precedence expression",
|
||||
|input| inner_precedence_expr(input, BinOp::min_precedence())
|
||||
)(text)
|
||||
}
|
||||
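As a worked illustration of the precedence-climbing loop above (a self-contained sketch, not part of the diff): the same algorithm, shown here evaluating a pre-tokenized expression rather than building BinExp nodes, so it needs none of the crate's types. The token enum and the precedence values are assumptions made for this example only.

#[derive(Clone, Copy)]
enum Tok { Num(i64), Plus, Star }

// Assumed precedences: '*' binds tighter than '+'.
fn precedence(t: Tok) -> Option<i32> {
    match t {
        Tok::Plus => Some(10),
        Tok::Star => Some(20),
        _ => None,
    }
}

// Parse (and here, evaluate) an expression whose operators all bind tighter than `min_prec`,
// mirroring the `precedence >= new_precedence` break in inner_precedence_expr above.
fn climb(toks: &[Tok], pos: &mut usize, min_prec: i32) -> i64 {
    let mut lhs = match toks[*pos] {
        Tok::Num(n) => { *pos += 1; n }
        _ => panic!("expected a number"),
    };
    while *pos < toks.len() {
        let op = toks[*pos];
        let prec = match precedence(op) {
            Some(p) if p > min_prec => p,
            _ => break,
        };
        *pos += 1;
        // Recurse with the new, higher minimum precedence so tighter operators group to the right.
        let rhs = climb(toks, pos, prec);
        lhs = match op {
            Tok::Plus => lhs + rhs,
            Tok::Star => lhs * rhs,
            _ => unreachable!(),
        };
    }
    lhs
}

fn main() {
    // 1 + 2 * 3  groups as  1 + (2 * 3) = 7, just as precedence_expr nests its BinExp nodes.
    let toks = [Tok::Num(1), Tok::Plus, Tok::Num(2), Tok::Star, Tok::Num(3)];
    assert_eq!(climb(&toks, &mut 0, 0), 7);
}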
|
||||
fn expression_kind(text: &str) -> ParseResult<ExpressionKind> {
|
||||
context("Expression kind", ws(precedence_expr))(text)
|
||||
}
|
||||
|
||||
fn type_anno(text: &str) -> ParseResult<TypeIdentifier> {
|
||||
preceded(ws(tag(":")), ws(type_name))(text)
|
||||
}
|
||||
|
||||
fn type_name(text: &str) -> ParseResult<TypeIdentifier> {
|
||||
//TODO incomplete
|
||||
let (text, name) = identifier(text)?;
|
||||
let id = TypeIdentifier::Singleton(TypeSingletonName { name, params: vec![] });
|
||||
Ok((text, id))
|
||||
}
|
||||
|
||||
pub fn expression(text: &str) -> ParseResult<Expression> {
|
||||
let (rest, (kind, type_anno)) = ws(pair(expression_kind, opt(type_anno)))(text)?;
|
||||
let expr = Expression { id: ItemId::new(0), kind, type_anno };
|
||||
Ok((rest, expr))
|
||||
}
|
||||
|
||||
fn import(text: &str) -> ParseResult<ImportSpecifier> {
|
||||
let p = preceded(
|
||||
tag("import"),
|
||||
separated_nonempty_list(tag("::"), identifier)
|
||||
);
|
||||
map(p, |path_components| ImportSpecifier {
|
||||
id: ItemId::new(0),
|
||||
path_components,
|
||||
imported_names: ImportedNames::LastOfPath, //TODO finish
|
||||
})(text)
|
||||
}
|
||||
|
||||
fn module(text: &str) -> ParseResult<ModuleSpecifier> {
|
||||
let p = tuple((tag("module"), ws(identifier), ws(block)));
|
||||
map(p, |(_, name, contents)| ModuleSpecifier { name, contents })
|
||||
(text)
|
||||
}
|
||||
|
||||
fn declaration(text: &str) -> ParseResult<Declaration> {
|
||||
alt((
|
||||
func_declaration,
|
||||
type_declaration,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn func_declaration(text: &str) -> ParseResult<Declaration> {
|
||||
use Declaration::*;
|
||||
let p = tuple((func_signature, ws(opt(block))));
|
||||
map(p, |(signature, maybe_block)| match maybe_block {
|
||||
Some(block) => FuncDecl(signature, block),
|
||||
None => FuncSig(signature),
|
||||
})(text)
|
||||
}
|
||||
|
||||
fn func_signature(text: &str) -> ParseResult<Signature> {
|
||||
let p = preceded(tag("fn"), cut(tuple((ws(identifier), ws(formal_params), opt(ws(type_anno))))));
|
||||
//TODO fix op
|
||||
map(p, |(name, params, type_anno)| Signature { name, params, type_anno, operator: false })
|
||||
(text)
|
||||
}
|
||||
|
||||
fn formal_params(text: &str) -> ParseResult<Vec<FormalParam>> {
|
||||
delimited(tag("("), ws(separated_list(ws(tag(",")), formal_param)), tag(")"))(text)
|
||||
}
|
||||
|
||||
fn formal_param(text: &str) -> ParseResult<FormalParam> {
|
||||
let default = opt(preceded(ws(tag("=")), ws(expression)));
|
||||
let p = tuple((ws(identifier), opt(ws(type_anno)), default));
|
||||
map(p, |(name, anno, default)|
|
||||
FormalParam { name, anno, default })(text)
|
||||
}
|
||||
|
||||
fn type_declaration(text: &str) -> ParseResult<Declaration> {
|
||||
preceded(tag("type"), ws(type_declaration_body))(text)
|
||||
}
|
||||
|
||||
fn type_declaration_body(text: &str) -> ParseResult<Declaration> {
|
||||
let t = tuple((opt(tag("mut")), ws(type_singleton_name), ws(tag("=")), ws(type_body)));
|
||||
alt((
|
||||
preceded(tag("alias"), ws(type_alias)),
|
||||
map(t, |(mut_kw, name, _, body)| {
|
||||
Declaration::TypeDecl { name, body, mutable: mut_kw.is_some() }
|
||||
})
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn type_body(text: &str) -> ParseResult<TypeBody> {
|
||||
let p = separated_nonempty_list(ws(tag("|")), variant_specifier);
|
||||
map(p, TypeBody)(text)
|
||||
}
|
||||
|
||||
fn variant_specifier(text: &str) -> ParseResult<Variant> {
|
||||
use self::Variant::*;
|
||||
let tuple_struct =
|
||||
delimited(tag("("), separated_nonempty_list(ws(tag(",")), type_name), ws(tag(")")));
|
||||
//TODO record
|
||||
|
||||
let p = tuple((identifier, opt(tuple_struct)));
|
||||
map(p, |(name, maybe_tuple_members)| match maybe_tuple_members {
|
||||
Some(members) => TupleStruct(name, members),
|
||||
None => UnitStruct(name),
|
||||
})(text)
|
||||
}
|
||||
|
||||
fn type_singleton_name(text: &str) -> ParseResult<TypeSingletonName> {
|
||||
let p = tuple((identifier, opt(delimited(tag("<"),
|
||||
separated_nonempty_list(tag(","), ws(type_name)),
|
||||
tag(">")))));
|
||||
map(p, |(name, params)| TypeSingletonName { name, params: params.unwrap_or(vec![]) })(text)
|
||||
}
|
||||
|
||||
fn type_alias(text: &str) -> ParseResult<Declaration> {
|
||||
let p = tuple((ws(identifier), ws(tag("=")), ws(identifier)));
|
||||
map(p, |(alias, _, original)| Declaration::TypeAlias { alias, original })
|
||||
(text)
|
||||
}
|
||||
|
||||
fn statement(text: &str) -> ParseResult<Statement> {
|
||||
let p = alt((
|
||||
map(import, StatementKind::Import),
|
||||
map(module, StatementKind::Module),
|
||||
map(declaration, StatementKind::Declaration),
|
||||
map(expression, StatementKind::Expression),
|
||||
));
|
||||
map(p, |kind| Statement { id: ItemId::new(0), kind })(text)
|
||||
}
|
||||
|
||||
pub fn parse_ast(text: &str) -> ParseResult<AST> {
|
||||
map(separated_list(statement_sep, statement),
|
||||
|statements| AST { id: ItemId::new(0), statements }
|
||||
)(text)
|
||||
}
|
||||
|
||||
pub fn perform_parsing(input: &str) -> Result<String, String> {
|
||||
let output = match parse_ast(input) {
|
||||
Ok((rest, ast)) => format!("{:?} (rest: {})", ast, rest),
|
||||
Err(nom::Err::Incomplete(needed)) => format!("Incomplete: {:?}", needed),
|
||||
Err(nom::Err::Error(verbose_error) | nom::Err::Failure(verbose_error)) => {
|
||||
format!("Verbose Error: ` {:?} `", verbose_error)
|
||||
//nom::error::convert_error(input, verbose_error)
|
||||
}
|
||||
};
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
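A hypothetical usage sketch (not part of the diff), assuming this file is reachable as a `parser` module within the same crate; the input string is illustrative only.

// `perform_parsing` wraps `parse_ast` and renders either the resulting AST or the
// nom error as a String, so callers only deal with Result<String, String>.
fn demo_parse(source: &str) {
    match crate::parser::perform_parsing(source) {
        Ok(output) => println!("{}", output),
        Err(e) => eprintln!("parse failure: {}", e),
    }
}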
File diff suppressed because it is too large
schala-lang/language/src/parsing/test.rs (new file, 849 lines)
@@ -0,0 +1,849 @@
|
||||
#![cfg(test)]
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use super::{Parser, ParseResult, tokenize, ParseError};
|
||||
use crate::ast::*;
|
||||
use super::Declaration::*;
|
||||
use super::Signature;
|
||||
use super::TypeIdentifier::*;
|
||||
use super::TypeSingletonName;
|
||||
use super::ExpressionKind::*;
|
||||
use super::Variant::*;
|
||||
use super::ForBody::*;
|
||||
|
||||
/*
|
||||
fn make_parser(input: &str) -> Parser {
|
||||
let source_map = crate::source_map::SourceMap::new();
|
||||
let source_map_handle = Rc::new(RefCell::new(source_map));
|
||||
let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
|
||||
let mut parser = super::Parser::new(source_map_handle);
|
||||
parser.add_new_tokens(tokens);
|
||||
parser
|
||||
}
|
||||
*/
|
||||
|
||||
fn parse(input: &str) -> ParseResult<AST> {
|
||||
use crate::tokenizing::*;
|
||||
crate::parser::parse_ast(input).map_err(|err| {
|
||||
let token = Token { kind: TokenKind::Newline, location: crate::source_map::Location { line_num: 0, char_num: 0 } };
|
||||
ParseError { production_name: None, msg: "".to_string(), token }
|
||||
})
|
||||
.map(|(rest, s)| s)
|
||||
/*
|
||||
let mut parser = make_parser(input);
|
||||
parser.parse()
|
||||
*/
|
||||
}
|
||||
|
||||
macro_rules! parse_test {
|
||||
($string:expr, $correct:expr) => {
|
||||
assert_eq!(parse($string).unwrap(), $correct)
|
||||
};
|
||||
}
|
||||
macro_rules! parse_test_wrap_ast {
|
||||
($string:expr, $correct:expr) => { parse_test!($string, AST { id: ItemIdStore::new_id(), statements: vec![$correct] }) }
|
||||
}
|
||||
macro_rules! parse_error {
|
||||
($string:expr) => { assert!(parse($string).is_err()) }
|
||||
}
|
||||
macro_rules! qname {
|
||||
( $( $component:expr),* ) => {
|
||||
{
|
||||
let mut components = vec![];
|
||||
$(
|
||||
components.push(rc!($component));
|
||||
)*
|
||||
QualifiedName { components, id: ItemIdStore::new_id() }
|
||||
}
|
||||
};
|
||||
}
|
||||
macro_rules! val {
|
||||
($var:expr) => { Value(QualifiedName { components: vec![Rc::new($var.to_string())], id: ItemIdStore::new_id() }) };
|
||||
}
|
||||
macro_rules! ty {
|
||||
($name:expr) => { Singleton(tys!($name)) }
|
||||
}
|
||||
macro_rules! tys {
|
||||
($name:expr) => { TypeSingletonName { name: Rc::new($name.to_string()), params: vec![] } };
|
||||
}
|
||||
|
||||
macro_rules! decl {
|
||||
($expr_type:expr) => {
|
||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Declaration($expr_type) }
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! import {
|
||||
($import_spec:expr) => {
|
||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Import($import_spec) }
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! module {
|
||||
($module_spec:expr) => {
|
||||
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Module($module_spec) }
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! ex {
|
||||
($expr_type:expr) => { Expression::new(ItemIdStore::new_id(), $expr_type) };
|
||||
($expr_type:expr, $type_anno:expr) => { Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno) };
|
||||
(s $expr_text:expr) => {
|
||||
{
|
||||
/*
|
||||
let mut parser = make_parser($expr_text);
|
||||
parser.expression().unwrap()
|
||||
*/
|
||||
crate::parser::expression($expr_text).unwrap().1
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! inv {
|
||||
($expr_type:expr) => { InvocationArgument::Positional($expr_type) }
|
||||
}
|
||||
|
||||
macro_rules! binexp {
|
||||
($op:expr, $lhs:expr, $rhs:expr) => { BinExp(BinOp::from_sigil($op), bx!(Expression::new(ItemIdStore::new_id(), $lhs).into()), bx!(Expression::new(ItemIdStore::new_id(), $rhs).into())) }
|
||||
}
|
||||
macro_rules! prefexp {
|
||||
($op:expr, $lhs:expr) => { PrefixExp(PrefixOp::from_str($op).unwrap(), bx!(Expression::new(ItemIdStore::new_id(), $lhs).into())) }
|
||||
}
|
||||
macro_rules! exst {
|
||||
($expr_type:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::new(ItemIdStore::new_id(), $expr_type).into())} };
|
||||
($expr_type:expr, $type_anno:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno).into())} };
|
||||
($op:expr, $lhs:expr, $rhs:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(ex!(binexp!($op, $lhs, $rhs)))}
|
||||
};
|
||||
(s $statement_text:expr) => {
|
||||
{
|
||||
/*
|
||||
let mut parser = make_parser($statement_text);
|
||||
parser.statement().unwrap()
|
||||
*/
|
||||
Statement {
|
||||
kind: StatementKind::Expression(
|
||||
crate::parser::expression($statement_text).unwrap().1
|
||||
),
|
||||
id: ItemIdStore::new_id()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_number_literals_and_binexps() {
|
||||
parse_test_wrap_ast! { ".2", exst!(FloatLiteral(0.2)) };
|
||||
parse_test_wrap_ast! { "8.1", exst!(FloatLiteral(8.1)) };
|
||||
|
||||
parse_test_wrap_ast! { "0b010", exst!(NatLiteral(2)) };
|
||||
parse_test_wrap_ast! { "0b0_1_0_", exst!(NatLiteral(2)) }
|
||||
|
||||
parse_test_wrap_ast! {"0xff", exst!(NatLiteral(255)) };
|
||||
parse_test_wrap_ast! {"0xf_f_", exst!(NatLiteral(255)) };
|
||||
|
||||
parse_test_wrap_ast! {"0xf_f_+1", exst!(binexp!("+", NatLiteral(255), NatLiteral(1))) };
|
||||
|
||||
parse_test! {"3; 4; 4.3",
|
||||
AST {
|
||||
id: ItemIdStore::new_id(),
|
||||
statements: vec![exst!(NatLiteral(3)), exst!(NatLiteral(4)),
|
||||
exst!(FloatLiteral(4.3))]
|
||||
}
|
||||
};
|
||||
|
||||
parse_test_wrap_ast!("1 + 2 * 3",
|
||||
exst!(binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))))
|
||||
);
|
||||
|
||||
parse_test_wrap_ast!("1 * 2 + 3",
|
||||
exst!(binexp!("+", binexp!("*", NatLiteral(1), NatLiteral(2)), NatLiteral(3)))
|
||||
) ;
|
||||
|
||||
parse_test_wrap_ast!("1 && 2", exst!(binexp!("&&", NatLiteral(1), NatLiteral(2))));
|
||||
|
||||
parse_test_wrap_ast!("1 + 2 * 3 + 4", exst!(
|
||||
binexp!("+",
|
||||
binexp!("+", NatLiteral(1), binexp!("*", NatLiteral(2), NatLiteral(3))),
|
||||
NatLiteral(4))));
|
||||
|
||||
parse_test_wrap_ast!("(1 + 2) * 3",
|
||||
exst!(binexp!("*", binexp!("+", NatLiteral(1), NatLiteral(2)), NatLiteral(3))));
|
||||
|
||||
parse_test_wrap_ast!(".1 + .2", exst!(binexp!("+", FloatLiteral(0.1), FloatLiteral(0.2))));
|
||||
parse_test_wrap_ast!("1 / 2", exst!(binexp!("/", NatLiteral(1), NatLiteral(2))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_tuples() {
|
||||
parse_test_wrap_ast!("()", exst!(TupleLiteral(vec![])));
|
||||
parse_test_wrap_ast!("(\"hella\", 34)", exst!(
|
||||
TupleLiteral(
|
||||
vec![ex!(s r#""hella""#).into(), ex!(s "34").into()]
|
||||
)
|
||||
));
|
||||
parse_test_wrap_ast!("((1+2), \"slough\")", exst!(TupleLiteral(vec![
|
||||
ex!(binexp!("+", NatLiteral(1), NatLiteral(2))).into(),
|
||||
ex!(StringLiteral(rc!(slough))).into(),
|
||||
])))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_identifiers() {
|
||||
parse_test_wrap_ast!("a", exst!(val!("a")));
|
||||
parse_test_wrap_ast!("some_value", exst!(val!("some_value")));
|
||||
parse_test_wrap_ast!("a + b", exst!(binexp!("+", val!("a"), val!("b"))));
|
||||
//parse_test!("a[b]", AST(vec![Expression(
|
||||
//parse_test!("a[]", <- TODO THIS NEEDS TO FAIL
|
||||
//parse_test("a()[b]()[d]")
|
||||
//TODO fix this parsing stuff
|
||||
/*
|
||||
parse_test! { "perspicacity()[a]", AST(vec![
|
||||
exst!(Index {
|
||||
indexee: bx!(ex!(Call { f: bx!(ex!(val!("perspicacity"))), arguments: vec![] })),
|
||||
indexers: vec![ex!(val!("a"))]
|
||||
})
|
||||
])
|
||||
}
|
||||
*/
|
||||
parse_test_wrap_ast!("a[b,c]", exst!(Index { indexee: bx!(ex!(val!("a"))), indexers: vec![ex!(val!("b")), ex!(val!("c"))]} ));
|
||||
|
||||
parse_test_wrap_ast!("None", exst!(val!("None")));
|
||||
parse_test_wrap_ast!("Pandas { a: x + y }",
|
||||
exst!(NamedStruct { name: qname!(Pandas), fields: vec![(rc!(a), ex!(binexp!("+", val!("x"), val!("y"))))]})
|
||||
);
|
||||
parse_test_wrap_ast! { "Pandas { a: n, b: q, }",
|
||||
exst!(NamedStruct { name: qname!(Pandas), fields:
|
||||
vec![(rc!(a), ex!(val!("n"))), (rc!(b), ex!(val!("q")))]
|
||||
}
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn qualified_identifiers() {
|
||||
parse_test_wrap_ast! {
|
||||
"let q_q = Yolo::Swaggins",
|
||||
decl!(Binding { name: rc!(q_q), constant: true, type_anno: None,
|
||||
expr: Expression::new(ItemIdStore::new_id(), Value(qname!(Yolo, Swaggins))),
|
||||
})
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"thing::item::call()",
|
||||
exst!(Call { f: bx![ex!(Value(qname!(thing, item, call)))], arguments: vec![] })
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reserved_words() {
|
||||
parse_error!("module::item::call()");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_complicated_operators() {
|
||||
parse_test_wrap_ast!("a <- b", exst!(binexp!("<-", val!("a"), val!("b"))));
|
||||
parse_test_wrap_ast!("a || b", exst!(binexp!("||", val!("a"), val!("b"))));
|
||||
parse_test_wrap_ast!("a<>b", exst!(binexp!("<>", val!("a"), val!("b"))));
|
||||
parse_test_wrap_ast!("a.b.c.d", exst!(binexp!(".",
|
||||
binexp!(".",
|
||||
binexp!(".", val!("a"), val!("b")),
|
||||
val!("c")),
|
||||
val!("d"))));
|
||||
parse_test_wrap_ast!("-3", exst!(prefexp!("-", NatLiteral(3))));
|
||||
parse_test_wrap_ast!("-0.2", exst!(prefexp!("-", FloatLiteral(0.2))));
|
||||
parse_test_wrap_ast!("!3", exst!(prefexp!("!", NatLiteral(3))));
|
||||
parse_test_wrap_ast!("a <- -b", exst!(binexp!("<-", val!("a"), prefexp!("-", val!("b")))));
|
||||
parse_test_wrap_ast!("a <--b", exst!(binexp!("<--", val!("a"), val!("b"))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_functions() {
|
||||
parse_test_wrap_ast!("fn oi()", decl!(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None })));
|
||||
parse_test_wrap_ast!("oi()", exst!(Call { f: bx!(ex!(val!("oi"))), arguments: vec![] }));
|
||||
parse_test_wrap_ast!("oi(a, 2 + 2)", exst!(Call
|
||||
{ f: bx!(ex!(val!("oi"))),
|
||||
arguments: vec![inv!(ex!(val!("a"))), inv!(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))).into()]
|
||||
}));
|
||||
parse_error!("a(b,,c)");
|
||||
|
||||
parse_test_wrap_ast!("fn a(b, c: Int): Int", decl!(
|
||||
FuncSig(Signature { name: rc!(a), operator: false, params: vec![
|
||||
FormalParam { name: rc!(b), anno: None, default: None },
|
||||
FormalParam { name: rc!(c), anno: Some(ty!("Int")), default: None }
|
||||
], type_anno: Some(ty!("Int")) })));
|
||||
|
||||
|
||||
parse_test_wrap_ast!("fn a(x) { x() }", decl!(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
||||
parse_test_wrap_ast!("fn a(x) {\n x() }", decl!(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
||||
|
||||
let multiline = r#"
|
||||
fn a(x) {
|
||||
x()
|
||||
}
|
||||
"#;
|
||||
parse_test_wrap_ast!(multiline, decl!(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), default: None, anno: None }], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
|
||||
let multiline2 = r#"
|
||||
fn a(x) {
|
||||
|
||||
x()
|
||||
|
||||
}
|
||||
"#;
|
||||
parse_test_wrap_ast!(multiline2, decl!(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), default: None, anno: None }], type_anno: None },
|
||||
vec![exst!(s "x()")])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn functions_with_default_args() {
|
||||
parse_test_wrap_ast! {
|
||||
"fn func(x: Int, y: Int = 4) { }",
|
||||
decl!(
|
||||
FuncDecl(Signature { name: rc!(func), operator: false, type_anno: None, params: vec![
|
||||
FormalParam { name: rc!(x), default: None, anno: Some(ty!("Int")) },
|
||||
FormalParam { name: rc!(y), default: Some(ex!(s "4")), anno: Some(ty!("Int")) }
|
||||
]}, vec![])
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_bools() {
|
||||
parse_test_wrap_ast!("false", exst!(BoolLiteral(false)));
|
||||
parse_test_wrap_ast!("true", exst!(BoolLiteral(true)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_strings() {
|
||||
parse_test_wrap_ast!(r#""hello""#, exst!(StringLiteral(rc!(hello))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_types() {
|
||||
parse_test_wrap_ast!("type Yolo = Yolo", decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: false} ));
|
||||
parse_test_wrap_ast!("type mut Yolo = Yolo", decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: true} ));
|
||||
parse_test_wrap_ast!("type alias Sex = Drugs", decl!(TypeAlias { alias: rc!(Sex), original: rc!(Drugs) }));
|
||||
parse_test_wrap_ast!("type Sanchez = Miguel | Alejandro(Int, Option<a>) | Esperanza { a: Int, b: String }",
|
||||
decl!(TypeDecl {
|
||||
name: tys!("Sanchez"),
|
||||
body: TypeBody(vec![
|
||||
UnitStruct(rc!(Miguel)),
|
||||
TupleStruct(rc!(Alejandro), vec![
|
||||
Singleton(TypeSingletonName { name: rc!(Int), params: vec![] }),
|
||||
Singleton(TypeSingletonName { name: rc!(Option), params: vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })] }),
|
||||
]),
|
||||
Record{
|
||||
name: rc!(Esperanza),
|
||||
members: vec![
|
||||
(rc!(a), Singleton(TypeSingletonName { name: rc!(Int), params: vec![] })),
|
||||
(rc!(b), Singleton(TypeSingletonName { name: rc!(String), params: vec![] })),
|
||||
]
|
||||
}
|
||||
]),
|
||||
mutable: false
|
||||
}));
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"type Jorge<a> = Diego | Kike(a)",
|
||||
decl!(TypeDecl{
|
||||
name: TypeSingletonName { name: rc!(Jorge), params: vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })] },
|
||||
body: TypeBody(vec![UnitStruct(rc!(Diego)), TupleStruct(rc!(Kike), vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })])]),
|
||||
mutable: false
|
||||
}
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_bindings() {
|
||||
parse_test_wrap_ast!("let mut a = 10", decl!(Binding { name: rc!(a), constant: false, type_anno: None, expr: ex!(NatLiteral(10)) } ));
|
||||
parse_test_wrap_ast!("let a = 2 + 2", decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }));
|
||||
parse_test_wrap_ast!("let a: Nat = 2 + 2", decl!(
|
||||
Binding { name: rc!(a), constant: true, type_anno: Some(Singleton(TypeSingletonName { name: rc!(Nat), params: vec![] })),
|
||||
expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_block_expressions() {
|
||||
parse_test_wrap_ast! {
|
||||
"if a() then { b(); c() }", exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx! {
|
||||
ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})
|
||||
}),
|
||||
body: bx! {
|
||||
IfExpressionBody::SimpleConditional {
|
||||
then_case: vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||
else_case: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
};
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if a() then { b(); c() } else { q }", exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx! {
|
||||
ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})
|
||||
}),
|
||||
body: bx! {
|
||||
IfExpressionBody::SimpleConditional {
|
||||
then_case: vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||
else_case: Some(vec![exst!(val!("q"))]),
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
};
|
||||
|
||||
/*
|
||||
parse_test!("if a() then { b(); c() }", AST(vec![exst!(
|
||||
IfExpression(bx!(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})),
|
||||
vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||
None)
|
||||
)]));
|
||||
parse_test!(r#"
|
||||
if true then {
|
||||
const a = 10
|
||||
b
|
||||
} else {
|
||||
c
|
||||
}"#,
|
||||
AST(vec![exst!(IfExpression(bx!(ex!(BoolLiteral(true))),
|
||||
vec![decl!(Binding { name: rc!(a), constant: true, expr: ex!(NatLiteral(10)) }),
|
||||
exst!(val!(rc!(b)))],
|
||||
Some(vec![exst!(val!(rc!(c)))])))])
|
||||
);
|
||||
|
||||
parse_test!("if a { b } else { c }", AST(vec![exst!(
|
||||
IfExpression(bx!(ex!(val!("a"))),
|
||||
vec![exst!(val!("b"))],
|
||||
Some(vec![exst!(val!("c"))])))]));
|
||||
|
||||
parse_test!("if (A {a: 1}) { b } else { c }", AST(vec![exst!(
|
||||
IfExpression(bx!(ex!(NamedStruct { name: rc!(A), fields: vec![(rc!(a), ex!(NatLiteral(1)))]})),
|
||||
vec![exst!(val!("b"))],
|
||||
Some(vec![exst!(val!("c"))])))]));
|
||||
|
||||
parse_error!("if A {a: 1} { b } else { c }");
|
||||
*/
|
||||
}
|
||||
#[test]
|
||||
fn parsing_interfaces() {
|
||||
parse_test_wrap_ast!("interface Unglueable { fn unglue(a: Glue); fn mar(): Glue }",
|
||||
decl!(Interface {
|
||||
name: rc!(Unglueable),
|
||||
signatures: vec![
|
||||
Signature {
|
||||
name: rc!(unglue),
|
||||
operator: false,
|
||||
params: vec![
|
||||
FormalParam { name: rc!(a), anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })), default: None }
|
||||
],
|
||||
type_anno: None
|
||||
},
|
||||
Signature { name: rc!(mar), operator: false, params: vec![], type_anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })) },
|
||||
]
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_impls() {
|
||||
parse_test_wrap_ast!("impl Heh { fn yolo(); fn swagg(); }",
|
||||
decl!(Impl {
|
||||
type_name: ty!("Heh"),
|
||||
interface_name: None,
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None }),
|
||||
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
|
||||
] }));
|
||||
|
||||
parse_test_wrap_ast!("impl Mondai for Lollerino { fn yolo(); fn swagg(); }",
|
||||
decl!(Impl {
|
||||
type_name: ty!("Lollerino"),
|
||||
interface_name: Some(TypeSingletonName { name: rc!(Mondai), params: vec![] }),
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None}),
|
||||
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
|
||||
] }));
|
||||
|
||||
parse_test_wrap_ast!("impl Hella<T> for (Alpha, Omega) { }",
|
||||
decl!(Impl {
|
||||
type_name: Tuple(vec![ty!("Alpha"), ty!("Omega")]),
|
||||
interface_name: Some(TypeSingletonName { name: rc!(Hella), params: vec![ty!("T")] }),
|
||||
block: vec![]
|
||||
})
|
||||
);
|
||||
|
||||
parse_test_wrap_ast!("impl Option<WTFMate> { fn oi() }",
|
||||
decl!(Impl {
|
||||
type_name: Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("WTFMate")]}),
|
||||
interface_name: None,
|
||||
block: vec![
|
||||
FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }),
|
||||
]
|
||||
}));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_type_annotations() {
|
||||
parse_test_wrap_ast!("let a = b : Int",
|
||||
decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr:
|
||||
ex!(val!("b"), ty!("Int")) }));
|
||||
|
||||
parse_test_wrap_ast!("a : Int",
|
||||
exst!(val!("a"), ty!("Int"))
|
||||
);
|
||||
|
||||
parse_test_wrap_ast!("a : Option<Int>",
|
||||
exst!(val!("a"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Int")] }))
|
||||
);
|
||||
|
||||
parse_test_wrap_ast!("a : KoreanBBQSpecifier<Kimchi, Option<Bulgogi> >",
|
||||
exst!(val!("a"), Singleton(TypeSingletonName { name: rc!(KoreanBBQSpecifier), params: vec![
|
||||
ty!("Kimchi"), Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("Bulgogi")] })
|
||||
] }))
|
||||
);
|
||||
|
||||
parse_test_wrap_ast!("a : (Int, Yolo<a>)",
|
||||
exst!(val!("a"), Tuple(
|
||||
vec![ty!("Int"), Singleton(TypeSingletonName {
|
||||
name: rc!(Yolo), params: vec![ty!("a")]
|
||||
})])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parsing_lambdas() {
|
||||
parse_test_wrap_ast! { r#"\(x) { x + 1}"#, exst!(
|
||||
Lambda { params: vec![FormalParam { name: rc!(x), anno: None, default: None } ], type_anno: None, body: vec![exst!(s "x + 1")] }
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast!(r#"\ (x: Int, y) { a;b;c;}"#,
|
||||
exst!(Lambda {
|
||||
params: vec![
|
||||
FormalParam { name: rc!(x), anno: Some(ty!("Int")), default: None },
|
||||
FormalParam { name: rc!(y), anno: None, default: None }
|
||||
],
|
||||
type_anno: None,
|
||||
body: vec![exst!(s "a"), exst!(s "b"), exst!(s "c")]
|
||||
})
|
||||
);
|
||||
|
||||
parse_test_wrap_ast! { r#"\(x){y}(1)"#,
|
||||
exst!(Call { f: bx!(ex!(
|
||||
Lambda {
|
||||
params: vec![
|
||||
FormalParam { name: rc!(x), anno: None, default: None }
|
||||
],
|
||||
type_anno: None,
|
||||
body: vec![exst!(s "y")] }
|
||||
)),
|
||||
arguments: vec![inv!(ex!(NatLiteral(1))).into()] })
|
||||
};
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
r#"\(x: Int): String { "q" }"#,
|
||||
exst!(Lambda {
|
||||
params: vec![
|
||||
FormalParam { name: rc!(x), anno: Some(ty!("Int")), default: None },
|
||||
],
|
||||
type_anno: Some(ty!("String")),
|
||||
body: vec![exst!(s r#""q""#)]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn single_param_lambda() {
|
||||
parse_test_wrap_ast! {
|
||||
r"\x { x + 10 }",
|
||||
exst!(Lambda {
|
||||
params: vec![FormalParam { name: rc!(x), anno: None, default: None }],
|
||||
type_anno: None,
|
||||
body: vec![exst!(s r"x + 10")]
|
||||
})
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
r"\x: Nat { x + 10 }",
|
||||
exst!(Lambda {
|
||||
params: vec![FormalParam { name: rc!(x), anno: Some(ty!("Nat")), default: None }],
|
||||
type_anno: None,
|
||||
body: vec![exst!(s r"x + 10")]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn more_advanced_lambdas() {
|
||||
parse_test! {
|
||||
r#"fn wahoo() { let a = 10; \(x) { x + a } };
|
||||
wahoo()(3) "#,
|
||||
AST {
|
||||
id: ItemIdStore::new_id(),
|
||||
statements: vec![
|
||||
exst!(s r"fn wahoo() { let a = 10; \(x) { x + a } }"),
|
||||
exst! {
|
||||
Call {
|
||||
f: bx!(ex!(Call { f: bx!(ex!(val!("wahoo"))), arguments: vec![] })),
|
||||
arguments: vec![inv!(ex!(NatLiteral(3))).into()],
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_literals() {
|
||||
parse_test_wrap_ast! {
|
||||
"[1,2]",
|
||||
exst!(ListLiteral(vec![ex!(NatLiteral(1)), ex!(NatLiteral(2))]))
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn while_expr() {
|
||||
parse_test_wrap_ast! {
|
||||
"while { 3 }",
|
||||
exst!(WhileExpression { condition: None, body: vec![ exst!(s "3")] })
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"while a == b { 3 }",
|
||||
exst!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![ exst!(s "3")] })
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn for_expr() {
|
||||
parse_test_wrap_ast! {
|
||||
"for { a <- maybeValue } return 1",
|
||||
exst!(ForExpression {
|
||||
enumerators: vec![Enumerator { id: rc!(a), generator: ex!(val!("maybeValue")) }],
|
||||
body: bx!(MonadicReturn(ex!(s "1")))
|
||||
})
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"for n <- someRange { f(n); }",
|
||||
exst!(ForExpression { enumerators: vec![Enumerator { id: rc!(n), generator: ex!(val!("someRange"))}],
|
||||
body: bx!(ForBody::StatementBlock(vec![exst!(s "f(n)")]))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn patterns() {
|
||||
parse_test_wrap_ast! {
|
||||
"if x is Some(a) then { 4 } else { 9 }", exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::TupleStruct(qname!(Some), vec![Pattern::VarOrName(qname!(a))]),
|
||||
then_case: vec![exst!(s "4")],
|
||||
else_case: Some(vec![exst!(s "9")]) })
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if x is Some(a) then 4 else 9", exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::TupleStruct(qname!(Some), vec![Pattern::VarOrName(qname!(a))]),
|
||||
then_case: vec![exst!(s "4")],
|
||||
else_case: Some(vec![exst!(s "9")]) }
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if x is Something { a, b: x } then { 4 } else { 9 }", exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Record(qname!(Something), vec![
|
||||
(rc!(a),Pattern::Literal(PatternLiteral::StringPattern(rc!(a)))),
|
||||
(rc!(b),Pattern::VarOrName(qname!(x)))
|
||||
]),
|
||||
then_case: vec![exst!(s "4")],
|
||||
else_case: Some(vec![exst!(s "9")])
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pattern_literals() {
|
||||
parse_test_wrap_ast! {
|
||||
"if x is -1 then 1 else 2",
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Literal(PatternLiteral::NumPattern { neg: true, num: NatLiteral(1) }),
|
||||
then_case: vec![exst!(NatLiteral(1))],
|
||||
else_case: Some(vec![exst!(NatLiteral(2))]),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if x is 1 then 1 else 2",
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1) }),
|
||||
then_case: vec![exst!(s "1")],
|
||||
else_case: Some(vec![exst!(s "2")]),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if x is true then 1 else 2",
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(
|
||||
IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Literal(PatternLiteral::BoolPattern(true)),
|
||||
then_case: vec![exst!(NatLiteral(1))],
|
||||
else_case: Some(vec![exst!(NatLiteral(2))]),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"if x is \"gnosticism\" then 1 else 2",
|
||||
exst!(
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::SimplePatternMatch {
|
||||
pattern: Pattern::Literal(PatternLiteral::StringPattern(rc!(gnosticism))),
|
||||
then_case: vec![exst!(s "1")],
|
||||
else_case: Some(vec![exst!(s "2")]),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imports() {
|
||||
parse_test_wrap_ast! {
|
||||
"import harbinger::draughts::Norgleheim",
|
||||
import!(ImportSpecifier {
|
||||
id: ItemIdStore::new_id(),
|
||||
path_components: vec![rc!(harbinger), rc!(draughts), rc!(Norgleheim)],
|
||||
imported_names: ImportedNames::LastOfPath
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imports_2() {
|
||||
parse_test_wrap_ast! {
|
||||
"import harbinger::draughts::{Norgleheim, Xraksenlaigar}",
|
||||
import!(ImportSpecifier {
|
||||
id: ItemIdStore::new_id(),
|
||||
path_components: vec![rc!(harbinger), rc!(draughts)],
|
||||
imported_names: ImportedNames::List(vec![
|
||||
rc!(Norgleheim),
|
||||
rc!(Xraksenlaigar)
|
||||
])
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imports_3() {
|
||||
parse_test_wrap_ast! {
|
||||
"import bespouri::{}",
|
||||
import!(ImportSpecifier {
|
||||
id: ItemIdStore::new_id(),
|
||||
path_components: vec![rc!(bespouri)],
|
||||
imported_names: ImportedNames::List(vec![])
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn imports_4() {
|
||||
parse_test_wrap_ast! {
|
||||
"import bespouri::*",
|
||||
import!(ImportSpecifier {
|
||||
id: ItemIdStore::new_id(),
|
||||
path_components: vec![rc!(bespouri)],
|
||||
imported_names: ImportedNames::All
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn if_expr() {
|
||||
parse_test_wrap_ast! {
|
||||
"if x { is 1 then 5, else 20 }",
|
||||
exst! {
|
||||
IfExpression {
|
||||
discriminator: Some(bx!(ex!(s "x"))),
|
||||
body: bx!(IfExpressionBody::CondList(
|
||||
vec![
|
||||
ConditionArm {
|
||||
condition: Condition::Pattern(Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1)})),
|
||||
guard: None,
|
||||
body: vec![exst!(s "5")],
|
||||
},
|
||||
ConditionArm {
|
||||
condition: Condition::Else,
|
||||
guard: None,
|
||||
body: vec![exst!(s "20")],
|
||||
},
|
||||
]
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn modules() {
|
||||
parse_test_wrap_ast! {
|
||||
r#"
|
||||
module ephraim {
|
||||
let a = 10
|
||||
fn nah() { 33 }
|
||||
}
|
||||
"#,
|
||||
module!(
|
||||
ModuleSpecifier { name: rc!(ephraim), contents: vec![
|
||||
decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(s "10") }),
|
||||
decl!(FuncDecl(Signature { name: rc!(nah), operator: false, params: vec![], type_anno: None }, vec![exst!(NatLiteral(33))])),
|
||||
] }
|
||||
)
|
||||
}
|
||||
}
|
||||
schala-lang/language/src/prelude.schala (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
let _SCHALA_VERSION = "0.1.0"
|
||||
|
||||
type Option<T> = Some(T) | None
|
||||
type Ord = LT | EQ | GT
|
||||
|
||||
|
||||
fn map(input: Option<T>, func: Func): Option<T> {
|
||||
if input {
|
||||
is Option::Some(x) then Option::Some(func(x)),
|
||||
is Option::None then Option::None,
|
||||
}
|
||||
}
|
||||
|
||||
type Complicated = Sunrise | Metal { black: bool, norwegian: bool } | Fella(String, Int)
|
||||
@@ -1,8 +1,24 @@
|
||||
//! # Reduced AST
|
||||
//! The reduced AST is a minimal AST designed to be built from the full AST after all possible
|
||||
//! static checks have been done. Consequently, the AST reduction phase does very little error
|
||||
//! checking itself - any errors should ideally be caught either by an earlier phase, or are
|
||||
//! runtime errors that the evaluator should handle. That said, because it does do table lookups
|
||||
//! that can in principle fail [especially at the moment with most static analysis not yet complete],
|
||||
//! there is an Expr variant `ReductionError` to handle these cases.
|
||||
//!
|
||||
//! A design decision to make - should the ReducedAST types contain all information about
|
||||
//! type/layout necessary for the evaluator to work? If so, then the evaluator should not
|
||||
//! have access to the symbol table at all and ReducedAST should carry that information. If not,
|
||||
//! then ReducedAST shouldn't be duplicating information that can be queried at runtime from the
|
||||
//! symbol table. But I think the former might make sense since ultimately the bytecode will be
|
||||
//! built from the ReducedAST.
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use ast::{AST, Statement, Expression, ExpressionType, Declaration, Discriminator, IfExpressionBody, Pattern, PatternLiteral, Guard, HalfExpr};
|
||||
use symbol_table::{Symbol, SymbolSpec, SymbolTable};
|
||||
use builtin::{BinOp, PrefixOp};
|
||||
use crate::ast::*;
|
||||
use crate::symbol_table::{Symbol, SymbolSpec, SymbolTable, FullyQualifiedSymbolName};
|
||||
use crate::builtin::Builtin;
|
||||
use crate::util::deref_optional_box;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ReducedAST(pub Vec<Stmt>);
|
||||
@@ -26,21 +42,21 @@ pub enum Stmt {
|
||||
pub enum Expr {
|
||||
Unit,
|
||||
Lit(Lit),
|
||||
Sym(Rc<String>), //a Sym is anything that can be looked up by name at runtime - i.e. a function or variable address
|
||||
Tuple(Vec<Expr>),
|
||||
Func(Func),
|
||||
Val(Rc<String>),
|
||||
Constructor {
|
||||
type_name: Rc<String>,
|
||||
name: Rc<String>,
|
||||
tag: usize,
|
||||
arity: usize,
|
||||
arity: usize, // n.b. arity here is always the value from the symbol table - if it doesn't match what it's being called with, that's an eval error, eval will handle it
|
||||
},
|
||||
Call {
|
||||
f: Box<Expr>,
|
||||
args: Vec<Expr>,
|
||||
},
|
||||
Assign {
|
||||
val: Box<Expr>,
|
||||
val: Box<Expr>, //TODO this probably can't be a val
|
||||
expr: Box<Expr>,
|
||||
},
|
||||
Conditional {
|
||||
@@ -48,26 +64,29 @@ pub enum Expr {
|
||||
then_clause: Vec<Stmt>,
|
||||
else_clause: Vec<Stmt>,
|
||||
},
|
||||
ConditionalTargetSigilValue,
|
||||
CaseMatch {
|
||||
cond: Box<Expr>,
|
||||
alternatives: Vec<Alternative>
|
||||
},
|
||||
UnimplementedSigilValue
|
||||
UnimplementedSigilValue,
|
||||
ReductionError(String),
|
||||
}
|
||||
|
||||
pub type BoundVars = Vec<Option<Rc<String>>>; //remember that order matters here
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Alternative {
|
||||
pub tag: Option<usize>,
|
||||
pub subpatterns: Vec<Alternative>,
|
||||
pub guard: Option<Expr>,
|
||||
pub bound_vars: Vec<Option<Rc<String>>>, //remember that order matters here
|
||||
pub matchable: Subpattern,
|
||||
pub item: Vec<Stmt>,
|
||||
}
|
||||
|
||||
impl Alternative {
|
||||
fn default(item: Vec<Stmt>) -> Alternative {
|
||||
Alternative { tag: None, subpatterns: vec![], guard: None, bound_vars: vec![], item }
|
||||
}
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Subpattern {
|
||||
pub tag: Option<usize>,
|
||||
pub subpatterns: Vec<Option<Subpattern>>,
|
||||
pub bound_vars: BoundVars,
|
||||
pub guard: Option<Expr>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -81,7 +100,7 @@ pub enum Lit {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Func {
|
||||
BuiltIn(Rc<String>),
|
||||
BuiltIn(Builtin),
|
||||
UserDefined {
|
||||
name: Option<Rc<String>>,
|
||||
params: Vec<Rc<String>>,
|
||||
@@ -89,228 +108,261 @@ pub enum Func {
|
||||
}
|
||||
}
|
||||
|
||||
impl AST {
|
||||
pub fn reduce(&self, symbol_table: &SymbolTable) -> ReducedAST {
|
||||
pub fn reduce(ast: &AST, symbol_table: &SymbolTable) -> ReducedAST {
|
||||
let mut reducer = Reducer { symbol_table };
|
||||
reducer.ast(ast)
|
||||
}
|
||||
|
||||
struct Reducer<'a> {
|
||||
symbol_table: &'a SymbolTable
|
||||
}
|
||||
|
||||
impl<'a> Reducer<'a> {
|
||||
fn ast(&mut self, ast: &AST) -> ReducedAST {
|
||||
let mut output = vec![];
|
||||
for statement in self.0.iter() {
|
||||
output.push(statement.reduce(symbol_table));
|
||||
for statement in ast.statements.iter() {
|
||||
output.push(self.statement(statement));
|
||||
}
|
||||
ReducedAST(output)
|
||||
}
|
||||
}
|
||||
|
||||
impl Statement {
|
||||
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
|
||||
use ast::Statement::*;
|
||||
match self {
|
||||
ExpressionStatement(expr) => Stmt::Expr(expr.reduce(symbol_table)),
|
||||
Declaration(decl) => decl.reduce(symbol_table),
|
||||
fn statement(&mut self, stmt: &Statement) -> Stmt {
|
||||
match &stmt.kind {
|
||||
StatementKind::Expression(expr) => Stmt::Expr(self.expression(&expr)),
|
||||
StatementKind::Declaration(decl) => self.declaration(&decl),
|
||||
StatementKind::Import(_) => Stmt::Noop,
|
||||
StatementKind::Module(modspec) => {
|
||||
for statement in modspec.contents.iter() {
|
||||
self.statement(&statement);
|
||||
}
|
||||
Stmt::Noop
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Expression {
|
||||
fn reduce(&self, symbol_table: &SymbolTable) -> Expr {
|
||||
use ast::ExpressionType::*;
|
||||
let ref input = self.0;
|
||||
fn block(&mut self, block: &Block) -> Vec<Stmt> {
|
||||
block.iter().map(|stmt| self.statement(stmt)).collect()
|
||||
}
|
||||
|
||||
fn invocation_argument(&mut self, invoc: &InvocationArgument) -> Expr {
|
||||
use crate::ast::InvocationArgument::*;
|
||||
match invoc {
|
||||
Positional(ex) => self.expression(ex),
|
||||
Keyword { .. } => Expr::UnimplementedSigilValue,
|
||||
Ignored => Expr::UnimplementedSigilValue,
|
||||
}
|
||||
}
|
||||
|
||||
fn expression(&mut self, expr: &Expression) -> Expr {
|
||||
use crate::ast::ExpressionKind::*;
|
||||
let symbol_table = self.symbol_table;
|
||||
let ref input = expr.kind;
|
||||
match input {
|
||||
NatLiteral(n) => Expr::Lit(Lit::Nat(*n)),
|
||||
FloatLiteral(f) => Expr::Lit(Lit::Float(*f)),
|
||||
StringLiteral(s) => Expr::Lit(Lit::StringLit(s.clone())),
|
||||
BoolLiteral(b) => Expr::Lit(Lit::Bool(*b)),
|
||||
BinExp(binop, lhs, rhs) => binop.reduce(symbol_table, lhs, rhs),
|
||||
PrefixExp(op, arg) => op.reduce(symbol_table, arg),
|
||||
Value(name) => match symbol_table.lookup_by_name(name) {
|
||||
Some(Symbol { spec: SymbolSpec::DataConstructor { index, type_args, type_name}, .. }) => Expr::Constructor {
|
||||
type_name: type_name.clone(),
|
||||
name: name.clone(),
|
||||
tag: index.clone(),
|
||||
arity: type_args.len(),
|
||||
},
|
||||
_ => Expr::Val(name.clone()),
|
||||
},
|
||||
Call { f, arguments } => Expr::Call {
|
||||
f: Box::new(f.reduce(symbol_table)),
|
||||
args: arguments.iter().map(|arg| arg.reduce(symbol_table)).collect(),
|
||||
},
|
||||
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| e.reduce(symbol_table)).collect()),
|
||||
IfExpression { discriminator, body } => reduce_if_expression(discriminator, body, symbol_table),
|
||||
_ => Expr::UnimplementedSigilValue,
|
||||
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
|
||||
PrefixExp(op, arg) => self.prefix(op, arg),
|
||||
Value(qualified_name) => self.value(qualified_name),
|
||||
Call { f, arguments } => self.reduce_call_expression(f, arguments),
|
||||
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
|
||||
IfExpression { discriminator, body } => self.reduce_if_expression(deref_optional_box(discriminator), body),
|
||||
Lambda { params, body, .. } => self.reduce_lambda(params, body),
|
||||
NamedStruct { name, fields } => self.reduce_named_struct(name, fields),
|
||||
Index { .. } => Expr::UnimplementedSigilValue,
|
||||
WhileExpression { .. } => Expr::UnimplementedSigilValue,
|
||||
ForExpression { .. } => Expr::UnimplementedSigilValue,
|
||||
ListLiteral { .. } => Expr::UnimplementedSigilValue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody, symbol_table: &SymbolTable) -> Expr {
|
||||
let cond = Box::new(match *discriminator {
|
||||
Discriminator::Simple(ref expr) => expr.reduce(symbol_table),
|
||||
Discriminator::BinOp(ref _expr, ref _binop) => panic!("Can't yet handle binop discriminators")
|
||||
});
|
||||
match *body {
|
||||
IfExpressionBody::SimpleConditional(ref then_clause, ref else_clause) => {
|
||||
let then_clause = then_clause.iter().map(|expr| expr.reduce(symbol_table)).collect();
|
||||
let else_clause = match else_clause {
|
||||
None => vec![],
|
||||
Some(stmts) => stmts.iter().map(|expr| expr.reduce(symbol_table)).collect(),
|
||||
};
|
||||
Expr::Conditional { cond, then_clause, else_clause }
|
||||
},
|
||||
IfExpressionBody::SimplePatternMatch(ref pat, ref then_clause, ref else_clause) => {
|
||||
let then_clause = then_clause.iter().map(|expr| expr.reduce(symbol_table)).collect();
|
||||
let else_clause = match else_clause {
|
||||
None => vec![],
|
||||
Some(stmts) => stmts.iter().map(|expr| expr.reduce(symbol_table)).collect(),
|
||||
};
|
||||
fn value(&mut self, qualified_name: &QualifiedName) -> Expr {
|
||||
let symbol_table = self.symbol_table;
|
||||
let ref id = qualified_name.id;
|
||||
let ref sym_name = match symbol_table.get_fqsn_from_id(id) {
|
||||
Some(fqsn) => fqsn,
|
||||
None => return Expr::ReductionError(format!("FQSN lookup for Value {:?} failed", qualified_name)),
|
||||
};
|
||||
|
||||
let alternatives = vec![
|
||||
pat.to_alternative(&cond, then_clause, symbol_table),
|
||||
Alternative::default(else_clause),
|
||||
];
|
||||
//TODO this probably needs to change
|
||||
let FullyQualifiedSymbolName(ref v) = sym_name;
|
||||
let name = v.last().unwrap().name.clone();
|
||||
|
||||
Expr::CaseMatch {
|
||||
cond,
|
||||
alternatives,
|
||||
}
|
||||
},
|
||||
IfExpressionBody::GuardList(ref guard_arms) => {
|
||||
let mut alternatives = vec![];
|
||||
for arm in guard_arms {
|
||||
match arm.guard {
|
||||
Guard::Pat(ref p) => {
|
||||
let item = arm.body.iter().map(|expr| expr.reduce(symbol_table)).collect();
|
||||
let alt = p.to_alternative(&cond, item, symbol_table);
|
||||
alternatives.push(alt);
|
||||
let Symbol { local_name, spec, .. } = match symbol_table.lookup_by_fqsn(&sym_name) {
|
||||
Some(s) => s,
|
||||
//None => return Expr::ReductionError(format!("Symbol {:?} not found", sym_name)),
|
||||
None => return Expr::Sym(name.clone())
|
||||
};
|
||||
|
||||
match spec {
|
||||
SymbolSpec::RecordConstructor { .. } => Expr::ReductionError(format!("AST reducer doesn't expect a RecordConstructor here")),
|
||||
SymbolSpec::DataConstructor { index, type_args, type_name } => Expr::Constructor {
|
||||
type_name: type_name.clone(),
|
||||
name: name.clone(),
|
||||
tag: index.clone(),
|
||||
arity: type_args.len(),
|
||||
},
|
||||
SymbolSpec::Func(_) => Expr::Sym(local_name.clone()),
|
||||
SymbolSpec::Binding => Expr::Sym(local_name.clone()), //TODO not sure if this is right, probably needs to eventually be fqsn
|
||||
SymbolSpec::Type { .. } => Expr::ReductionError("AST reducer doesn't expect a type here".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
fn reduce_lambda(&mut self, params: &Vec<FormalParam>, body: &Block) -> Expr {
|
||||
Expr::Func(Func::UserDefined {
|
||||
name: None,
|
||||
params: params.iter().map(|param| param.name.clone()).collect(),
|
||||
body: self.block(body),
|
||||
})
|
||||
}
|
||||
|
||||
fn reduce_named_struct(&mut self, name: &QualifiedName, fields: &Vec<(Rc<String>, Expression)>) -> Expr {
|
||||
let symbol_table = self.symbol_table;
|
||||
let ref sym_name = match symbol_table.get_fqsn_from_id(&name.id) {
|
||||
Some(fqsn) => fqsn,
|
||||
None => return Expr::ReductionError(format!("FQSN lookup for name {:?} failed", name)),
|
||||
};
|
||||
|
||||
let FullyQualifiedSymbolName(ref v) = sym_name;
|
||||
let ref name = v.last().unwrap().name;
|
||||
let (type_name, index, members_from_table) = match symbol_table.lookup_by_fqsn(&sym_name) {
|
||||
Some(Symbol { spec: SymbolSpec::RecordConstructor { members, type_name, index }, .. }) => (type_name.clone(), index, members),
|
||||
_ => return Expr::ReductionError("Not a record constructor".to_string()),
|
||||
};
|
||||
let arity = members_from_table.len();
|
||||
|
||||
let mut args: Vec<(Rc<String>, Expr)> = fields.iter()
|
||||
.map(|(name, expr)| (name.clone(), self.expression(expr)))
|
||||
.collect();
|
||||
|
||||
args.as_mut_slice()
|
||||
.sort_unstable_by(|(name1, _), (name2, _)| name1.cmp(name2)); //arbitrary - sorting by alphabetical order
|
||||
|
||||
let args = args.into_iter().map(|(_, expr)| expr).collect();
|
||||
|
||||
//TODO make sure this sorting actually works
|
||||
let f = box Expr::Constructor { type_name, name: name.clone(), tag: *index, arity, };
|
||||
Expr::Call { f, args }
|
||||
}
|
||||
|
||||
fn reduce_call_expression(&mut self, func: &Expression, arguments: &Vec<InvocationArgument>) -> Expr {
|
||||
Expr::Call {
|
||||
f: Box::new(self.expression(func)),
|
||||
args: arguments.iter().map(|arg| self.invocation_argument(arg)).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
fn reduce_if_expression(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> Expr {
|
||||
let symbol_table = self.symbol_table;
|
||||
let cond = Box::new(match discriminator {
|
||||
Some(expr) => self.expression(expr),
|
||||
None => return Expr::ReductionError(format!("blank cond if-expr not supported")),
|
||||
});
|
||||
|
||||
match body {
|
||||
IfExpressionBody::SimpleConditional { then_case, else_case } => {
|
||||
let then_clause = self.block(&then_case);
|
||||
let else_clause = match else_case.as_ref() {
|
||||
None => vec![],
|
||||
Some(stmts) => self.block(&stmts),
|
||||
};
|
||||
Expr::Conditional { cond, then_clause, else_clause }
|
||||
},
|
||||
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case } => {
|
||||
let then_clause = self.block(&then_case);
|
||||
let else_clause = match else_case.as_ref() {
|
||||
None => vec![],
|
||||
Some(stmts) => self.block(&stmts),
|
||||
};
|
||||
|
||||
let alternatives = vec![
|
||||
pattern.to_alternative(then_clause, symbol_table),
|
||||
Alternative {
|
||||
matchable: Subpattern {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
bound_vars: vec![],
|
||||
guard: None,
|
||||
},
|
||||
item: else_clause
|
||||
},
|
||||
Guard::HalfExpr(HalfExpr { op: _, expr: _ }) => {
|
||||
return Expr::UnimplementedSigilValue
|
||||
}
|
||||
];
|
||||
|
||||
Expr::CaseMatch {
|
||||
cond,
|
||||
alternatives,
|
||||
}
|
||||
}
|
||||
Expr::CaseMatch { cond, alternatives }
|
||||
}
|
||||
}
|
||||
}
|
||||
/* ig var pat
|
||||
* x is SomeBigOldEnum(_, x, Some(t))
|
||||
*/
|
||||
|
||||
|
||||
impl Pattern {
|
||||
fn to_alternative(&self, cond: &Box<Expr>, item: Vec<Stmt>, symbol_table: &SymbolTable) -> Alternative {
|
||||
use self::Pattern::*;
|
||||
|
||||
fn handle_symbol(symbol: &Symbol, subpatterns: &Vec<Pattern>, item: Vec<Stmt>) -> Alternative {
|
||||
let tag = match symbol.spec {
|
||||
SymbolSpec::DataConstructor { index, .. } => index.clone(),
|
||||
_ => panic!("Symbol is not a data constructor - this should've been caught in type-checking"),
|
||||
};
|
||||
let bound_vars = subpatterns.iter().map(|p| match p {
|
||||
Literal(PatternLiteral::VarPattern(var)) => Some(var.clone()),
|
||||
_ => None,
|
||||
}).collect();
|
||||
|
||||
/*
|
||||
let guard_equality_exprs: Vec<Expr> = subpatterns.iter().map(|p| match p {
|
||||
Literal(lit) => match lit {
|
||||
_ => unimplemented!()
|
||||
},
|
||||
_ => unimplemented!()
|
||||
}).collect();
|
||||
*/
|
||||
|
||||
let guard = None;
|
||||
let subpatterns = vec![];
|
||||
|
||||
Alternative {
|
||||
tag: Some(tag),
|
||||
subpatterns,
|
||||
guard,
|
||||
bound_vars,
|
||||
item,
|
||||
}
|
||||
}
|
||||
|
||||
match self {
|
||||
TupleStruct(name, subpatterns) => {
|
||||
let symbol = symbol_table.lookup_by_name(name).expect(&format!("Symbol {} not found", name));
|
||||
handle_symbol(symbol, subpatterns, item)
|
||||
},
|
||||
TuplePattern(_items) => {
|
||||
unimplemented!()
|
||||
},
|
||||
Record(_name, _pairs) => {
|
||||
unimplemented!()
|
||||
},
|
||||
Ignored => Alternative::default(item),
|
||||
Literal(lit) => match lit {
|
||||
PatternLiteral::NumPattern { neg, num } => {
|
||||
let comparison = Expr::Lit(match (neg, num) {
|
||||
(false, ExpressionType::NatLiteral(n)) => Lit::Nat(*n),
|
||||
(false, ExpressionType::FloatLiteral(f)) => Lit::Float(*f),
|
||||
(true, ExpressionType::NatLiteral(n)) => Lit::Int(-1*(*n as i64)),
|
||||
(true, ExpressionType::FloatLiteral(f)) => Lit::Float(-1.0*f),
|
||||
_ => panic!("This should never happen")
|
||||
});
|
||||
let guard = Some(Expr::Call {
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Rc::new("==".to_string())))),
|
||||
args: vec![comparison, *cond.clone()]
|
||||
});
|
||||
Alternative {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard,
|
||||
bound_vars: vec![],
|
||||
item
|
||||
}
|
||||
},
|
||||
PatternLiteral::StringPattern(_s) => unimplemented!(),
|
||||
PatternLiteral::BoolPattern(b) => {
|
||||
let guard = Some(if *b {
|
||||
*cond.clone()
|
||||
} else {
|
||||
Expr::Call {
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Rc::new("!".to_string())))),
|
||||
args: vec![*cond.clone()]
|
||||
IfExpressionBody::CondList(ref condition_arms) => {
|
||||
let mut alternatives = vec![];
|
||||
for arm in condition_arms {
|
||||
match arm.condition {
|
||||
Condition::Expression(ref _expr) => {
|
||||
return Expr::UnimplementedSigilValue
|
||||
},
|
||||
Condition::Pattern(ref p) => {
|
||||
let item = self.block(&arm.body);
|
||||
let alt = p.to_alternative(item, symbol_table);
|
||||
alternatives.push(alt);
|
||||
},
|
||||
Condition::TruncatedOp(_, _) => {
|
||||
return Expr::UnimplementedSigilValue
|
||||
},
|
||||
Condition::Else => {
|
||||
return Expr::UnimplementedSigilValue
|
||||
}
|
||||
});
|
||||
Alternative {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard,
|
||||
bound_vars: vec![],
|
||||
item
|
||||
}
|
||||
},
|
||||
PatternLiteral::VarPattern(var) => match symbol_table.lookup_by_name(var) {
|
||||
Some(symbol) => handle_symbol(symbol, &vec![], item),
|
||||
None => Alternative {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard: None,
|
||||
bound_vars: vec![Some(var.clone())],
|
||||
item
|
||||
}
|
||||
}
|
||||
},
|
||||
Expr::CaseMatch { cond, alternatives }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Declaration {
|
||||
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
|
||||
fn binop(&mut self, binop: &BinOp, lhs: &Box<Expression>, rhs: &Box<Expression>) -> Expr {
|
||||
let operation = Builtin::from_str(binop.sigil()).ok();
|
||||
match operation {
|
||||
Some(Builtin::Assignment) => Expr::Assign {
|
||||
val: Box::new(self.expression(&*lhs)),
|
||||
expr: Box::new(self.expression(&*rhs)),
|
||||
},
|
||||
Some(op) => {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(op)));
|
||||
Expr::Call { f, args: vec![self.expression(&*lhs), self.expression(&*rhs)] }
|
||||
},
|
||||
None => {
|
||||
//TODO handle a user-defined operation
|
||||
Expr::UnimplementedSigilValue
|
||||
}
|
||||
}
|
||||
}
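// Editor's sketch (not part of the diff): the shape `binop` above returns for an input
// like `a + 1`, assuming this crate's `Expr`/`Func`/`Lit` types and `Rc` are in scope and
// that `Builtin` has an addition variant (called `Add` here purely for illustration).
fn example_reduced_binop() -> Expr {
    Expr::Call {
        f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Add))),
        args: vec![
            Expr::Sym(Rc::new("a".to_string())), // reduced lhs
            Expr::Lit(Lit::Nat(1)),              // reduced rhs
        ],
    }
}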
|
||||
|
||||
fn prefix(&mut self, prefix: &PrefixOp, arg: &Box<Expression>) -> Expr {
|
||||
match prefix.builtin {
|
||||
Some(op) => {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(op)));
|
||||
Expr::Call { f, args: vec![self.expression(arg)] }
|
||||
},
|
||||
None => { //TODO need this for custom prefix ops
|
||||
Expr::UnimplementedSigilValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn declaration(&mut self, declaration: &Declaration) -> Stmt {
|
||||
use self::Declaration::*;
|
||||
use ::ast::Signature;
|
||||
match self {
|
||||
Binding {name, constant, expr } => Stmt::Binding { name: name.clone(), constant: *constant, expr: expr.reduce(symbol_table) },
|
||||
match declaration {
|
||||
Binding {name, constant, expr, .. } => Stmt::Binding { name: name.clone(), constant: *constant, expr: self.expression(expr) },
|
||||
FuncDecl(Signature { name, params, .. }, statements) => Stmt::PreBinding {
|
||||
name: name.clone(),
|
||||
func: Func::UserDefined {
|
||||
name: Some(name.clone()),
|
||||
params: params.iter().map(|param| param.0.clone()).collect(),
|
||||
body: statements.iter().map(|stmt| stmt.reduce(symbol_table)).collect(),
|
||||
params: params.iter().map(|param| param.name.clone()).collect(),
|
||||
body: self.block(&statements),
|
||||
}
|
||||
},
|
||||
TypeDecl { .. } => Stmt::Noop,
|
||||
TypeAlias(_, _) => Stmt::Noop,
|
||||
TypeAlias{ .. } => Stmt::Noop,
|
||||
Interface { .. } => Stmt::Noop,
|
||||
Impl { .. } => Stmt::Expr(Expr::UnimplementedSigilValue),
|
||||
_ => Stmt::Expr(Expr::UnimplementedSigilValue)
|
||||
@@ -318,23 +370,175 @@ impl Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
impl BinOp {
|
||||
fn reduce(&self, symbol_table: &SymbolTable, lhs: &Box<Expression>, rhs: &Box<Expression>) -> Expr {
|
||||
if **self.sigil() == "=" {
|
||||
Expr::Assign {
|
||||
val: Box::new(lhs.reduce(symbol_table)),
|
||||
expr: Box::new(rhs.reduce(symbol_table)),
|
||||
|
||||
|
||||
|
||||
|
||||
/* ig var pat
|
||||
* x is SomeBigOldEnum(_, x, Some(t))
|
||||
*/
|
||||
|
||||
fn handle_symbol(symbol: Option<&Symbol>, inner_patterns: &Vec<Pattern>, symbol_table: &SymbolTable) -> Subpattern {
|
||||
use self::Pattern::*;
|
||||
let tag = symbol.map(|symbol| match symbol.spec {
|
||||
SymbolSpec::DataConstructor { index, .. } => index.clone(),
|
||||
_ => panic!("Symbol is not a data constructor - this should've been caught in type-checking"),
|
||||
});
|
||||
let bound_vars = inner_patterns.iter().map(|p| match p {
|
||||
VarOrName(qualified_name) => {
|
||||
let fqsn = symbol_table.get_fqsn_from_id(&qualified_name.id);
|
||||
let symbol_exists = fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)).is_some();
|
||||
if symbol_exists {
|
||||
None
|
||||
} else {
|
||||
let QualifiedName { components, .. } = qualified_name;
|
||||
if components.len() == 1 {
|
||||
Some(components[0].clone())
|
||||
} else {
|
||||
panic!("Bad variable name in pattern");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(self.sigil().clone())));
|
||||
Expr::Call { f, args: vec![lhs.reduce(symbol_table), rhs.reduce(symbol_table)]}
|
||||
},
|
||||
_ => None,
|
||||
}).collect();
|
||||
|
||||
let subpatterns = inner_patterns.iter().map(|p| match p {
|
||||
Ignored => None,
|
||||
VarOrName(_) => None,
|
||||
Literal(other) => Some(other.to_subpattern(symbol_table)),
|
||||
tp @ TuplePattern(_) => Some(tp.to_subpattern(symbol_table)),
|
||||
ts @ TupleStruct(_, _) => Some(ts.to_subpattern(symbol_table)),
|
||||
Record(..) => unimplemented!(),
|
||||
}).collect();
|
||||
|
||||
let guard = None;
|
||||
/*
|
||||
let guard_equality_exprs: Vec<Expr> = subpatterns.iter().map(|p| match p {
|
||||
Literal(lit) => match lit {
|
||||
_ => unimplemented!()
|
||||
},
|
||||
_ => unimplemented!()
|
||||
}).collect();
|
||||
*/
|
||||
|
||||
Subpattern {
|
||||
tag,
|
||||
subpatterns,
|
||||
guard,
|
||||
bound_vars,
|
||||
}
|
||||
}
|
||||
|
||||
impl Pattern {
|
||||
fn to_alternative(&self, item: Vec<Stmt>, symbol_table: &SymbolTable) -> Alternative {
|
||||
let s = self.to_subpattern(symbol_table);
|
||||
Alternative {
|
||||
matchable: Subpattern {
|
||||
tag: s.tag,
|
||||
subpatterns: s.subpatterns,
|
||||
bound_vars: s.bound_vars,
|
||||
guard: s.guard,
|
||||
},
|
||||
item
|
||||
}
|
||||
}
|
||||
|
||||
fn to_subpattern(&self, symbol_table: &SymbolTable) -> Subpattern {
|
||||
use self::Pattern::*;
|
||||
match self {
|
||||
TupleStruct(QualifiedName{ components, id }, inner_patterns) => {
|
||||
let fqsn = symbol_table.get_fqsn_from_id(&id);
|
||||
match fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)) {
|
||||
Some(symbol) => handle_symbol(Some(symbol), inner_patterns, symbol_table),
|
||||
None => {
|
||||
panic!("Symbol {:?} not found", components);
|
||||
}
|
||||
}
|
||||
},
|
||||
TuplePattern(inner_patterns) => handle_symbol(None, inner_patterns, symbol_table),
|
||||
Record(_name, _pairs) => {
|
||||
unimplemented!()
|
||||
},
|
||||
Ignored => Subpattern { tag: None, subpatterns: vec![], guard: None, bound_vars: vec![] },
|
||||
Literal(lit) => lit.to_subpattern(symbol_table),
|
||||
VarOrName(QualifiedName { components, id }) => {
|
||||
// if fqsn is Some, treat this as a symbol pattern. If it's None, treat it
|
||||
// as a variable.
|
||||
let fqsn = symbol_table.get_fqsn_from_id(&id);
|
||||
match fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)) {
|
||||
Some(symbol) => handle_symbol(Some(symbol), &vec![], symbol_table),
|
||||
None => {
|
||||
let name = if components.len() == 1 {
|
||||
components[0].clone()
|
||||
} else {
|
||||
panic!("check this line of code yo");
|
||||
};
|
||||
Subpattern {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard: None,
|
||||
bound_vars: vec![Some(name.clone())],
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrefixOp {
|
||||
fn reduce(&self, symbol_table: &SymbolTable, arg: &Box<Expression>) -> Expr {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(self.sigil().clone())));
|
||||
Expr::Call { f, args: vec![arg.reduce(symbol_table)]}
|
||||
impl PatternLiteral {
|
||||
fn to_subpattern(&self, _symbol_table: &SymbolTable) -> Subpattern {
|
||||
use self::PatternLiteral::*;
|
||||
match self {
|
||||
NumPattern { neg, num } => {
|
||||
let comparison = Expr::Lit(match (neg, num) {
|
||||
(false, ExpressionKind::NatLiteral(n)) => Lit::Nat(*n),
|
||||
(false, ExpressionKind::FloatLiteral(f)) => Lit::Float(*f),
|
||||
(true, ExpressionKind::NatLiteral(n)) => Lit::Int(-1*(*n as i64)),
|
||||
(true, ExpressionKind::FloatLiteral(f)) => Lit::Float(-1.0*f),
|
||||
_ => panic!("This should never happen")
|
||||
});
|
||||
let guard = Some(Expr::Call {
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Equality))),
|
||||
args: vec![comparison, Expr::ConditionalTargetSigilValue],
|
||||
});
|
||||
Subpattern {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard,
|
||||
bound_vars: vec![],
|
||||
}
|
||||
},
|
||||
StringPattern(s) => {
|
||||
let guard = Some(Expr::Call {
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Equality))),
|
||||
args: vec![Expr::Lit(Lit::StringLit(s.clone())), Expr::ConditionalTargetSigilValue]
|
||||
});
|
||||
|
||||
Subpattern {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard,
|
||||
bound_vars: vec![],
|
||||
}
|
||||
},
|
||||
BoolPattern(b) => {
|
||||
let guard = Some(if *b {
|
||||
Expr::ConditionalTargetSigilValue
|
||||
} else {
|
||||
Expr::Call {
|
||||
f: Box::new(Expr::Func(Func::BuiltIn(Builtin::BooleanNot))),
|
||||
args: vec![Expr::ConditionalTargetSigilValue]
|
||||
}
|
||||
});
|
||||
Subpattern {
|
||||
tag: None,
|
||||
subpatterns: vec![],
|
||||
guard,
|
||||
bound_vars: vec![],
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
}
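// Editor's sketch (not part of the diff): the Subpattern that `to_subpattern` above
// produces for a numeric literal pattern such as `5` - its guard compares the literal
// against the value currently being matched on.
fn example_num_subpattern() -> Subpattern {
    Subpattern {
        tag: None,
        subpatterns: vec![],
        guard: Some(Expr::Call {
            f: Box::new(Expr::Func(Func::BuiltIn(Builtin::Equality))),
            args: vec![Expr::Lit(Lit::Nat(5)), Expr::ConditionalTargetSigilValue],
        }),
        bound_vars: vec![],
    }
}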
|
||||
|
||||
340
schala-lang/language/src/schala.rs
Normal file
@@ -0,0 +1,340 @@
|
||||
use stopwatch::Stopwatch;
|
||||
|
||||
use std::time::Duration;
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use itertools::Itertools;
|
||||
use schala_repl::{ProgrammingLanguageInterface,
|
||||
ComputationRequest, ComputationResponse,
|
||||
LangMetaRequest, LangMetaResponse, GlobalOutputStats,
|
||||
DebugResponse, DebugAsk};
|
||||
use crate::{ast, reduced_ast, tokenizing, parsing, parser, eval, typechecking, symbol_table, source_map};
|
||||
|
||||
pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
|
||||
pub type SourceMapHandle = Rc<RefCell<source_map::SourceMap>>;
|
||||
|
||||
/// All the state necessary to parse and execute a Schala program is stored in this struct.
/// `state` represents the execution state for the AST-walking interpreter; the other fields
/// should be self-explanatory.
|
||||
pub struct Schala {
|
||||
source_reference: SourceReference,
|
||||
source_map: SourceMapHandle,
|
||||
state: eval::State<'static>,
|
||||
symbol_table: SymbolTableHandle,
|
||||
resolver: crate::scope_resolution::ScopeResolver<'static>,
|
||||
type_context: typechecking::TypeContext<'static>,
|
||||
active_parser: parsing::Parser,
|
||||
}
|
||||
|
||||
impl Schala {
|
||||
fn handle_docs(&self, source: String) -> LangMetaResponse {
|
||||
LangMetaResponse::Docs {
|
||||
doc_string: format!("Schala item `{}` : <<Schala-lang documentation not yet implemented>>", source)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Schala {
|
||||
/// Creates a new Schala environment *without* any prelude.
|
||||
fn new_blank_env() -> Schala {
|
||||
let source_map = Rc::new(RefCell::new(source_map::SourceMap::new()));
|
||||
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new(source_map.clone())));
|
||||
Schala {
|
||||
//TODO maybe these can be the same structure
|
||||
source_reference: SourceReference::new(),
|
||||
symbol_table: symbols.clone(),
|
||||
source_map: source_map.clone(),
|
||||
resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
|
||||
state: eval::State::new(),
|
||||
type_context: typechecking::TypeContext::new(),
|
||||
active_parser: parsing::Parser::new(source_map)
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new Schala environment with the standard prelude, which is defined as ordinary
|
||||
/// Schala code in the file `prelude.schala`
|
||||
pub fn new() -> Schala {
|
||||
let prelude = include_str!("prelude.schala");
|
||||
let mut s = Schala::new_blank_env();
|
||||
|
||||
let request = ComputationRequest { source: prelude, debug_requests: HashSet::default() };
|
||||
let response = s.run_computation(request);
|
||||
if let Err(msg) = response.main_output {
|
||||
panic!("Error in prelude, panicking: {}", msg);
|
||||
}
|
||||
s
|
||||
}
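// Editor's sketch (not part of the diff): how an embedder might drive this struct using
// only the API visible above; the source text is an arbitrary example program.
fn example_run() {
    use std::collections::HashSet;

    let mut schala = Schala::new();
    let request = ComputationRequest { source: "5 + 5", debug_requests: HashSet::default() };
    let response = schala.run_computation(request);
    match response.main_output {
        Ok(output) => println!("=> {}", output),
        Err(err) => eprintln!("error: {}", err),
    }
}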
|
||||
|
||||
fn handle_debug_immediate(&self, request: DebugAsk) -> DebugResponse {
|
||||
use DebugAsk::*;
|
||||
match request {
|
||||
Timing => DebugResponse { ask: Timing, value: format!("Invalid") },
|
||||
ByStage { stage_name, token } => match &stage_name[..] {
|
||||
"symbol-table" => {
|
||||
let value = self.symbol_table.borrow().debug_symbol_table();
|
||||
DebugResponse {
|
||||
ask: ByStage { stage_name: format!("symbol-table"), token },
|
||||
value
|
||||
}
|
||||
},
|
||||
s => {
|
||||
DebugResponse {
|
||||
ask: ByStage { stage_name: s.to_string(), token: None },
|
||||
value: format!("Not-implemented")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<Vec<tokenizing::Token>, String> {
|
||||
let tokens = tokenizing::tokenize(input);
|
||||
comp.map(|comp| {
|
||||
let token_string = tokens.iter().map(|t| t.to_string_with_metadata()).join(", ");
|
||||
comp.add_artifact(token_string);
|
||||
});
|
||||
|
||||
let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
|
||||
if errors.len() == 0 {
|
||||
Ok(tokens)
|
||||
} else {
|
||||
Err(format!("{:?}", errors))
|
||||
}
|
||||
}
|
||||
|
||||
fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
use ParsingDebugType::*;
|
||||
|
||||
let ref mut parser = handle.active_parser;
|
||||
parser.add_new_tokens(input);
|
||||
let ast = parser.parse();
|
||||
|
||||
comp.map(|comp| {
|
||||
let debug_format = comp.parsing.as_ref().unwrap_or(&CompactAST);
|
||||
let debug_info = match debug_format {
|
||||
CompactAST => match ast{
|
||||
Ok(ref ast) => ast.compact_debug(),
|
||||
Err(_) => "Error - see output".to_string(),
|
||||
},
|
||||
ExpandedAST => match ast{
|
||||
Ok(ref ast) => ast.expanded_debug(),
|
||||
Err(_) => "Error - see output".to_string(),
|
||||
},
|
||||
Trace => parser.format_parse_trace(),
|
||||
};
|
||||
comp.add_artifact(debug_info);
|
||||
});
|
||||
ast.map_err(|err| format_parse_error(err, &handle.source_reference))
|
||||
}
|
||||
|
||||
fn format_parse_error(error: parsing::ParseError, source_reference: &SourceReference) -> String {
|
||||
let line_num = error.token.location.line_num;
|
||||
let ch = error.token.location.char_num;
|
||||
let line_from_program = source_reference.get_line(line_num);
|
||||
let location_pointer = format!("{}^", " ".repeat(ch));
|
||||
|
||||
let line_num_digits = format!("{}", line_num).chars().count();
|
||||
let space_padding = " ".repeat(line_num_digits);
|
||||
|
||||
let production = match error.production_name {
|
||||
Some(n) => format!("\n(from production \"{}\")", n),
|
||||
None => "".to_string()
|
||||
};
|
||||
|
||||
format!(r#"
|
||||
{error_msg}{production}
|
||||
{space_padding} |
|
||||
{line_num} | {}
|
||||
{space_padding} | {}
|
||||
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num, production=production
|
||||
)
|
||||
}
|
||||
|
||||
fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
let () = handle.symbol_table.borrow_mut().add_top_level_symbols(&input)?;
|
||||
comp.map(|comp| {
|
||||
let debug = handle.symbol_table.borrow().debug_symbol_table();
|
||||
comp.add_artifact(debug);
|
||||
});
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn scope_resolution(mut input: ast::AST, handle: &mut Schala, _com: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
let () = handle.resolver.resolve(&mut input)?;
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
let result = handle.type_context.typecheck(&input);
|
||||
|
||||
comp.map(|comp| {
|
||||
comp.add_artifact(match result {
|
||||
Ok(ty) => ty.to_string(),
|
||||
Err(err) => format!("Type error: {}", err.msg)
|
||||
});
|
||||
});
|
||||
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn ast_reducing(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<reduced_ast::ReducedAST, String> {
|
||||
let ref symbol_table = handle.symbol_table.borrow();
|
||||
let output = reduced_ast::reduce(&input, symbol_table);
|
||||
comp.map(|comp| comp.add_artifact(format!("{:?}", output)));
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn eval(input: reduced_ast::ReducedAST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<String, String> {
|
||||
comp.map(|comp| comp.add_artifact(handle.state.debug_print()));
|
||||
let evaluation_outputs = handle.state.evaluate(input, true);
|
||||
let text_output: Result<Vec<String>, String> = evaluation_outputs
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let eval_output: Result<String, String> = text_output
|
||||
.map(|v| { v.into_iter().intersperse(format!("\n")).collect() });
|
||||
eval_output
|
||||
}
|
||||
|
||||
/// Represents lines of source code
|
||||
struct SourceReference {
|
||||
lines: Option<Vec<String>>
|
||||
}
|
||||
|
||||
impl SourceReference {
|
||||
fn new() -> SourceReference {
|
||||
SourceReference { lines: None }
|
||||
}
|
||||
|
||||
fn load_new_source(&mut self, source: &str) {
|
||||
//TODO this does a lot of heap allocation - maybe there's a way to make it more efficient?
|
||||
self.lines = Some(source.lines().map(|s| s.to_string()).collect());
}
|
||||
|
||||
fn get_line(&self, line: usize) -> String {
|
||||
self.lines.as_ref().and_then(|x| x.get(line).map(|s| s.to_string())).unwrap_or(format!("NO LINE FOUND"))
|
||||
}
|
||||
}
|
||||
|
||||
enum ParsingDebugType {
|
||||
CompactAST,
|
||||
ExpandedAST,
|
||||
Trace
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct PassDebugArtifact {
|
||||
parsing: Option<ParsingDebugType>,
|
||||
artifacts: Vec<String>
|
||||
|
||||
}
|
||||
impl PassDebugArtifact {
|
||||
fn add_artifact(&mut self, artifact: String) {
|
||||
self.artifacts.push(artifact)
|
||||
}
|
||||
}
|
||||
|
||||
fn stage_names() -> Vec<&'static str> {
|
||||
vec![
|
||||
"tokenizing",
|
||||
"parsing",
|
||||
"symbol-table",
|
||||
"scope-resolution",
|
||||
"typechecking",
|
||||
"ast-reduction",
|
||||
"ast-walking-evaluation"
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
impl ProgrammingLanguageInterface for Schala {
|
||||
fn get_language_name(&self) -> String { format!("Schala") }
|
||||
fn get_source_file_suffix(&self) -> String { format!("schala") }
|
||||
|
||||
fn run_computation(&mut self, request: ComputationRequest) -> ComputationResponse {
|
||||
struct PassToken<'a> {
|
||||
schala: &'a mut Schala,
|
||||
stage_durations: &'a mut Vec<(String, Duration)>,
|
||||
sw: &'a Stopwatch,
|
||||
debug_requests: &'a HashSet<DebugAsk>,
|
||||
debug_responses: &'a mut Vec<DebugResponse>,
|
||||
}
|
||||
|
||||
fn output_wrapper<Input, Output, F>(n: usize, func: F, input: Input, token: &mut PassToken) -> Result<Output, String>
|
||||
where F: Fn(Input, &mut Schala, Option<&mut PassDebugArtifact>) -> Result<Output, String>
|
||||
{
|
||||
let stage_names = stage_names();
|
||||
let cur_stage_name = stage_names[n];
|
||||
let ask = token.debug_requests.iter().find(|ask| ask.is_for_stage(cur_stage_name));
|
||||
|
||||
let parsing = match ask {
|
||||
Some(DebugAsk::ByStage { token, .. }) if cur_stage_name == "parsing" => Some(
|
||||
token.as_ref().map(|token| match &token[..] {
|
||||
"compact" => ParsingDebugType::CompactAST,
|
||||
"expanded" => ParsingDebugType::ExpandedAST,
|
||||
"trace" => ParsingDebugType::Trace,
|
||||
_ => ParsingDebugType::CompactAST,
|
||||
}).unwrap_or(ParsingDebugType::CompactAST)
|
||||
),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let mut debug_artifact = ask.map(|_| PassDebugArtifact {
|
||||
parsing, ..Default::default()
|
||||
});
|
||||
|
||||
let output = func(input, token.schala, debug_artifact.as_mut());
|
||||
|
||||
//TODO I think this is not counting the time since the *previous* stage
|
||||
token.stage_durations.push((cur_stage_name.to_string(), token.sw.elapsed()));
|
||||
if let Some(artifact) = debug_artifact {
|
||||
for value in artifact.artifacts.into_iter() {
|
||||
let resp = DebugResponse { ask: ask.unwrap().clone(), value };
|
||||
token.debug_responses.push(resp);
|
||||
}
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
let ComputationRequest { source, debug_requests } = request;
|
||||
self.source_reference.load_new_source(source);
|
||||
let sw = Stopwatch::start_new();
|
||||
let mut stage_durations = Vec::new();
|
||||
let mut debug_responses = Vec::new();
|
||||
let mut tok = PassToken { schala: self, stage_durations: &mut stage_durations, sw: &sw, debug_requests: &debug_requests, debug_responses: &mut debug_responses };
|
||||
|
||||
let main_output: Result<String, String> = Ok(source)
|
||||
.and_then(|source| output_wrapper(0, tokenizing, source, &mut tok))
|
||||
.and_then(|tokens| output_wrapper(1, parsing, tokens, &mut tok))
|
||||
.and_then(|ast| output_wrapper(2, symbol_table, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(3, scope_resolution, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(4, typechecking, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(5, ast_reducing, ast, &mut tok))
|
||||
.and_then(|reduced_ast| output_wrapper(6, eval, reduced_ast, &mut tok));
|
||||
|
||||
let total_duration = sw.elapsed();
|
||||
let global_output_stats = GlobalOutputStats {
|
||||
total_duration, stage_durations
|
||||
};
|
||||
|
||||
let main_output = parser::perform_parsing(source);
|
||||
|
||||
ComputationResponse {
|
||||
main_output,
|
||||
global_output_stats,
|
||||
debug_responses,
|
||||
}
|
||||
}
|
||||
|
||||
fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
|
||||
match request {
|
||||
LangMetaRequest::StageNames => LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()),
|
||||
LangMetaRequest::Docs { source } => self.handle_docs(source),
|
||||
LangMetaRequest::ImmediateDebug(debug_request) =>
|
||||
LangMetaResponse::ImmediateDebug(self.handle_debug_immediate(debug_request)),
|
||||
LangMetaRequest::Custom { .. } => LangMetaResponse::Custom { kind: format!("not-implemented"), value: format!("") }
|
||||
}
|
||||
}
|
||||
}
|
||||
119
schala-lang/language/src/scope_resolution.rs
Normal file
@@ -0,0 +1,119 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::schala::SymbolTableHandle;
|
||||
use crate::symbol_table::{ScopeSegment, FullyQualifiedSymbolName};
|
||||
use crate::ast::*;
|
||||
use crate::util::ScopeStack;
|
||||
|
||||
type FQSNPrefix = Vec<ScopeSegment>;
|
||||
|
||||
pub struct ScopeResolver<'a> {
|
||||
symbol_table_handle: SymbolTableHandle,
|
||||
name_scope_stack: ScopeStack<'a, Rc<String>, FQSNPrefix>,
|
||||
}
|
||||
|
||||
impl<'a> ASTVisitor for ScopeResolver<'a> {
|
||||
//TODO need to un-insert these - maybe need to rethink visitor
|
||||
fn import(&mut self, import_spec: &ImportSpecifier) {
|
||||
let ref symbol_table = self.symbol_table_handle.borrow();
|
||||
let ImportSpecifier { ref path_components, ref imported_names, .. } = &import_spec;
|
||||
match imported_names {
|
||||
ImportedNames::All => {
|
||||
let prefix = FullyQualifiedSymbolName(path_components.iter().map(|c| ScopeSegment {
|
||||
name: c.clone(),
|
||||
}).collect());
|
||||
let members = symbol_table.lookup_children_of_fqsn(&prefix);
|
||||
for member in members.into_iter() {
|
||||
let local_name = member.0.last().unwrap().name.clone();
|
||||
self.name_scope_stack.insert(local_name.clone(), member.0);
|
||||
}
|
||||
},
|
||||
ImportedNames::LastOfPath => {
|
||||
let name = path_components.last().unwrap(); //TODO handle better
|
||||
let fqsn_prefix = path_components.iter().map(|c| ScopeSegment {
|
||||
name: c.clone(),
|
||||
}).collect();
|
||||
self.name_scope_stack.insert(name.clone(), fqsn_prefix);
|
||||
}
|
||||
ImportedNames::List(ref names) => {
|
||||
let fqsn_prefix: FQSNPrefix = path_components.iter().map(|c| ScopeSegment {
|
||||
name: c.clone(),
|
||||
}).collect();
|
||||
for name in names.iter() {
|
||||
self.name_scope_stack.insert(name.clone(), fqsn_prefix.clone());
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn qualified_name(&mut self, qualified_name: &QualifiedName) {
|
||||
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
|
||||
let fqsn = self.lookup_name_in_scope(&qualified_name);
|
||||
let ref id = qualified_name.id;
|
||||
symbol_table.map_id_to_fqsn(id, fqsn);
|
||||
}
|
||||
|
||||
fn named_struct(&mut self, name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {
|
||||
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
|
||||
let ref id = name.id;
|
||||
let fqsn = self.lookup_name_in_scope(&name);
|
||||
symbol_table.map_id_to_fqsn(id, fqsn);
|
||||
}
|
||||
|
||||
fn pattern(&mut self, pat: &Pattern) {
|
||||
use Pattern::*;
|
||||
match pat {
|
||||
Ignored => (),
|
||||
TuplePattern(_) => (),
|
||||
Literal(_) => (),
|
||||
TupleStruct(name, _) => self.qualified_name_in_pattern(name),
|
||||
Record(name, _) => self.qualified_name_in_pattern(name),
|
||||
VarOrName(name) => self.qualified_name_in_pattern(name),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ScopeResolver<'a> {
|
||||
pub fn new(symbol_table_handle: SymbolTableHandle) -> ScopeResolver<'static> {
|
||||
let name_scope_stack = ScopeStack::new(None);
|
||||
ScopeResolver { symbol_table_handle, name_scope_stack }
|
||||
}
|
||||
pub fn resolve(&mut self, ast: &mut AST) -> Result<(), String> {
|
||||
walk_ast(self, ast);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn lookup_name_in_scope(&self, sym_name: &QualifiedName) -> FullyQualifiedSymbolName {
|
||||
let QualifiedName { components, .. } = sym_name;
|
||||
let first_component = &components[0];
|
||||
match self.name_scope_stack.lookup(first_component) {
|
||||
None => {
|
||||
FullyQualifiedSymbolName(components.iter().map(|name| ScopeSegment { name: name.clone() }).collect())
|
||||
},
|
||||
Some(fqsn_prefix) => {
|
||||
let mut full_name = fqsn_prefix.clone();
|
||||
let rest_of_name: FQSNPrefix = components[1..].iter().map(|name| ScopeSegment { name: name.clone() }).collect();
|
||||
full_name.extend_from_slice(&rest_of_name);
|
||||
FullyQualifiedSymbolName(full_name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This name might be either a variable or a pattern; the id is only mapped to an FQSN
/// when a matching symbol exists (otherwise it is left unmapped and treated as a variable).
|
||||
fn qualified_name_in_pattern(&mut self, qualified_name: &QualifiedName) {
|
||||
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
|
||||
let ref id = qualified_name.id;
|
||||
let fqsn = self.lookup_name_in_scope(qualified_name);
|
||||
if symbol_table.lookup_by_fqsn(&fqsn).is_some() {
|
||||
symbol_table.map_id_to_fqsn(&id, fqsn);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[test]
|
||||
fn basic_scope() {
|
||||
|
||||
}
|
||||
}
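// Editor's sketch (not part of the diff): a toy model of lookup_name_in_scope above,
// using plain Strings instead of ScopeSegments. An imported prefix recorded for the
// first component of a name is spliced onto the remaining components; otherwise the
// name is taken as already fully qualified. E.g. with "inner" -> ["outer", "inner"]
// recorded by `import outer::inner`, the reference `inner::thing` resolves to
// outer::inner::thing.
fn toy_resolve(prefixes: &std::collections::HashMap<String, Vec<String>>, components: &[String]) -> Vec<String> {
    match prefixes.get(&components[0]) {
        None => components.to_vec(),
        Some(prefix) => {
            let mut full = prefix.clone();
            full.extend_from_slice(&components[1..]);
            full
        }
    }
}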
|
||||
39
schala-lang/language/src/source_map.rs
Normal file
@@ -0,0 +1,39 @@
|
||||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
|
||||
use crate::ast::ItemId;
|
||||
|
||||
pub type LineNumber = usize;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub struct Location {
|
||||
pub line_num: LineNumber,
|
||||
pub char_num: usize,
|
||||
}
|
||||
|
||||
impl fmt::Display for Location {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}:{}", self.line_num, self.char_num)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SourceMap {
|
||||
map: HashMap<ItemId, Location>
|
||||
}
|
||||
|
||||
impl SourceMap {
|
||||
pub fn new() -> SourceMap {
|
||||
SourceMap { map: HashMap::new() }
|
||||
}
|
||||
|
||||
pub fn add_location(&mut self, id: &ItemId, loc: Location) {
|
||||
self.map.insert(id.clone(), loc);
|
||||
}
|
||||
|
||||
pub fn lookup(&self, id: &ItemId) -> Option<Location> {
|
||||
match self.map.get(id) {
|
||||
Some(loc) => Some(loc.clone()),
|
||||
None => None
|
||||
}
|
||||
}
|
||||
}
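// Editor's sketch (not part of the diff): the add_location/lookup round trip. How an
// ItemId is minted is not shown in this diff, so one is taken as a parameter here.
fn example_source_map(some_item_id: ItemId) {
    let mut map = SourceMap::new();
    map.add_location(&some_item_id, Location { line_num: 3, char_num: 7 });
    if let Some(loc) = map.lookup(&some_item_id) {
        println!("defined at {}", loc); // the Display impl prints "3:7"
    }
}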
|
||||
@@ -1,37 +1,149 @@
|
||||
use std::collections::HashMap;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::rc::Rc;
|
||||
use std::fmt;
|
||||
use std::fmt::Write;
|
||||
|
||||
use ast;
|
||||
use typechecking::TypeName;
|
||||
use crate::schala::SourceMapHandle;
|
||||
use crate::source_map::{SourceMap, LineNumber};
|
||||
use crate::ast;
|
||||
use crate::ast::{ItemId, TypeBody, TypeSingletonName, Signature, Statement, StatementKind, ModuleSpecifier};
|
||||
use crate::typechecking::TypeName;
|
||||
|
||||
|
||||
#[allow(unused_macros)]
|
||||
macro_rules! fqsn {
|
||||
( $( $name:expr ; $kind:tt),* ) => {
|
||||
{
|
||||
let mut vec = vec![];
|
||||
$(
|
||||
vec.push(crate::symbol_table::ScopeSegment::new(std::rc::Rc::new($name.to_string())));
|
||||
)*
|
||||
FullyQualifiedSymbolName(vec)
|
||||
}
|
||||
};
|
||||
}
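// Editor's note (not part of the diff): per the macro body above, a call such as
// fqsn!("Option"; ty, "Some"; tr) ignores the `;kind` markers and expands to roughly:
//
//     {
//         let mut vec = vec![];
//         vec.push(crate::symbol_table::ScopeSegment::new(std::rc::Rc::new("Option".to_string())));
//         vec.push(crate::symbol_table::ScopeSegment::new(std::rc::Rc::new("Some".to_string())));
//         FullyQualifiedSymbolName(vec)
//     }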
|
||||
|
||||
mod symbol_trie;
|
||||
use symbol_trie::SymbolTrie;
|
||||
mod test;
|
||||
|
||||
/// Keeps track of which names have been used in a given namespace. Call try_register to add a name to
/// the table; it reports an error (carrying the line of the earlier definition) if the name already exists.
|
||||
struct DuplicateNameTrackTable {
|
||||
table: HashMap<Rc<String>, LineNumber>,
|
||||
}
|
||||
|
||||
impl DuplicateNameTrackTable {
|
||||
fn new() -> DuplicateNameTrackTable {
|
||||
DuplicateNameTrackTable { table: HashMap::new() }
|
||||
}
|
||||
|
||||
fn try_register(&mut self, name: &Rc<String>, id: &ItemId, source_map: &SourceMap) -> Result<(), LineNumber> {
|
||||
match self.table.entry(name.clone()) {
|
||||
Entry::Occupied(o) => {
|
||||
let line_number = o.get();
|
||||
Err(*line_number)
|
||||
},
|
||||
Entry::Vacant(v) => {
|
||||
let line_number = if let Some(loc) = source_map.lookup(id) {
|
||||
loc.line_num
|
||||
} else {
|
||||
0
|
||||
};
|
||||
v.insert(line_number);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
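// Editor's sketch (not part of the diff): registering the same name twice reports the
// line of the first definition. ItemId construction is not shown in this diff, so the
// ids are taken as parameters.
fn example_duplicate_detection(id1: ItemId, id2: ItemId, source_map: &SourceMap) {
    let mut table = DuplicateNameTrackTable::new();
    let name = Rc::new("a".to_string());
    assert!(table.try_register(&name, &id1, source_map).is_ok());
    assert!(table.try_register(&name, &id2, source_map).is_err());
}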
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)]
|
||||
pub struct FullyQualifiedSymbolName(pub Vec<ScopeSegment>);
|
||||
|
||||
impl fmt::Display for FullyQualifiedSymbolName {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let FullyQualifiedSymbolName(v) = self;
|
||||
for segment in v {
|
||||
write!(f, "::{}", segment)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
|
||||
pub struct ScopeSegment {
|
||||
pub name: Rc<String>, //TODO maybe this could be a &str, for efficiency?
|
||||
}
|
||||
|
||||
impl fmt::Display for ScopeSegment {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let kind = ""; //TODO implement some kind of kind-tracking here
|
||||
write!(f, "{}{}", self.name, kind)
|
||||
}
|
||||
}
|
||||
|
||||
impl ScopeSegment {
|
||||
pub fn new(name: Rc<String>) -> ScopeSegment {
|
||||
ScopeSegment { name }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//cf. p. 150 or so of Language Implementation Patterns
|
||||
pub struct SymbolTable {
|
||||
pub values: HashMap<Rc<String>, Symbol> //TODO this will eventually have real type information
|
||||
source_map_handle: SourceMapHandle,
|
||||
symbol_path_to_symbol: HashMap<FullyQualifiedSymbolName, Symbol>,
|
||||
id_to_fqsn: HashMap<ItemId, FullyQualifiedSymbolName>,
|
||||
symbol_trie: SymbolTrie,
|
||||
}
|
||||
|
||||
//TODO add various types of lookups here, maybe multiple hash tables internally? also make values
|
||||
//non-public
|
||||
impl SymbolTable {
|
||||
pub fn new() -> SymbolTable {
|
||||
SymbolTable { values: HashMap::new() }
|
||||
pub fn new(source_map_handle: SourceMapHandle) -> SymbolTable {
|
||||
SymbolTable {
|
||||
source_map_handle,
|
||||
symbol_path_to_symbol: HashMap::new(),
|
||||
id_to_fqsn: HashMap::new(),
|
||||
symbol_trie: SymbolTrie::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lookup_by_name(&self, name: &Rc<String>) -> Option<&Symbol> {
|
||||
self.values.get(name)
|
||||
pub fn map_id_to_fqsn(&mut self, id: &ItemId, fqsn: FullyQualifiedSymbolName) {
|
||||
self.id_to_fqsn.insert(id.clone(), fqsn);
|
||||
}
|
||||
|
||||
pub fn get_fqsn_from_id(&self, id: &ItemId) -> Option<FullyQualifiedSymbolName> {
|
||||
self.id_to_fqsn.get(&id).cloned()
|
||||
}
|
||||
|
||||
fn add_new_symbol(&mut self, local_name: &Rc<String>, scope_path: &Vec<ScopeSegment>, spec: SymbolSpec) {
|
||||
let mut vec: Vec<ScopeSegment> = scope_path.clone();
|
||||
vec.push(ScopeSegment { name: local_name.clone() });
|
||||
let fully_qualified_name = FullyQualifiedSymbolName(vec);
|
||||
let symbol = Symbol { local_name: local_name.clone(), fully_qualified_name: fully_qualified_name.clone(), spec };
|
||||
self.symbol_trie.insert(&fully_qualified_name);
|
||||
self.symbol_path_to_symbol.insert(fully_qualified_name, symbol);
|
||||
}
|
||||
|
||||
pub fn lookup_by_fqsn(&self, fully_qualified_path: &FullyQualifiedSymbolName) -> Option<&Symbol> {
|
||||
self.symbol_path_to_symbol.get(fully_qualified_path)
|
||||
}
|
||||
|
||||
pub fn lookup_children_of_fqsn(&self, path: &FullyQualifiedSymbolName) -> Vec<FullyQualifiedSymbolName> {
|
||||
self.symbol_trie.get_children(path)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Symbol {
|
||||
pub name: Rc<String>,
|
||||
pub local_name: Rc<String>, //TODO does this need to be pub?
|
||||
fully_qualified_name: FullyQualifiedSymbolName,
|
||||
pub spec: SymbolSpec,
|
||||
}
|
||||
|
||||
impl fmt::Display for Symbol {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "<Name: {}, Spec: {}>", self.name, self.spec)
|
||||
write!(f, "<Local name: {}, Spec: {}>", self.local_name, self.spec)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,9 +152,18 @@ pub enum SymbolSpec {
|
||||
Func(Vec<TypeName>),
|
||||
DataConstructor {
|
||||
index: usize,
|
||||
type_name: Rc<String>,
|
||||
type_name: TypeName,
|
||||
type_args: Vec<Rc<String>>,
|
||||
},
|
||||
RecordConstructor {
|
||||
index: usize,
|
||||
members: HashMap<Rc<String>, TypeName>,
|
||||
type_name: TypeName,
|
||||
},
|
||||
Binding,
|
||||
Type {
|
||||
name: TypeName
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Display for SymbolSpec {
|
||||
@@ -51,6 +172,9 @@ impl fmt::Display for SymbolSpec {
|
||||
match self {
|
||||
Func(type_names) => write!(f, "Func({:?})", type_names),
|
||||
DataConstructor { index, type_name, type_args } => write!(f, "DataConstructor(idx: {})({:?} -> {})", index, type_args, type_name),
|
||||
RecordConstructor { type_name, index, ..} => write!(f, "RecordConstructor(idx: {})(<members> -> {})", index, type_name),
|
||||
Binding => write!(f, "Binding"),
|
||||
Type { name } => write!(f, "Type <{}>", name),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -58,74 +182,162 @@ impl fmt::Display for SymbolSpec {
|
||||
impl SymbolTable {
|
||||
/* note: this adds names for *forward reference* but doesn't actually create any types. solve that problem
|
||||
* later */
|
||||
|
||||
pub fn add_top_level_symbols(&mut self, ast: &ast::AST) -> Result<(), String> {
|
||||
use self::ast::{Statement, TypeIdentifier, Variant, TypeSingletonName, TypeBody};
|
||||
let mut scope_name_stack = Vec::new();
|
||||
self.add_symbols_from_scope(&ast.statements, &mut scope_name_stack)
|
||||
}
|
||||
|
||||
fn add_symbols_from_scope<'a>(&'a mut self, statements: &Vec<Statement>, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
use self::ast::Declaration::*;
|
||||
for statement in ast.0.iter() {
|
||||
if let Statement::Declaration(decl) = statement {
|
||||
match decl {
|
||||
FuncSig(signature) | FuncDecl(signature, _) => {
|
||||
let mut ch: char = 'a';
|
||||
let mut types = vec![];
|
||||
for param in signature.params.iter() {
|
||||
match param {
|
||||
(_, Some(_ty)) => {
|
||||
//TODO eventually handle this case different
|
||||
types.push(Rc::new(format!("{}", ch)));
|
||||
ch = ((ch as u8) + 1) as char;
|
||||
},
|
||||
(_, None) => {
|
||||
types.push(Rc::new(format!("{}", ch)));
|
||||
ch = ((ch as u8) + 1) as char;
|
||||
}
|
||||
}
|
||||
|
||||
let mut seen_identifiers = DuplicateNameTrackTable::new();
|
||||
let mut seen_modules = DuplicateNameTrackTable::new();
|
||||
|
||||
for statement in statements.iter() {
|
||||
match statement {
|
||||
Statement { kind: StatementKind::Declaration(decl), id } => {
|
||||
match decl {
|
||||
FuncSig(ref signature) => {
|
||||
seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
|
||||
self.add_function_signature(signature, scope_name_stack)?
|
||||
}
|
||||
let spec = SymbolSpec::Func(types);
|
||||
self.values.insert(
|
||||
signature.name.clone(),
|
||||
Symbol { name: signature.name.clone(), spec }
|
||||
);
|
||||
},
|
||||
//TODO figure out why _params isn't being used here
|
||||
TypeDecl { name: TypeSingletonName { name, params: _params}, body: TypeBody(variants), mutable: _mutable, } => {
|
||||
for (index, var) in variants.iter().enumerate() {
|
||||
match var {
|
||||
Variant::UnitStruct(variant_name) => {
|
||||
let spec = SymbolSpec::DataConstructor {
|
||||
index,
|
||||
type_name: name.clone(),
|
||||
type_args: vec![],
|
||||
};
|
||||
self.values.insert(variant_name.clone(), Symbol { name: variant_name.clone(), spec });
|
||||
},
|
||||
Variant::TupleStruct(variant_name, tuple_members) => {
|
||||
let type_args = tuple_members.iter().map(|type_name| match type_name {
|
||||
TypeIdentifier::Singleton(TypeSingletonName { name, ..}) => name.clone(),
|
||||
TypeIdentifier::Tuple(_) => unimplemented!(),
|
||||
}).collect();
|
||||
let spec = SymbolSpec::DataConstructor {
|
||||
index,
|
||||
type_name: name.clone(),
|
||||
type_args
|
||||
};
|
||||
let symbol = Symbol { name: variant_name.clone(), spec };
|
||||
self.values.insert(variant_name.clone(), symbol);
|
||||
},
|
||||
e => return Err(format!("{:?} not supported in typing yet", e)),
|
||||
}
|
||||
FuncDecl(ref signature, ref body) => {
|
||||
seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
|
||||
self.add_function_signature(signature, scope_name_stack)?;
|
||||
scope_name_stack.push(ScopeSegment{
|
||||
name: signature.name.clone(),
|
||||
});
|
||||
let output = self.add_symbols_from_scope(body, scope_name_stack);
|
||||
scope_name_stack.pop();
|
||||
output?
|
||||
},
|
||||
TypeDecl { name, body, mutable } => {
|
||||
seen_identifiers.try_register(&name.name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate type definition: {}. It's already defined at {}", name.name, line))?;
|
||||
self.add_type_decl(name, body, mutable, scope_name_stack)?
|
||||
},
|
||||
Binding { name, .. } => {
|
||||
seen_identifiers.try_register(&name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate variable definition: {}. It's already defined at {}", name, line))?;
|
||||
self.add_new_symbol(name, scope_name_stack, SymbolSpec::Binding);
|
||||
}
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
},
|
||||
Statement { kind: StatementKind::Module(ModuleSpecifier { name, contents}), id } => {
|
||||
seen_modules.try_register(&name, &id, &self.source_map_handle.borrow())
|
||||
.map_err(|line| format!("Duplicate module definition: {}. It's already defined at {}", name, line))?;
|
||||
scope_name_stack.push(ScopeSegment { name: name.clone() });
|
||||
let output = self.add_symbols_from_scope(contents, scope_name_stack);
|
||||
scope_name_stack.pop();
|
||||
output?
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
pub fn debug_symbol_table(&self) -> String {
|
||||
let mut output = format!("Symbol table\n");
|
||||
for (name, sym) in &self.values {
|
||||
let mut sorted_symbols: Vec<(&FullyQualifiedSymbolName, &Symbol)> = self.symbol_path_to_symbol.iter().collect();
|
||||
sorted_symbols.sort_by(|(fqsn, _), (other_fqsn, _)| fqsn.cmp(other_fqsn));
|
||||
for (name, sym) in sorted_symbols.iter() {
|
||||
write!(output, "{} -> {}\n", name, sym).unwrap();
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
fn add_function_signature(&mut self, signature: &Signature, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
let mut local_type_context = LocalTypeContext::new();
|
||||
let types = signature.params.iter().map(|param| match param.anno {
|
||||
Some(ref type_identifier) => Rc::new(format!("{:?}", type_identifier)),
|
||||
None => local_type_context.new_universal_type()
|
||||
}).collect();
|
||||
self.add_new_symbol(&signature.name, scope_name_stack, SymbolSpec::Func(types));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
//TODO handle type mutability
|
||||
fn add_type_decl(&mut self, type_name: &TypeSingletonName, body: &TypeBody, _mutable: &bool, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
use crate::ast::{TypeIdentifier, Variant};
|
||||
let TypeBody(variants) = body;
|
||||
let ref type_name = type_name.name;
|
||||
|
||||
|
||||
let type_spec = SymbolSpec::Type {
|
||||
name: type_name.clone(),
|
||||
};
|
||||
self.add_new_symbol(type_name, &scope_name_stack, type_spec);
|
||||
|
||||
scope_name_stack.push(ScopeSegment{
|
||||
name: type_name.clone(),
|
||||
});
|
||||
//TODO figure out why _params isn't being used here
|
||||
for (index, var) in variants.iter().enumerate() {
|
||||
match var {
|
||||
Variant::UnitStruct(variant_name) => {
|
||||
let spec = SymbolSpec::DataConstructor {
|
||||
index,
|
||||
type_name: type_name.clone(),
|
||||
type_args: vec![],
|
||||
};
|
||||
self.add_new_symbol(variant_name, scope_name_stack, spec);
|
||||
},
|
||||
Variant::TupleStruct(variant_name, tuple_members) => {
|
||||
//TODO fix the notion of a tuple type
|
||||
let type_args = tuple_members.iter().map(|type_name| match type_name {
|
||||
TypeIdentifier::Singleton(TypeSingletonName { name, ..}) => name.clone(),
|
||||
TypeIdentifier::Tuple(_) => unimplemented!(),
|
||||
}).collect();
|
||||
let spec = SymbolSpec::DataConstructor {
|
||||
index,
|
||||
type_name: type_name.clone(),
|
||||
type_args
|
||||
};
|
||||
self.add_new_symbol(variant_name, scope_name_stack, spec);
|
||||
},
|
||||
Variant::Record { name, members: defined_members } => {
|
||||
let mut members = HashMap::new();
|
||||
let mut duplicate_member_definitions = Vec::new();
|
||||
for (member_name, member_type) in defined_members {
|
||||
match members.entry(member_name.clone()) {
|
||||
Entry::Occupied(_) => duplicate_member_definitions.push(member_name.clone()),
|
||||
Entry::Vacant(v) => {
|
||||
v.insert(match member_type {
|
||||
TypeIdentifier::Singleton(TypeSingletonName { name, ..}) => name.clone(),
|
||||
TypeIdentifier::Tuple(_) => unimplemented!(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if duplicate_member_definitions.len() != 0 {
|
||||
return Err(format!("Duplicate member(s) in definition of type {}: {:?}", type_name, duplicate_member_definitions));
|
||||
}
|
||||
let spec = SymbolSpec::RecordConstructor { index, type_name: type_name.clone(), members };
|
||||
self.add_new_symbol(name, scope_name_stack, spec);
|
||||
},
|
||||
}
|
||||
}
|
||||
scope_name_stack.pop();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct LocalTypeContext {
|
||||
state: u8
|
||||
}
|
||||
impl LocalTypeContext {
|
||||
fn new() -> LocalTypeContext {
|
||||
LocalTypeContext { state: 0 }
|
||||
}
|
||||
|
||||
fn new_universal_type(&mut self) -> TypeName {
|
||||
let n = self.state;
|
||||
self.state += 1;
|
||||
Rc::new(format!("{}", (('a' as u8) + n) as char))
|
||||
}
|
||||
}
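// Editor's sketch (not part of the diff): successive calls hand out fresh single-letter
// universal type names.
fn example_universal_types() {
    let mut ctx = LocalTypeContext::new();
    assert_eq!(ctx.new_universal_type().as_str(), "a");
    assert_eq!(ctx.new_universal_type().as_str(), "b");
}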
|
||||
|
||||
|
||||
51
schala-lang/language/src/symbol_table/symbol_trie.rs
Normal file
@@ -0,0 +1,51 @@
use radix_trie::{Trie, TrieCommon, TrieKey};
use super::FullyQualifiedSymbolName;
use std::hash::{Hasher, Hash};
use std::collections::hash_map::DefaultHasher;

#[derive(Debug)]
pub struct SymbolTrie(Trie<FullyQualifiedSymbolName, ()>);

impl TrieKey for FullyQualifiedSymbolName {
  fn encode_bytes(&self) -> Vec<u8> {
    let mut hasher = DefaultHasher::new();
    let mut output = vec![];
    let FullyQualifiedSymbolName(scopes) = self;
    for segment in scopes.iter() {
      segment.name.as_bytes().hash(&mut hasher);
      output.extend_from_slice(&hasher.finish().to_be_bytes());
    }
    output
  }
}

impl SymbolTrie {
  pub fn new() -> SymbolTrie {
    SymbolTrie(Trie::new())
  }

  pub fn insert(&mut self, fqsn: &FullyQualifiedSymbolName) {
    self.0.insert(fqsn.clone(), ());
  }

  pub fn get_children(&self, fqsn: &FullyQualifiedSymbolName) -> Vec<FullyQualifiedSymbolName> {
    let subtrie = match self.0.subtrie(fqsn) {
      Some(s) => s,
      None => return vec![]
    };
    let output: Vec<FullyQualifiedSymbolName> = subtrie.keys().filter(|cur_key| **cur_key != *fqsn).map(|fqsn| fqsn.clone()).collect();
    output
  }
}

#[test]
fn test_trie_insertion() {
  let mut trie = SymbolTrie::new();

  trie.insert(&fqsn!("unrelated"; ty, "thing"; tr));
  trie.insert(&fqsn!("outer"; ty, "inner"; tr));
  trie.insert(&fqsn!("outer"; ty, "inner"; ty, "still_inner"; tr));

  let children = trie.get_children(&fqsn!("outer"; ty, "inner"; tr));
  assert_eq!(children.len(), 1);
}
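A note on the key encoding above: because every scope segment is hashed into the same DefaultHasher, the encoded bytes of a nested name begin with the encoded bytes of its enclosing scope, which appears to be what lets get_children answer scope queries through subtrie. A standalone sketch of that prefix property (illustrative only, not part of the commit; encode_segments is a made-up stand-in for encode_bytes):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash each segment cumulatively, mirroring encode_bytes above.
fn encode_segments(segments: &[&str]) -> Vec<u8> {
  let mut hasher = DefaultHasher::new();
  let mut out = Vec::new();
  for seg in segments {
    seg.as_bytes().hash(&mut hasher);
    out.extend_from_slice(&hasher.finish().to_be_bytes());
  }
  out
}

fn main() {
  let outer = encode_segments(&["outer"]);
  let inner = encode_segments(&["outer", "inner"]);
  // The nested name's key starts with its parent's key, so both land in
  // the same subtrie of the radix trie.
  assert!(inner.starts_with(&outer));
}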
schala-lang/language/src/symbol_table/test.rs (new file, 193 lines)
@@ -0,0 +1,193 @@
|
||||
#![cfg(test)]
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
use super::*;
|
||||
use crate::util::quick_ast;
|
||||
|
||||
fn add_symbols_from_source(src: &str) -> (SymbolTable, Result<(), String>) {
|
||||
let (ast, source_map) = quick_ast(src);
|
||||
let source_map = Rc::new(RefCell::new(source_map));
|
||||
let mut symbol_table = SymbolTable::new(source_map);
|
||||
let result = symbol_table.add_top_level_symbols(&ast);
|
||||
(symbol_table, result)
|
||||
}
|
||||
|
||||
macro_rules! values_in_table {
|
||||
($source:expr, $single_value:expr) => {
|
||||
values_in_table!($source => $single_value);
|
||||
};
|
||||
($source:expr => $( $value:expr ),* ) => {
|
||||
{
|
||||
let (symbol_table, _) = add_symbols_from_source($source);
|
||||
$(
|
||||
match symbol_table.lookup_by_fqsn($value) {
|
||||
Some(_spec) => (),
|
||||
None => panic!(),
|
||||
};
|
||||
)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_symbol_table() {
|
||||
values_in_table! { "let a = 10; fn b() { 20 }", &fqsn!("b"; tr) };
|
||||
values_in_table! { "type Option<T> = Some(T) | None" =>
|
||||
&fqsn!("Option"; tr),
|
||||
&fqsn!("Option"; ty, "Some"; tr),
|
||||
&fqsn!("Option"; ty, "None"; tr) };
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_function_definition_duplicates() {
|
||||
let source = r#"
|
||||
fn a() { 1 }
|
||||
fn b() { 2 }
|
||||
fn a() { 3 }
|
||||
"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
assert!(output.unwrap_err().contains("Duplicate function definition: a"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_variable_definition_duplicates() {
|
||||
let source = r#"
|
||||
let x = 9
|
||||
let a = 20
|
||||
let q = 39
|
||||
let a = 30
|
||||
"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
let output = output.unwrap_err();
|
||||
assert!(output.contains("Duplicate variable definition: a"));
|
||||
assert!(output.contains("already defined at 2"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_variable_definition_duplicates_in_function() {
|
||||
let source = r#"
|
||||
fn a() {
|
||||
let a = 20
|
||||
let b = 40
|
||||
a + b
|
||||
}
|
||||
|
||||
fn q() {
|
||||
let a = 29
|
||||
let x = 30
|
||||
let x = 33
|
||||
}
|
||||
"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
assert!(output.unwrap_err().contains("Duplicate variable definition: x"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_falsely_detect_duplicates() {
|
||||
let source = r#"
|
||||
let a = 20;
|
||||
fn some_func() {
|
||||
let a = 40;
|
||||
77
|
||||
}
|
||||
let q = 39;
|
||||
"#;
|
||||
let (symbol_table, _) = add_symbols_from_source(source);
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!["a"; tr]).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!["some_func"; fn, "a";tr]).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
x + inner_func(x)
|
||||
}"#;
|
||||
let (symbol_table, _) = add_symbols_from_source(source);
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes_2() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
|
||||
fn second_inner_func() {
|
||||
fn another_inner_func() {
|
||||
}
|
||||
}
|
||||
|
||||
inner_func(x)
|
||||
}"#;
|
||||
let (symbol_table, _) = add_symbols_from_source(source);
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; tr)).is_some());
|
||||
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; fn, "another_inner_func"; tr)).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes_3() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
|
||||
fn second_inner_func() {
|
||||
fn another_inner_func() {
|
||||
}
|
||||
fn another_inner_func() {
|
||||
}
|
||||
}
|
||||
|
||||
inner_func(x)
|
||||
}"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
assert!(output.unwrap_err().contains("Duplicate"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn modules() {
|
||||
let source = r#"
|
||||
module stuff {
|
||||
fn item() {
|
||||
}
|
||||
}
|
||||
|
||||
fn item()
|
||||
"#;
|
||||
values_in_table! { source =>
|
||||
&fqsn!("item"; tr),
|
||||
&fqsn!("stuff"; tr, "item"; tr)
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duplicate_modules() {
|
||||
let source = r#"
|
||||
module q {
|
||||
fn foo() { 4 }
|
||||
}
|
||||
|
||||
module a {
|
||||
fn foo() { 334 }
|
||||
}
|
||||
|
||||
module a {
|
||||
fn foo() { 256.1 }
|
||||
}
|
||||
"#;
|
||||
let (_, output) = add_symbols_from_source(source);
|
||||
let output = output.unwrap_err();
|
||||
assert!(output.contains("Duplicate module"));
|
||||
assert!(output.contains("already defined at 5"));
|
||||
}
|
||||
@@ -4,22 +4,27 @@ use std::rc::Rc;
|
||||
use std::iter::{Iterator, Peekable};
|
||||
use std::fmt;
|
||||
|
||||
use crate::source_map::Location;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum TokenType {
|
||||
pub enum TokenKind {
|
||||
Newline, Semicolon,
|
||||
|
||||
LParen, RParen,
|
||||
LSquareBracket, RSquareBracket,
|
||||
LAngleBracket, RAngleBracket,
|
||||
LCurlyBrace, RCurlyBrace,
|
||||
Pipe,
|
||||
Pipe, Backslash,
|
||||
|
||||
Comma, Period, Colon, Underscore,
|
||||
Slash,
|
||||
Slash, Equals,
|
||||
|
||||
Operator(Rc<String>),
|
||||
DigitGroup(Rc<String>), HexLiteral(Rc<String>), BinNumberSigil,
|
||||
StrLiteral(Rc<String>),
|
||||
StrLiteral {
|
||||
s: Rc<String>,
|
||||
prefix: Option<Rc<String>>
|
||||
},
|
||||
Identifier(Rc<String>),
|
||||
Keyword(Kw),
|
||||
|
||||
@@ -27,15 +32,15 @@ pub enum TokenType {
|
||||
|
||||
Error(String),
|
||||
}
|
||||
use self::TokenType::*;
|
||||
use self::TokenKind::*;
|
||||
|
||||
impl fmt::Display for TokenType {
|
||||
impl fmt::Display for TokenKind {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
&Operator(ref s) => write!(f, "Operator({})", **s),
|
||||
&DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
|
||||
&HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
|
||||
&StrLiteral(ref s) => write!(f, "StrLiteral({})", s),
|
||||
&StrLiteral {ref s, .. } => write!(f, "StrLiteral({})", s),
|
||||
&Identifier(ref s) => write!(f, "Identifier({})", s),
|
||||
&Error(ref s) => write!(f, "Error({})", s),
|
||||
other => write!(f, "{:?}", other),
|
||||
@@ -55,7 +60,7 @@ pub enum Kw {
|
||||
Alias, Type, SelfType, SelfIdent,
|
||||
Interface, Impl,
|
||||
True, False,
|
||||
Module
|
||||
Module, Import
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
@@ -82,24 +87,29 @@ lazy_static! {
|
||||
"true" => Kw::True,
|
||||
"false" => Kw::False,
|
||||
"module" => Kw::Module,
|
||||
"import" => Kw::Import,
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Token {
|
||||
pub token_type: TokenType,
|
||||
pub offset: (usize, usize),
|
||||
pub kind: TokenKind,
|
||||
pub location: Location,
|
||||
}
|
||||
|
||||
impl Token {
|
||||
pub fn get_error(&self) -> Option<String> {
|
||||
match self.token_type {
|
||||
TokenType::Error(ref s) => Some(s.clone()),
|
||||
match self.kind {
|
||||
TokenKind::Error(ref s) => Some(s.clone()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
pub fn to_string_with_metadata(&self) -> String {
|
||||
format!("{}(L:{},c:{})", self.token_type, self.offset.0, self.offset.1)
|
||||
format!("{}({})", self.kind, self.location)
|
||||
}
|
||||
|
||||
pub fn get_kind(&self) -> TokenKind {
|
||||
self.kind.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -113,15 +123,15 @@ type CharData = (usize, usize, char);
|
||||
pub fn tokenize(input: &str) -> Vec<Token> {
|
||||
let mut tokens: Vec<Token> = Vec::new();
|
||||
|
||||
let mut input = input.lines().enumerate()
|
||||
let mut input = input.lines().enumerate()
|
||||
.intersperse((0, "\n"))
|
||||
.flat_map(|(line_idx, ref line)| {
|
||||
line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
|
||||
})
|
||||
.peekable();
|
||||
|
||||
while let Some((line_idx, ch_idx, c)) = input.next() {
|
||||
let cur_tok_type = match c {
|
||||
while let Some((line_num, char_num, c)) = input.next() {
|
||||
let cur_tok_kind = match c {
|
||||
'/' => match input.peek().map(|t| t.2) {
|
||||
Some('/') => {
|
||||
while let Some((_, _, c)) = input.next() {
|
||||
@@ -156,18 +166,20 @@ pub fn tokenize(input: &str) -> Vec<Token> {
|
||||
'(' => LParen, ')' => RParen,
|
||||
'{' => LCurlyBrace, '}' => RCurlyBrace,
|
||||
'[' => LSquareBracket, ']' => RSquareBracket,
|
||||
'"' => handle_quote(&mut input),
|
||||
'"' => handle_quote(&mut input, None),
|
||||
'\\' => Backslash,
|
||||
c if c.is_digit(10) => handle_digit(c, &mut input),
|
||||
c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input), //TODO I'll probably have to rewrite this if I care about types being uppercase, also type parameterization
|
||||
c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input),
|
||||
c if is_operator(&c) => handle_operator(c, &mut input),
|
||||
unknown => Error(format!("Unexpected character: {}", unknown)),
|
||||
};
|
||||
tokens.push(Token { token_type: cur_tok_type, offset: (line_idx, ch_idx) });
|
||||
let location = Location { line_num, char_num };
|
||||
tokens.push(Token { kind: cur_tok_kind, location });
|
||||
}
|
||||
tokens
|
||||
}
|
||||
|
||||
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
|
||||
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
||||
if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
|
||||
input.next();
|
||||
let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
|
||||
@@ -182,7 +194,7 @@ fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) ->
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
|
||||
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>, quote_prefix: Option<&str>) -> TokenKind {
|
||||
let mut buf = String::new();
|
||||
loop {
|
||||
match input.next().map(|(_, _, c)| { c }) {
|
||||
@@ -201,22 +213,26 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType
|
||||
}
|
||||
},
|
||||
Some(c) => buf.push(c),
|
||||
None => return TokenType::Error(format!("Unclosed string")),
|
||||
None => return TokenKind::Error(format!("Unclosed string")),
|
||||
}
|
||||
}
|
||||
TokenType::StrLiteral(Rc::new(buf))
|
||||
TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
|
||||
}
|
||||
|
||||
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
|
||||
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
||||
let mut buf = String::new();
|
||||
buf.push(c);
|
||||
if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
|
||||
return TokenType::Underscore
|
||||
return TokenKind::Underscore
|
||||
}
|
||||
|
||||
loop {
|
||||
match input.peek().map(|&(_, _, c)| { c }) {
|
||||
Some(c) if c.is_alphanumeric() => {
|
||||
Some(c) if c == '"' => {
|
||||
input.next();
|
||||
return handle_quote(input, Some(&buf));
|
||||
},
|
||||
Some(c) if c.is_alphanumeric() || c == '_' => {
|
||||
input.next();
|
||||
buf.push(c);
|
||||
},
|
||||
@@ -225,14 +241,14 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>
|
||||
}
|
||||
|
||||
match KEYWORDS.get(buf.as_str()) {
|
||||
Some(kw) => TokenType::Keyword(*kw),
|
||||
None => TokenType::Identifier(Rc::new(buf)),
|
||||
Some(kw) => TokenKind::Keyword(*kw),
|
||||
None => TokenKind::Identifier(Rc::new(buf)),
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
|
||||
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
||||
match c {
|
||||
'<' | '>' | '|' | '.' => {
|
||||
'<' | '>' | '|' | '.' | '=' => {
|
||||
let ref next = input.peek().map(|&(_, _, c)| { c });
|
||||
if !next.map(|n| { is_operator(&n) }).unwrap_or(false) {
|
||||
return match c {
|
||||
@@ -240,6 +256,7 @@ fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>)
|
||||
'>' => RAngleBracket,
|
||||
'|' => Pipe,
|
||||
'.' => Period,
|
||||
'=' => Equals,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
@@ -275,7 +292,7 @@ fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>)
|
||||
}
|
||||
}
|
||||
}
|
||||
TokenType::Operator(Rc::new(buf))
|
||||
TokenKind::Operator(Rc::new(buf))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -290,26 +307,38 @@ mod schala_tokenizer_tests {
|
||||
#[test]
|
||||
fn tokens() {
|
||||
let a = tokenize("let a: A<B> = c ++ d");
|
||||
let token_types: Vec<TokenType> = a.into_iter().map(move |t| t.token_type).collect();
|
||||
assert_eq!(token_types, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
|
||||
LAngleBracket, ident!("B"), RAngleBracket, op!("="), ident!("c"), op!("++"), ident!("d")]);
|
||||
let token_kinds: Vec<TokenKind> = a.into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
|
||||
LAngleBracket, ident!("B"), RAngleBracket, Equals, ident!("c"), op!("++"), ident!("d")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn underscores() {
|
||||
let token_types: Vec<TokenType> = tokenize("4_8").into_iter().map(move |t| t.token_type).collect();
|
||||
assert_eq!(token_types, vec![digit!("4"), Underscore, digit!("8")]);
|
||||
let token_kinds: Vec<TokenKind> = tokenize("4_8").into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![digit!("4"), Underscore, digit!("8")]);
|
||||
|
||||
let token_kinds2: Vec<TokenKind> = tokenize("aba_yo").into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds2, vec![ident!("aba_yo")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn comments() {
|
||||
let token_types: Vec<TokenType> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.token_type).collect();
|
||||
assert_eq!(token_types, vec![digit!("1"), op!("+"), digit!("2")]);
|
||||
let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![digit!("1"), op!("+"), digit!("2")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn backtick_operators() {
|
||||
let token_types: Vec<TokenType> = tokenize("1 `plus` 2").into_iter().map(move |t| t.token_type).collect();
|
||||
assert_eq!(token_types, vec![digit!("1"), op!("plus"), digit!("2")]);
|
||||
let token_kinds: Vec<TokenKind> = tokenize("1 `plus` 2").into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_literals() {
|
||||
let token_kinds: Vec<TokenKind> = tokenize(r#""some string""#).into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);
|
||||
|
||||
let token_kinds: Vec<TokenKind> = tokenize(r#"b"some bytestring""#).into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some bytestring".to_string()), prefix: Some(Rc::new("b".to_string())) }]);
|
||||
}
|
||||
}
|
||||
|
||||
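The change to handle_alphabetic above is what makes prefixed string literals work: when an identifier is immediately followed by a double quote, the buffered prefix is handed to handle_quote, so b"some bytestring" tokenizes as a single StrLiteral with prefix: Some("b"). A hedged usage sketch against just the API in this hunk (tokenize, Token::kind, TokenKind::StrLiteral); it assumes it lives inside the same crate:

// Collect the prefixes of all string literals in a source snippet.
// Sketch only -- not part of the commit.
fn string_literal_prefixes(src: &str) -> Vec<Option<String>> {
  use crate::tokenizing::{tokenize, TokenKind};
  tokenize(src)
    .into_iter()
    .filter_map(|tok| match tok.kind {
      // A bare "..." carries prefix None; b"..." carries Some("b").
      TokenKind::StrLiteral { prefix, .. } => Some(prefix.map(|p| p.to_string())),
      _ => None,
    })
    .collect()
}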
@@ -1,3 +1,488 @@
|
||||
use std::rc::Rc;
|
||||
use std::fmt::Write;
|
||||
|
||||
use ena::unify::{UnifyKey, InPlaceUnificationTable, UnificationTable, EqUnifyValue};
|
||||
|
||||
use crate::ast::*;
|
||||
use crate::util::ScopeStack;
|
||||
use crate::util::deref_optional_box;
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct TypeData {
|
||||
ty: Option<Type>
|
||||
}
|
||||
|
||||
impl TypeData {
|
||||
#[allow(dead_code)]
|
||||
pub fn new() -> TypeData {
|
||||
TypeData { ty: None }
|
||||
}
|
||||
}
|
||||
|
||||
pub type TypeName = Rc<String>;
|
||||
|
||||
pub struct TypeContext<'a> {
|
||||
variable_map: ScopeStack<'a, Rc<String>, Type>,
|
||||
unification_table: InPlaceUnificationTable<TypeVar>,
|
||||
}
|
||||
|
||||
/// `InferResult` is the monad in which type inference takes place.
|
||||
type InferResult<T> = Result<T, TypeError>;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TypeError { pub msg: String }
|
||||
|
||||
impl TypeError {
|
||||
fn new<A, T>(msg: T) -> InferResult<A> where T: Into<String> {
|
||||
Err(TypeError { msg: msg.into() })
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)] // avoids warning from Compound
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum Type {
|
||||
Const(TypeConst),
|
||||
Var(TypeVar),
|
||||
Arrow {
|
||||
params: Vec<Type>,
|
||||
ret: Box<Type>
|
||||
},
|
||||
Compound {
|
||||
ty_name: String,
|
||||
args:Vec<Type>
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct TypeVar(usize);
|
||||
|
||||
impl UnifyKey for TypeVar {
|
||||
type Value = Option<TypeConst>;
|
||||
fn index(&self) -> u32 { self.0 as u32 }
|
||||
fn from_index(u: u32) -> TypeVar { TypeVar(u as usize) }
|
||||
fn tag() -> &'static str { "TypeVar" }
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum TypeConst {
|
||||
Unit,
|
||||
Nat,
|
||||
Int,
|
||||
Float,
|
||||
StringT,
|
||||
Bool,
|
||||
Ordering,
|
||||
//UserDefined
|
||||
}
|
||||
|
||||
impl TypeConst {
|
||||
pub fn to_string(&self) -> String {
|
||||
use self::TypeConst::*;
|
||||
match self {
|
||||
Unit => format!("()"),
|
||||
Nat => format!("Nat"),
|
||||
Int => format!("Int"),
|
||||
Float => format!("Float"),
|
||||
StringT => format!("String"),
|
||||
Bool => format!("Bool"),
|
||||
Ordering => format!("Ordering"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EqUnifyValue for TypeConst { }
|
||||
|
||||
macro_rules! ty {
|
||||
($type_name:ident) => { Type::Const(TypeConst::$type_name) };
|
||||
($t1:ident -> $t2:ident) => { Type::Arrow { params: vec![ty!($t1)], ret: box ty!($t2) } };
|
||||
($t1:ident -> $t2:ident -> $t3:ident) => { Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: box ty!($t3) } };
|
||||
($type_list:ident, $ret_type:ident) => {
|
||||
Type::Arrow {
|
||||
params: $type_list,
|
||||
ret: box $ret_type,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//TODO find a better way to capture the to/from string logic
|
||||
impl Type {
|
||||
pub fn to_string(&self) -> String {
|
||||
use self::Type::*;
|
||||
match self {
|
||||
Const(c) => c.to_string(),
|
||||
Var(v) => format!("t_{}", v.0),
|
||||
Arrow { params, box ref ret } => {
|
||||
if params.len() == 0 {
|
||||
format!("-> {}", ret.to_string())
|
||||
} else {
|
||||
let mut buf = String::new();
|
||||
for p in params.iter() {
|
||||
write!(buf, "{} -> ", p.to_string()).unwrap();
|
||||
}
|
||||
write!(buf, "{}", ret.to_string()).unwrap();
|
||||
buf
|
||||
}
|
||||
},
|
||||
Compound { .. } => format!("<some compound type>")
|
||||
}
|
||||
}
|
||||
|
||||
fn from_string(string: &str) -> Option<Type> {
|
||||
Some(match string {
|
||||
"()" | "Unit" => ty!(Unit),
|
||||
"Nat" => ty!(Nat),
|
||||
"Int" => ty!(Int),
|
||||
"Float" => ty!(Float),
|
||||
"String" => ty!(StringT),
|
||||
"Bool" => ty!(Bool),
|
||||
"Ordering" => ty!(Ordering),
|
||||
_ => return None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
/// `Type` is parameterized by whether the type variables can be just universal, or universal or
|
||||
/// existential.
|
||||
#[derive(Debug, Clone)]
|
||||
enum Type<A> {
|
||||
Var(A),
|
||||
Const(TConst),
|
||||
Arrow(Box<Type<A>>, Box<Type<A>>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum TVar {
|
||||
Univ(UVar),
|
||||
Exist(ExistentialVar)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct UVar(Rc<String>);
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct ExistentialVar(u32);
|
||||
|
||||
impl Type<UVar> {
|
||||
fn to_tvar(&self) -> Type<TVar> {
|
||||
match self {
|
||||
Type::Var(UVar(name)) => Type::Var(TVar::Univ(UVar(name.clone()))),
|
||||
Type::Const(ref c) => Type::Const(c.clone()),
|
||||
Type::Arrow(a, b) => Type::Arrow(
|
||||
Box::new(a.to_tvar()),
|
||||
Box::new(b.to_tvar())
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Type<TVar> {
|
||||
fn skolemize(&self) -> Type<UVar> {
|
||||
match self {
|
||||
Type::Var(TVar::Univ(uvar)) => Type::Var(uvar.clone()),
|
||||
Type::Var(TVar::Exist(_)) => Type::Var(UVar(Rc::new(format!("sk")))),
|
||||
Type::Const(ref c) => Type::Const(c.clone()),
|
||||
Type::Arrow(a, b) => Type::Arrow(
|
||||
Box::new(a.skolemize()),
|
||||
Box::new(b.skolemize())
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TypeIdentifier {
|
||||
fn to_monotype(&self) -> Type<UVar> {
|
||||
match self {
|
||||
TypeIdentifier::Tuple(_) => Type::Const(TConst::Nat),
|
||||
TypeIdentifier::Singleton(TypeSingletonName { name, .. }) => {
|
||||
match &name[..] {
|
||||
"Nat" => Type::Const(TConst::Nat),
|
||||
"Int" => Type::Const(TConst::Int),
|
||||
"Float" => Type::Const(TConst::Float),
|
||||
"Bool" => Type::Const(TConst::Bool),
|
||||
"String" => Type::Const(TConst::StringT),
|
||||
_ => Type::Const(TConst::Nat),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum TConst {
|
||||
User(Rc<String>),
|
||||
Unit,
|
||||
Nat,
|
||||
Int,
|
||||
Float,
|
||||
StringT,
|
||||
Bool,
|
||||
}
|
||||
|
||||
impl TConst {
|
||||
fn user(name: &str) -> TConst {
|
||||
TConst::User(Rc::new(name.to_string()))
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
impl<'a> TypeContext<'a> {
|
||||
pub fn new() -> TypeContext<'a> {
|
||||
TypeContext {
|
||||
variable_map: ScopeStack::new(None),
|
||||
unification_table: UnificationTable::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
fn new_env(&'a self, new_var: Rc<String>, ty: Type) -> TypeContext<'a> {
|
||||
let mut new_context = TypeContext {
|
||||
variable_map: self.variable_map.new_scope(None),
|
||||
unification_table: UnificationTable::new(), //???? not sure if i want this
|
||||
};
|
||||
|
||||
new_context.variable_map.insert(new_var, ty);
|
||||
new_context
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
fn get_type_from_name(&self, name: &TypeIdentifier) -> InferResult<Type> {
|
||||
use self::TypeIdentifier::*;
|
||||
Ok(match name {
|
||||
Singleton(TypeSingletonName { name,.. }) => {
|
||||
match Type::from_string(&name) {
|
||||
Some(ty) => ty,
|
||||
None => return TypeError::new(format!("Unknown type name: {}", name))
|
||||
}
|
||||
},
|
||||
Tuple(_) => return TypeError::new("tuples aren't ready yet"),
|
||||
})
|
||||
}
|
||||
|
||||
/// `typecheck` is the entry into the type-inference system, accepting an AST as an argument
|
||||
/// Following the example of GHC, the compiler deliberately does typechecking before de-sugaring
|
||||
/// the AST to ReducedAST
|
||||
pub fn typecheck(&mut self, ast: &AST) -> Result<Type, TypeError> {
|
||||
let mut returned_type = Type::Const(TypeConst::Unit);
|
||||
for statement in ast.statements.iter() {
|
||||
returned_type = self.statement(statement)?;
|
||||
}
|
||||
Ok(returned_type)
|
||||
}
|
||||
|
||||
fn statement(&mut self, statement: &Statement) -> InferResult<Type> {
|
||||
match &statement.kind {
|
||||
StatementKind::Expression(e) => self.expr(e),
|
||||
StatementKind::Declaration(decl) => self.decl(&decl),
|
||||
StatementKind::Import(_) => Ok(ty!(Unit)),
|
||||
StatementKind::Module(_) => Ok(ty!(Unit)),
|
||||
}
|
||||
}
|
||||
|
||||
fn decl(&mut self, decl: &Declaration) -> InferResult<Type> {
|
||||
use self::Declaration::*;
|
||||
match decl {
|
||||
Binding { name, expr, .. } => {
|
||||
let ty = self.expr(expr)?;
|
||||
self.variable_map.insert(name.clone(), ty);
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
Ok(ty!(Unit))
|
||||
}
|
||||
|
||||
fn invoc(&mut self, invoc: &InvocationArgument) -> InferResult<Type> {
|
||||
use InvocationArgument::*;
|
||||
match invoc {
|
||||
Positional(expr) => self.expr(expr),
|
||||
_ => Ok(ty!(Nat)) //TODO this is wrong
|
||||
}
|
||||
}
|
||||
|
||||
fn expr(&mut self, expr: &Expression) -> InferResult<Type> {
|
||||
match expr {
|
||||
Expression { kind, type_anno: Some(anno), .. } => {
|
||||
let t1 = self.expr_type(kind)?;
|
||||
let t2 = self.get_type_from_name(anno)?;
|
||||
self.unify(t2, t1)
|
||||
},
|
||||
Expression { kind, type_anno: None, .. } => self.expr_type(kind)
|
||||
}
|
||||
}
|
||||
|
||||
fn expr_type(&mut self, expr: &ExpressionKind) -> InferResult<Type> {
|
||||
use self::ExpressionKind::*;
|
||||
Ok(match expr {
|
||||
NatLiteral(_) => ty!(Nat),
|
||||
BoolLiteral(_) => ty!(Bool),
|
||||
FloatLiteral(_) => ty!(Float),
|
||||
StringLiteral(_) => ty!(StringT),
|
||||
PrefixExp(op, expr) => self.prefix(op, expr)?,
|
||||
BinExp(op, lhs, rhs) => self.binexp(op, lhs, rhs)?,
|
||||
IfExpression { discriminator, body } => self.if_expr(deref_optional_box(discriminator), &**body)?,
|
||||
Value(val) => self.handle_value(val)?,
|
||||
Call { box ref f, arguments } => self.call(f, arguments)?,
|
||||
Lambda { params, type_anno, body } => self.lambda(params, type_anno, body)?,
|
||||
_ => ty!(Unit),
|
||||
})
|
||||
}
|
||||
|
||||
fn prefix(&mut self, op: &PrefixOp, expr: &Expression) -> InferResult<Type> {
|
||||
let tf = match op.builtin.map(|b| b.get_type()) {
|
||||
Some(ty) => ty,
|
||||
None => return TypeError::new("no type found")
|
||||
};
|
||||
|
||||
let tx = self.expr(expr)?;
|
||||
self.handle_apply(tf, vec![tx])
|
||||
}
|
||||
|
||||
fn binexp(&mut self, op: &BinOp, lhs: &Expression, rhs: &Expression) -> InferResult<Type> {
|
||||
let tf = match op.builtin.map(|b| b.get_type()) {
|
||||
Some(ty) => ty,
|
||||
None => return TypeError::new("no type found"),
|
||||
};
|
||||
|
||||
let t_lhs = self.expr(lhs)?;
|
||||
let t_rhs = self.expr(rhs)?; //TODO is this order a problem? not sure
|
||||
|
||||
self.handle_apply(tf, vec![t_lhs, t_rhs])
|
||||
}
|
||||
|
||||
fn if_expr(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> InferResult<Type> {
|
||||
use self::IfExpressionBody::*;
|
||||
match (discriminator, body) {
|
||||
(Some(expr), SimpleConditional{ then_case, else_case }) => self.handle_simple_if(expr, then_case, else_case),
|
||||
_ => TypeError::new(format!("Complex conditionals not supported"))
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_simple_if(&mut self, expr: &Expression, then_clause: &Block, else_clause: &Option<Block>) -> InferResult<Type> {
|
||||
let t1 = self.expr(expr)?;
|
||||
let t2 = self.block(then_clause)?;
|
||||
let t3 = match else_clause {
|
||||
Some(block) => self.block(block)?,
|
||||
None => ty!(Unit)
|
||||
};
|
||||
|
||||
let _ = self.unify(ty!(Bool), t1)?;
|
||||
self.unify(t2, t3)
|
||||
}
|
||||
|
||||
fn lambda(&mut self, params: &Vec<FormalParam>, type_anno: &Option<TypeIdentifier>, _body: &Block) -> InferResult<Type> {
|
||||
let argument_types: InferResult<Vec<Type>> = params.iter().map(|param: &FormalParam| {
|
||||
if let FormalParam { anno: Some(type_identifier), .. } = param {
|
||||
self.get_type_from_name(type_identifier)
|
||||
} else {
|
||||
Ok(Type::Var(self.fresh_type_variable()))
|
||||
}
|
||||
}).collect();
|
||||
let argument_types = argument_types?;
|
||||
let ret_type = match type_anno.as_ref() {
|
||||
Some(anno) => self.get_type_from_name(anno)?,
|
||||
None => Type::Var(self.fresh_type_variable())
|
||||
};
|
||||
|
||||
Ok(ty!(argument_types, ret_type))
|
||||
}
|
||||
|
||||
fn call(&mut self, f: &Expression, args: &Vec<InvocationArgument>) -> InferResult<Type> {
|
||||
let tf = self.expr(f)?;
|
||||
let arg_types: InferResult<Vec<Type>> = args.iter().map(|ex| self.invoc(ex)).collect();
|
||||
let arg_types = arg_types?;
|
||||
self.handle_apply(tf, arg_types)
|
||||
}
|
||||
|
||||
fn handle_apply(&mut self, tf: Type, args: Vec<Type>) -> InferResult<Type> {
|
||||
Ok(match tf {
|
||||
Type::Arrow { ref params, ret: box ref t_ret } if params.len() == args.len() => {
|
||||
for (t_param, t_arg) in params.iter().zip(args.iter()) {
|
||||
let _ = self.unify(t_param.clone(), t_arg.clone())?; //TODO I think this needs to reference a sub-scope
|
||||
}
|
||||
t_ret.clone()
|
||||
},
|
||||
Type::Arrow { .. } => return TypeError::new("Wrong length"),
|
||||
_ => return TypeError::new(format!("Not a function"))
|
||||
})
|
||||
}
|
||||
|
||||
fn block(&mut self, block: &Block) -> InferResult<Type> {
|
||||
let mut output = ty!(Unit);
|
||||
for statement in block.iter() {
|
||||
output = self.statement(statement)?;
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn handle_value(&mut self, val: &QualifiedName) -> InferResult<Type> {
|
||||
let QualifiedName { components: vec, .. } = val;
|
||||
let var = &vec[0];
|
||||
match self.variable_map.lookup(var) {
|
||||
Some(ty) => Ok(ty.clone()),
|
||||
None => TypeError::new(format!("Couldn't find variable: {}", &var)),
|
||||
}
|
||||
}
|
||||
|
||||
fn unify(&mut self, t1: Type, t2: Type) -> InferResult<Type> {
|
||||
use self::Type::*;
|
||||
|
||||
match (t1, t2) {
|
||||
(Const(ref c1), Const(ref c2)) if c1 == c2 => Ok(Const(c1.clone())), //choice of c1 is arbitrary I *think*
|
||||
(a @ Var(_), b @ Const(_)) => self.unify(b, a),
|
||||
(Const(ref c1), Var(ref v2)) => {
|
||||
self.unification_table.unify_var_value(v2.clone(), Some(c1.clone()))
|
||||
.or_else(|_| TypeError::new(format!("Couldn't unify {:?} and {:?}", Const(c1.clone()), Var(*v2))))?;
|
||||
Ok(Const(c1.clone()))
|
||||
},
|
||||
(Var(v1), Var(v2)) => {
|
||||
//TODO add occurs check
|
||||
self.unification_table.unify_var_var(v1.clone(), v2.clone())
|
||||
.or_else(|e| {
|
||||
println!("Unify error: {:?}", e);
|
||||
TypeError::new(format!("Two type variables {:?} and {:?} couldn't unify", v1, v2))
|
||||
})?;
|
||||
Ok(Var(v1.clone())) //arbitrary decision I think
|
||||
},
|
||||
(a, b) => TypeError::new(format!("{:?} and {:?} do not unify", a, b)),
|
||||
}
|
||||
}
|
||||
|
||||
fn fresh_type_variable(&mut self) -> TypeVar {
|
||||
let new_type_var = self.unification_table.new_key(None);
|
||||
new_type_var
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod typechecking_tests {
|
||||
use super::*;
|
||||
|
||||
macro_rules! assert_type_in_fresh_context {
|
||||
($string:expr, $type:expr) => {
|
||||
let mut tc = TypeContext::new();
|
||||
let (ref ast, _) = crate::util::quick_ast($string);
|
||||
let ty = tc.typecheck(ast).unwrap();
|
||||
assert_eq!(ty, $type)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_test() {
|
||||
assert_type_in_fresh_context!("1", ty!(Nat));
|
||||
assert_type_in_fresh_context!(r#""drugs""#, ty!(StringT));
|
||||
assert_type_in_fresh_context!("true", ty!(Bool));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn operators() {
|
||||
//TODO fix these with new operator regime
|
||||
/*
|
||||
assert_type_in_fresh_context!("-1", ty!(Int));
|
||||
assert_type_in_fresh_context!("1 + 2", ty!(Nat));
|
||||
assert_type_in_fresh_context!("-2", ty!(Int));
|
||||
assert_type_in_fresh_context!("!true", ty!(Bool));
|
||||
*/
|
||||
}
|
||||
}
|
||||
|
||||
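Taken together, the module above works like this: typecheck folds over the AST's statements, expr unifies an explicit annotation against the inferred type, and failures come back as TypeError. A minimal driver sketch in the spirit of the assert_type_in_fresh_context! macro above (illustrative only; quick_ast is the test helper added to util.rs in the next hunk):

// Sketch only: returns a printable type or an error message for a source string.
fn type_of(src: &str) -> Result<String, String> {
  let mut tc = TypeContext::new();
  // quick_ast is the test-only helper from util.rs; it panics on parse errors.
  let (ast, _source_map) = crate::util::quick_ast(src);
  tc.typecheck(&ast)
    .map(|ty| ty.to_string())
    .map_err(|e| e.msg)
}
// e.g. assert_eq!(type_of("true").unwrap(), "Bool");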
@@ -1,6 +1,11 @@
use std::collections::HashMap;
use std::hash::Hash;
use std::cmp::Eq;
use std::ops::Deref;

pub fn deref_optional_box<T>(x: &Option<Box<T>>) -> Option<&T> {
  x.as_ref().map(|b: &Box<T>| Deref::deref(b))
}

#[derive(Default, Debug)]
pub struct ScopeStack<'a, T: 'a, V: 'a> where T: Hash + Eq {
@@ -41,3 +46,23 @@ impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {
  }
}

/// this is intended for use in tests, and does no error-handling whatsoever
#[allow(dead_code)]
pub fn quick_ast(input: &str) -> (crate::ast::AST, crate::source_map::SourceMap) {
  use std::cell::RefCell;
  use std::rc::Rc;

  let source_map = crate::source_map::SourceMap::new();
  let source_map_handle = Rc::new(RefCell::new(source_map));
  let tokens = crate::tokenizing::tokenize(input);
  let mut parser = crate::parsing::Parser::new(source_map_handle.clone());
  parser.add_new_tokens(tokens);
  let output = parser.parse();
  std::mem::drop(parser);
  (output.unwrap(), Rc::try_unwrap(source_map_handle).map_err(|_| ()).unwrap().into_inner())
}

#[allow(unused_macros)]
macro_rules! rc {
  ($string:tt) => { Rc::new(stringify!($string).to_string()) }
}

@@ -1,13 +0,0 @@
|
||||
[package]
|
||||
name = "schala-repl-codegen"
|
||||
version = "0.1.0"
|
||||
authors = ["greg <greg.shuflin@protonmail.com>"]
|
||||
|
||||
[dependencies]
|
||||
syn = { version = "0.15.6", features = ["full", "extra-traits"] }
|
||||
quote = "0.6.8"
|
||||
proc-macro2 = "0.4.19"
|
||||
schala-repl = { path = "../schala-repl" }
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
@@ -1,138 +0,0 @@
|
||||
#![feature(trace_macros)]
|
||||
extern crate proc_macro;
|
||||
extern crate proc_macro2;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
extern crate syn;
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use syn::{Ident, Attribute, DeriveInput};
|
||||
|
||||
fn find_attr_by_name<'a>(name: &str, attrs: &'a Vec<Attribute>) -> Option<&'a Attribute> {
|
||||
attrs.iter().find(|attr| {
|
||||
let first = attr.path.segments.first();
|
||||
let seg: Option<&&syn::PathSegment> = first.as_ref().map(|x| x.value());
|
||||
seg.map(|seg| seg.ident.to_string() == name).unwrap_or(false)
|
||||
})
|
||||
}
|
||||
|
||||
fn extract_attribute_arg_by_name(name: &str, attrs: &Vec<Attribute>) -> Option<String> {
|
||||
use syn::{Meta, Lit, MetaNameValue};
|
||||
find_attr_by_name(name, attrs)
|
||||
.and_then(|attr| {
|
||||
match attr.interpret_meta() {
|
||||
Some(Meta::NameValue(MetaNameValue { lit: Lit::Str(litstr), .. })) => Some(litstr.value()),
|
||||
_ => None,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn extract_attribute_list(name: &str, attrs: &Vec<Attribute>) -> Option<Vec<(Ident, Option<Vec<Ident>>)>> {
|
||||
use syn::{Meta, MetaList, NestedMeta};
|
||||
find_attr_by_name(name, attrs)
|
||||
.and_then(|attr| {
|
||||
match attr.interpret_meta() {
|
||||
Some(Meta::List(MetaList { nested, .. })) => {
|
||||
Some(nested.iter().map(|nested_meta| match nested_meta {
|
||||
&NestedMeta::Meta(Meta::Word(ref ident)) => (ident.clone(), None),
|
||||
&NestedMeta::Meta(Meta::List(MetaList { ref ident, nested: ref nested2, .. })) => {
|
||||
let own_args = nested2.iter().map(|nested_meta2| match nested_meta2 {
|
||||
&NestedMeta::Meta(Meta::Word(ref ident)) => ident.clone(),
|
||||
_ => panic!("Bad format for doubly-nested attribute list")
|
||||
}).collect();
|
||||
(ident.clone(), Some(own_args))
|
||||
},
|
||||
_ => panic!("Bad format for nested list")
|
||||
}).collect())
|
||||
},
|
||||
_ => panic!("{} must be a comma-delimited list surrounded by parens", name)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn get_attribute_identifier(attr_name: &str, attrs: &Vec<Attribute>) -> Option<proc_macro2::Ident> {
|
||||
find_attr_by_name(attr_name, attrs).and_then(|attr| {
|
||||
let tts = attr.tts.clone().into_iter().collect::<Vec<_>>();
|
||||
|
||||
if tts.len() == 2 {
|
||||
let ref after_equals: proc_macro2::TokenTree = tts[1];
|
||||
match after_equals {
|
||||
proc_macro2::TokenTree::Ident(ident) => Some(ident.clone()),
|
||||
_ => None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[proc_macro_derive(ProgrammingLanguageInterface,
|
||||
attributes(LanguageName, SourceFileExtension, PipelineSteps, DocMethod, HandleCustomInterpreterDirectives))]
|
||||
pub fn derive_programming_language_interface(input: TokenStream) -> TokenStream {
|
||||
let ast: DeriveInput = syn::parse(input).unwrap();
|
||||
let name = &ast.ident;
|
||||
let attrs = &ast.attrs;
|
||||
|
||||
let language_name: String = extract_attribute_arg_by_name("LanguageName", attrs).expect("LanguageName is required");
|
||||
let file_ext = extract_attribute_arg_by_name("SourceFileExtension", attrs).expect("SourceFileExtension is required");
|
||||
let passes = extract_attribute_list("PipelineSteps", attrs).expect("PipelineSteps are required");
|
||||
let pass_idents = passes.iter().map(|x| x.0.clone());
|
||||
|
||||
let get_doc_impl = match get_attribute_identifier("DocMethod", attrs) {
|
||||
None => quote! { },
|
||||
Some(method_name) => quote! {
|
||||
fn get_doc(&self, commands: &Vec<&str>) -> Option<String> {
|
||||
self.#method_name(commands)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let handle_custom_interpreter_directives_impl = match get_attribute_identifier("HandleCustomInterpreterDirectives", attrs) {
|
||||
None => quote! { },
|
||||
Some(method_name) => quote! {
|
||||
fn handle_custom_interpreter_directives(&mut self, commands: &Vec<&str>) -> Option<String> {
|
||||
//println!("If #method_name is &self not &mut self, this runs forever");
|
||||
self.#method_name(commands)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let pass_descriptors = passes.iter().map(|pass| {
|
||||
let name = pass.0.to_string();
|
||||
let opts: Vec<String> = match &pass.1 {
|
||||
None => vec![],
|
||||
Some(opts) => opts.iter().map(|o| o.to_string()).collect(),
|
||||
};
|
||||
|
||||
quote! {
|
||||
PassDescriptor {
|
||||
name: #name.to_string(),
|
||||
debug_options: vec![#(format!(#opts)),*]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let tokens = quote! {
|
||||
use schala_repl::PassDescriptor;
|
||||
impl ProgrammingLanguageInterface for #name {
|
||||
fn get_language_name(&self) -> String {
|
||||
#language_name.to_string()
|
||||
}
|
||||
fn get_source_file_suffix(&self) -> String {
|
||||
#file_ext.to_string()
|
||||
}
|
||||
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
|
||||
let mut chain = pass_chain![self, options; #(#pass_idents),* ];
|
||||
chain(input)
|
||||
}
|
||||
fn get_passes(&self) -> Vec<PassDescriptor> {
|
||||
vec![ #(#pass_descriptors),* ]
|
||||
}
|
||||
#get_doc_impl
|
||||
#handle_custom_interpreter_directives_impl
|
||||
}
|
||||
};
|
||||
|
||||
let output: TokenStream = tokens.into();
|
||||
output
|
||||
}
|
||||
@@ -2,24 +2,22 @@
name = "schala-repl"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2018"

[dependencies]
llvm-sys = "*"
take_mut = "0.1.3"
llvm-sys = "70.0.2"
take_mut = "0.2.2"
itertools = "0.5.8"
getopts = "*"
getopts = "0.2.18"
lazy_static = "0.2.8"
maplit = "*"
colored = "1.5"
serde = "1.0.15"
serde_derive = "1.0.15"
serde_json = "1.0.3"
rocket = "0.3.13"
rocket_codegen = "0.3.13"
rocket_contrib = "0.3.13"
colored = "1.8"
serde = "1.0.91"
serde_derive = "1.0.91"
serde_json = "1.0.15"
phf = "0.7.12"
includedir = "0.2.0"
linefeed = "0.5.0"
linefeed = "0.6.0"
regex = "0.2"

[build-dependencies]

@@ -1,233 +1,80 @@
|
||||
use std::collections::HashMap;
|
||||
use colored::*;
|
||||
use std::fmt::Write;
|
||||
use std::time;
|
||||
|
||||
pub struct LLVMCodeString(pub String);
|
||||
|
||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||
pub struct EvalOptions {
|
||||
pub execution_method: ExecutionMethod,
|
||||
pub debug_passes: HashMap<String, PassDebugOptionsDescriptor>,
|
||||
pub debug_timing: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash, PartialEq)]
|
||||
pub struct PassDescriptor {
|
||||
pub name: String,
|
||||
pub debug_options: Vec<String>
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct PassDebugOptionsDescriptor {
|
||||
pub opts: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub enum ExecutionMethod {
|
||||
Compile,
|
||||
Interpret,
|
||||
}
|
||||
impl Default for ExecutionMethod {
|
||||
fn default() -> ExecutionMethod {
|
||||
ExecutionMethod::Interpret
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct UnfinishedComputation {
|
||||
artifacts: Vec<(String, TraceArtifact)>,
|
||||
pub durations: Vec<time::Duration>,
|
||||
pub cur_debug_options: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct FinishedComputation {
|
||||
artifacts: Vec<(String, TraceArtifact)>,
|
||||
durations: Vec<time::Duration>,
|
||||
text_output: Result<String, String>,
|
||||
}
|
||||
|
||||
impl UnfinishedComputation {
|
||||
pub fn add_artifact(&mut self, artifact: TraceArtifact) {
|
||||
self.artifacts.push((artifact.stage_name.clone(), artifact));
|
||||
}
|
||||
pub fn finish(self, text_output: Result<String, String>) -> FinishedComputation {
|
||||
FinishedComputation {
|
||||
artifacts: self.artifacts,
|
||||
text_output,
|
||||
durations: self.durations,
|
||||
}
|
||||
}
|
||||
pub fn output(self, output: Result<String, String>) -> FinishedComputation {
|
||||
FinishedComputation {
|
||||
artifacts: self.artifacts,
|
||||
text_output: output,
|
||||
durations: self.durations,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FinishedComputation {
|
||||
|
||||
fn get_timing(&self) -> Option<String> {
|
||||
if self.durations.len() != 0 {
|
||||
let mut buf = String::new();
|
||||
write!(&mut buf, "Timing: ").unwrap();
|
||||
for duration in self.durations.iter() {
|
||||
let timing = (duration.as_secs() as f64) + (duration.subsec_nanos() as f64 * 1e-9);
|
||||
write!(&mut buf, "{}s, ", timing).unwrap()
|
||||
}
|
||||
write!(&mut buf, "\n").unwrap();
|
||||
Some(buf)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_repl(&self) -> String {
|
||||
let mut buf = String::new();
|
||||
for (stage, artifact) in self.artifacts.iter() {
|
||||
let color = artifact.text_color;
|
||||
let stage = stage.color(color).bold();
|
||||
let output = artifact.debug_output.color(color);
|
||||
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
|
||||
}
|
||||
|
||||
match self.get_timing() {
|
||||
Some(timing) => write!(&mut buf, "{}", timing).unwrap(),
|
||||
None => ()
|
||||
}
|
||||
|
||||
match self.text_output {
|
||||
Ok(ref output) => write!(&mut buf, "{}", output).unwrap(),
|
||||
Err(ref err) => write!(&mut buf, "{} {}", "Error: ".red().bold(), err).unwrap(),
|
||||
}
|
||||
buf
|
||||
}
|
||||
pub fn to_noninteractive(&self) -> Option<String> {
|
||||
match self.text_output {
|
||||
Ok(_) => {
|
||||
let mut buf = String::new();
|
||||
for (stage, artifact) in self.artifacts.iter() {
|
||||
let color = artifact.text_color;
|
||||
let stage = stage.color(color).bold();
|
||||
let output = artifact.debug_output.color(color);
|
||||
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
|
||||
}
|
||||
if buf == "" { None } else { Some(buf) }
|
||||
},
|
||||
Err(ref s) => Some(format!("{} {}", "Error: ".red().bold(), s))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TraceArtifact {
|
||||
stage_name: String,
|
||||
debug_output: String,
|
||||
text_color: &'static str,
|
||||
}
|
||||
|
||||
impl TraceArtifact {
|
||||
pub fn new(stage: &str, debug: String) -> TraceArtifact {
|
||||
let color = match stage {
|
||||
"parse_trace" | "ast" => "red",
|
||||
"ast_reducing" => "red",
|
||||
"tokens" => "green",
|
||||
"type_check" => "magenta",
|
||||
_ => "blue",
|
||||
};
|
||||
TraceArtifact { stage_name: stage.to_string(), debug_output: debug, text_color: color}
|
||||
}
|
||||
|
||||
pub fn new_parse_trace(trace: Vec<String>) -> TraceArtifact {
|
||||
let mut output = String::new();
|
||||
|
||||
for t in trace {
|
||||
output.push_str(&t);
|
||||
output.push_str("\n");
|
||||
}
|
||||
|
||||
TraceArtifact { stage_name: "parse_trace".to_string(), debug_output: output, text_color: "red"}
|
||||
}
|
||||
}
|
||||
use std::collections::HashSet;
|
||||
|
||||
pub trait ProgrammingLanguageInterface {
|
||||
fn execute_pipeline(&mut self, _input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
|
||||
FinishedComputation { artifacts: vec![], text_output: Err(format!("Execution pipeline not done")), durations: vec![] }
|
||||
}
|
||||
|
||||
fn get_language_name(&self) -> String;
|
||||
fn get_source_file_suffix(&self) -> String;
|
||||
fn get_passes(&self) -> Vec<PassDescriptor> {
|
||||
vec![]
|
||||
|
||||
fn run_computation(&mut self, _request: ComputationRequest) -> ComputationResponse {
|
||||
ComputationResponse {
|
||||
main_output: Err(format!("Computation pipeline not implemented")),
|
||||
global_output_stats: GlobalOutputStats::default(),
|
||||
debug_responses: vec![],
|
||||
}
|
||||
}
|
||||
fn handle_custom_interpreter_directives(&mut self, _commands: &Vec<&str>) -> Option<String> {
|
||||
None
|
||||
}
|
||||
fn custom_interpreter_directives_help(&self) -> String {
|
||||
format!(">> No custom interpreter directives specified <<")
|
||||
}
|
||||
fn get_doc(&self, _commands: &Vec<&str>) -> Option<String> {
|
||||
None
|
||||
|
||||
fn request_meta(&mut self, _request: LangMetaRequest) -> LangMetaResponse {
|
||||
LangMetaResponse::Custom { kind: format!("not-implemented"), value: format!("") }
|
||||
}
|
||||
}
|
||||
|
||||
/* a pass_chain function signature looks like:
|
||||
* fn(&mut ProgrammingLanguageInterface, A, Option<&mut DebugHandler>) -> Result<B, String>
|
||||
*
|
||||
* TODO use some kind of failure-handling library to make this better
|
||||
*/
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! pass_chain {
|
||||
($state:expr, $eval_options:expr; $($pass:path), *) => {
|
||||
|text_input| {
|
||||
let mut comp = UnfinishedComputation::default();
|
||||
pass_chain_helper! { ($state, comp, $eval_options); text_input $(, $pass)* }
|
||||
}
|
||||
};
|
||||
pub struct ComputationRequest<'a> {
|
||||
pub source: &'a str,
|
||||
pub debug_requests: HashSet<DebugAsk>,
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! pass_chain_helper {
|
||||
(($state:expr, $comp:expr, $eval_options:expr); $input:expr, $pass:path $(, $rest:path)*) => {
|
||||
{
|
||||
use std::time;
|
||||
use schala_repl::PassDebugOptionsDescriptor;
|
||||
let pass_name = stringify!($pass);
|
||||
let (output, duration) = {
|
||||
let ref debug_map = $eval_options.debug_passes;
|
||||
let debug_handle = match debug_map.get(pass_name) {
|
||||
Some(PassDebugOptionsDescriptor { opts }) => {
|
||||
let ptr = &mut $comp;
|
||||
ptr.cur_debug_options = opts.clone();
|
||||
Some(ptr)
|
||||
}
|
||||
_ => None
|
||||
};
|
||||
let start = time::Instant::now();
|
||||
let pass_output = $pass($state, $input, debug_handle);
|
||||
let elapsed = start.elapsed();
|
||||
(pass_output, elapsed)
|
||||
};
|
||||
if $eval_options.debug_timing {
|
||||
$comp.durations.push(duration);
|
||||
}
|
||||
match output {
|
||||
Ok(result) => pass_chain_helper! { ($state, $comp, $eval_options); result $(, $rest)* },
|
||||
Err(err) => { //TODO this error type needs to be guaranteed to provide a useable string
|
||||
$comp.output(Err(format!("Pass {} failed:\n{}", pass_name, err)))
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
// Done
|
||||
(($state:expr, $comp:expr, $eval_options:expr); $final_output:expr) => {
|
||||
{
|
||||
let final_output: FinishedComputation = $comp.finish(Ok($final_output));
|
||||
final_output
|
||||
}
|
||||
};
|
||||
pub struct ComputationResponse {
|
||||
pub main_output: Result<String, String>,
|
||||
pub global_output_stats: GlobalOutputStats,
|
||||
pub debug_responses: Vec<DebugResponse>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct GlobalOutputStats {
|
||||
pub total_duration: time::Duration,
|
||||
pub stage_durations: Vec<(String, time::Duration)>
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize, Serialize)]
|
||||
pub enum DebugAsk {
|
||||
Timing,
|
||||
ByStage { stage_name: String, token: Option<String> },
|
||||
}
|
||||
|
||||
impl DebugAsk {
|
||||
pub fn is_for_stage(&self, name: &str) -> bool {
|
||||
match self {
|
||||
DebugAsk::ByStage { stage_name, .. } if stage_name == name => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DebugResponse {
|
||||
pub ask: DebugAsk,
|
||||
pub value: String
|
||||
}
|
||||
|
||||
pub enum LangMetaRequest {
|
||||
StageNames,
|
||||
Docs {
|
||||
source: String,
|
||||
},
|
||||
Custom {
|
||||
kind: String,
|
||||
value: String
|
||||
},
|
||||
ImmediateDebug(DebugAsk),
|
||||
}
|
||||
|
||||
pub enum LangMetaResponse {
|
||||
StageNames(Vec<String>),
|
||||
Docs {
|
||||
doc_string: String,
|
||||
},
|
||||
Custom {
|
||||
kind: String,
|
||||
value: String
|
||||
},
|
||||
ImmediateDebug(DebugResponse),
|
||||
}
|
||||
|
||||
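The trait rework above replaces the old execute_pipeline/EvalOptions flow with run_computation(ComputationRequest) -> ComputationResponse plus request_meta for out-of-band queries. A hedged sketch of a minimal implementor under the new shape (the Echo type is invented for illustration; it assumes only get_language_name and get_source_file_suffix remain required, which is what the hunk suggests):

// Illustrative only -- not part of the diff.
struct Echo;

impl ProgrammingLanguageInterface for Echo {
  fn get_language_name(&self) -> String { "echo".to_string() }
  fn get_source_file_suffix(&self) -> String { "echo".to_string() }

  fn run_computation(&mut self, request: ComputationRequest) -> ComputationResponse {
    ComputationResponse {
      // A real language would tokenize, parse, and evaluate here.
      main_output: Ok(request.source.to_string()),
      global_output_stats: GlobalOutputStats::default(),
      debug_responses: vec![],
    }
  }
}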
@@ -1,7 +1,5 @@
|
||||
#![feature(link_args)]
|
||||
#![feature(slice_patterns, box_patterns, box_syntax)]
|
||||
#![feature(link_args, box_patterns, box_syntax, proc_macro_hygiene, decl_macro)]
|
||||
#![feature(plugin)]
|
||||
#![plugin(rocket_codegen)]
|
||||
extern crate getopts;
|
||||
extern crate linefeed;
|
||||
extern crate itertools;
|
||||
@@ -10,88 +8,49 @@ extern crate colored;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
extern crate serde_json;
|
||||
extern crate rocket;
|
||||
extern crate rocket_contrib;
|
||||
extern crate includedir;
|
||||
extern crate phf;
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use std::process::exit;
|
||||
use std::default::Default;
|
||||
|
||||
mod repl;
|
||||
mod language;
|
||||
mod webapp;
|
||||
pub mod llvm_wrap;
|
||||
|
||||
const VERSION_STRING: &'static str = "0.1.0";
|
||||
pub use language::{ProgrammingLanguageInterface,
|
||||
ComputationRequest, ComputationResponse,
|
||||
LangMetaRequest, LangMetaResponse,
|
||||
DebugResponse, DebugAsk, GlobalOutputStats};
|
||||
|
||||
include!(concat!(env!("OUT_DIR"), "/static.rs"));
|
||||
const VERSION_STRING: &'static str = "0.1.0";
|
||||
|
||||
pub use language::{LLVMCodeString, ProgrammingLanguageInterface, EvalOptions,
|
||||
ExecutionMethod, TraceArtifact, FinishedComputation, UnfinishedComputation, PassDebugOptionsDescriptor, PassDescriptor};
|
||||
|
||||
pub type PLIGenerator = Box<Fn() -> Box<ProgrammingLanguageInterface> + Send + Sync>;
|
||||
|
||||
pub fn repl_main(generators: Vec<PLIGenerator>) {
|
||||
let languages: Vec<Box<ProgrammingLanguageInterface>> = generators.iter().map(|x| x()).collect();
|
||||
|
||||
let option_matches = program_options().parse(std::env::args()).unwrap_or_else(|e| {
|
||||
pub fn start_repl(langs: Vec<Box<dyn ProgrammingLanguageInterface>>) {
|
||||
let options = command_line_options().parse(std::env::args()).unwrap_or_else(|e| {
|
||||
println!("{:?}", e);
|
||||
exit(1);
|
||||
});
|
||||
|
||||
if option_matches.opt_present("list-languages") {
|
||||
for lang in languages {
|
||||
println!("{}", lang.get_language_name());
|
||||
}
|
||||
exit(1);
|
||||
}
|
||||
|
||||
if option_matches.opt_present("help") {
|
||||
println!("{}", program_options().usage("Schala metainterpreter"));
|
||||
if options.opt_present("help") {
|
||||
println!("{}", command_line_options().usage("Schala metainterpreter"));
|
||||
exit(0);
|
||||
}
|
||||
|
||||
if option_matches.opt_present("webapp") {
|
||||
webapp::web_main(generators);
|
||||
exit(0);
|
||||
}
|
||||
|
||||
let mut options = EvalOptions::default();
|
||||
let debug_passes = if let Some(opts) = option_matches.opt_str("debug") {
|
||||
let output: Vec<String> = opts.split_terminator(",").map(|s| s.to_string()).collect();
|
||||
output
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
let language_names: Vec<String> = languages.iter().map(|lang| {lang.get_language_name()}).collect();
|
||||
let initial_index: usize =
|
||||
option_matches.opt_str("lang")
|
||||
.and_then(|lang| { language_names.iter().position(|x| { x.to_lowercase() == lang.to_lowercase() }) })
|
||||
.unwrap_or(0);
|
||||
|
||||
options.execution_method = match option_matches.opt_str("eval-style") {
|
||||
Some(ref s) if s == "compile" => ExecutionMethod::Compile,
|
||||
_ => ExecutionMethod::Interpret,
|
||||
};
|
||||
|
||||
match option_matches.free[..] {
|
||||
match options.free[..] {
|
||||
[] | [_] => {
|
||||
let mut repl = repl::Repl::new(languages, initial_index);
|
||||
repl.run();
|
||||
let mut repl = repl::Repl::new(langs);
|
||||
repl.run_repl();
|
||||
}
|
||||
[_, ref filename, _..] => {
|
||||
|
||||
run_noninteractive(filename, languages, options, debug_passes);
|
||||
[_, ref filename, ..] => {
|
||||
run_noninteractive(filename, langs);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInterface>>, mut options: EvalOptions, debug_passes: Vec<String>) {
|
||||
fn run_noninteractive(filename: &str, languages: Vec<Box<dyn ProgrammingLanguageInterface>>) {
|
||||
let path = Path::new(filename);
|
||||
let ext = path.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
|
||||
println!("Source file lacks extension");
|
||||
@@ -105,101 +64,28 @@ fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInte
|
||||
|
||||
let mut source_file = File::open(path).unwrap();
|
||||
let mut buffer = String::new();
|
||||
|
||||
source_file.read_to_string(&mut buffer).unwrap();
|
||||
|
||||
for pass in debug_passes.into_iter() {
|
||||
if let Some(_) = language.get_passes().iter().find(|desc| desc.name == pass) {
|
||||
options.debug_passes.insert(pass, PassDebugOptionsDescriptor { opts: vec![] });
|
||||
}
|
||||
}
|
||||
let request = ComputationRequest {
|
||||
source: &buffer,
|
||||
debug_requests: HashSet::new(),
|
||||
};
|
||||
|
||||
match options.execution_method {
|
||||
ExecutionMethod::Compile => {
|
||||
/*
|
||||
let llvm_bytecode = language.compile(&buffer);
|
||||
compilation_sequence(llvm_bytecode, filename);
|
||||
*/
|
||||
panic!("Not ready to go yet");
|
||||
},
|
||||
ExecutionMethod::Interpret => {
|
||||
let output = language.execute_pipeline(&buffer, &options);
|
||||
output.to_noninteractive().map(|text| println!("{}", text));
|
||||
}
|
||||
}
|
||||
let response = language.run_computation(request);
|
||||
match response.main_output {
|
||||
Ok(s) => println!("{}", s),
|
||||
Err(s) => println!("{}", s)
|
||||
};
|
||||
}
|
||||
|
||||
/*
|
||||
pub fn compilation_sequence(llvm_code: LLVMCodeString, sourcefile: &str) {
|
||||
use std::process::Command;
|
||||
|
||||
let ll_filename = "out.ll";
|
||||
let obj_filename = "out.o";
|
||||
let q: Vec<&str> = sourcefile.split('.').collect();
|
||||
let bin_filename = match &q[..] {
|
||||
&[name, "maaru"] => name,
|
||||
_ => panic!("Bad filename {}", sourcefile),
|
||||
};
|
||||
|
||||
let LLVMCodeString(llvm_str) = llvm_code;
|
||||
|
||||
println!("Compilation process finished for {}", ll_filename);
|
||||
File::create(ll_filename)
|
||||
.and_then(|mut f| f.write_all(llvm_str.as_bytes()))
|
||||
.expect("Error writing file");
|
||||
|
||||
let llc_output = Command::new("llc")
|
||||
.args(&["-filetype=obj", ll_filename, "-o", obj_filename])
|
||||
.output()
|
||||
.expect("Failed to run llc");
|
||||
|
||||
|
||||
if !llc_output.status.success() {
|
||||
println!("{}", String::from_utf8_lossy(&llc_output.stderr));
|
||||
}
|
||||
|
||||
let gcc_output = Command::new("gcc")
|
||||
.args(&["-o", bin_filename, &obj_filename])
|
||||
.output()
|
||||
.expect("failed to run gcc");
|
||||
|
||||
if !gcc_output.status.success() {
|
||||
println!("{}", String::from_utf8_lossy(&gcc_output.stdout));
|
||||
println!("{}", String::from_utf8_lossy(&gcc_output.stderr));
|
||||
}
|
||||
|
||||
for filename in [obj_filename].iter() {
|
||||
Command::new("rm")
|
||||
.arg(filename)
|
||||
.output()
|
||||
.expect(&format!("failed to run rm {}", filename));
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
fn program_options() -> getopts::Options {
|
||||
fn command_line_options() -> getopts::Options {
|
||||
let mut options = getopts::Options::new();
|
||||
options.optopt("s",
|
||||
"eval-style",
|
||||
"Specify whether to compile (if supported) or interpret the language. If not specified, the default is language-specific",
|
||||
"[compile|interpret]"
|
||||
);
|
||||
options.optflag("",
|
||||
"list-languages",
|
||||
"Show a list of all supported languages");
|
||||
options.optopt("l",
|
||||
"lang",
|
||||
"Start up REPL in a language",
|
||||
"LANGUAGE");
|
||||
options.optflag("h",
|
||||
"help",
|
||||
"Show help text");
|
||||
options.optflag("w",
|
||||
"webapp",
|
||||
"Start up web interpreter");
|
||||
options.optopt("d",
|
||||
"debug",
|
||||
"Debug a stage (l = tokenizer, a = AST, r = parse trace, s = symbol table)",
|
||||
"[l|a|r|s]");
|
||||
options
|
||||
}
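// A minimal usage sketch for the flags registered above (assumptions: the `matches`
// wiring and the example invocation `schala -l schala -d a`; the getopts calls used
// here are the standard ones from the getopts crate):
//
//     let matches = command_line_options().parse(std::env::args().skip(1)).unwrap();
//     if matches.opt_present("list-languages") { /* print the registered language names */ }
//     if let Some(lang) = matches.opt_str("l")  { /* start the REPL in that language */ }
//     if let Some(stage) = matches.opt_str("d") { /* e.g. `-d a` asks for AST debug output */ }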
|
||||
|
||||
@@ -1,279 +0,0 @@
|
||||
#![allow(non_snake_case)]
|
||||
#![allow(dead_code)]
|
||||
extern crate llvm_sys;
|
||||
|
||||
use self::llvm_sys::{LLVMIntPredicate, LLVMRealPredicate};
|
||||
use self::llvm_sys::prelude::*;
|
||||
use self::llvm_sys::core;
|
||||
use std::ptr;
|
||||
use std::ffi::{CString, CStr};
|
||||
use std::os::raw::c_char;
|
||||
|
||||
pub fn create_context() -> LLVMContextRef {
|
||||
unsafe { core::LLVMContextCreate() }
|
||||
}
|
||||
pub fn module_create_with_name(name: &str) -> LLVMModuleRef {
|
||||
unsafe {
|
||||
let n = name.as_ptr() as *const _;
|
||||
core::LLVMModuleCreateWithName(n)
|
||||
}
|
||||
}
|
||||
pub fn CreateBuilderInContext(context: LLVMContextRef) -> LLVMBuilderRef {
|
||||
unsafe { core::LLVMCreateBuilderInContext(context) }
|
||||
}
|
||||
|
||||
pub fn AppendBasicBlockInContext(context: LLVMContextRef,
|
||||
function: LLVMValueRef,
|
||||
name: &str)
|
||||
-> LLVMBasicBlockRef {
|
||||
let c_name = CString::new(name).unwrap();
|
||||
unsafe { core::LLVMAppendBasicBlockInContext(context, function, c_name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn AddFunction(module: LLVMModuleRef, name: &str, function_type: LLVMTypeRef) -> LLVMValueRef {
|
||||
let c_name = CString::new(name).unwrap();
|
||||
unsafe { core::LLVMAddFunction(module, c_name.as_ptr(), function_type) }
|
||||
}
|
||||
|
||||
pub fn FunctionType(return_type: LLVMTypeRef,
|
||||
mut param_types: Vec<LLVMTypeRef>,
|
||||
is_var_rag: bool)
|
||||
-> LLVMTypeRef {
|
||||
let len = param_types.len();
|
||||
unsafe {
|
||||
let pointer = param_types.as_mut_ptr();
|
||||
core::LLVMFunctionType(return_type,
|
||||
pointer,
|
||||
len as u32,
|
||||
if is_var_rag { 1 } else { 0 })
|
||||
}
|
||||
}
|
||||
|
||||
pub fn GetNamedFunction(module: LLVMModuleRef,
|
||||
name: &str) -> Option<LLVMValueRef> {
|
||||
|
||||
let c_name = CString::new(name).unwrap();
|
||||
let ret = unsafe { core::LLVMGetNamedFunction(module, c_name.as_ptr()) };
|
||||
|
||||
if ret.is_null() {
|
||||
None
|
||||
} else {
|
||||
Some(ret)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn VoidTypeInContext(context: LLVMContextRef) -> LLVMTypeRef {
|
||||
unsafe { core::LLVMVoidTypeInContext(context) }
|
||||
}
|
||||
|
||||
pub fn DisposeBuilder(builder: LLVMBuilderRef) {
|
||||
unsafe { core::LLVMDisposeBuilder(builder) }
|
||||
}
|
||||
|
||||
pub fn DisposeModule(module: LLVMModuleRef) {
|
||||
unsafe { core::LLVMDisposeModule(module) }
|
||||
}
|
||||
|
||||
pub fn ContextDispose(context: LLVMContextRef) {
|
||||
unsafe { core::LLVMContextDispose(context) }
|
||||
}
|
||||
|
||||
pub fn PositionBuilderAtEnd(builder: LLVMBuilderRef, basic_block: LLVMBasicBlockRef) {
|
||||
unsafe { core::LLVMPositionBuilderAtEnd(builder, basic_block) }
|
||||
}
|
||||
|
||||
pub fn BuildRet(builder: LLVMBuilderRef, val: LLVMValueRef) -> LLVMValueRef {
|
||||
unsafe { core::LLVMBuildRet(builder, val) }
|
||||
}
|
||||
|
||||
pub fn BuildRetVoid(builder: LLVMBuilderRef) -> LLVMValueRef {
|
||||
unsafe { core::LLVMBuildRetVoid(builder) }
|
||||
}
|
||||
|
||||
pub fn DumpModule(module: LLVMModuleRef) {
|
||||
unsafe { core::LLVMDumpModule(module) }
|
||||
}
|
||||
|
||||
pub fn Int64TypeInContext(context: LLVMContextRef) -> LLVMTypeRef {
|
||||
unsafe { core::LLVMInt64TypeInContext(context) }
|
||||
}
|
||||
|
||||
pub fn ConstInt(int_type: LLVMTypeRef, n: u64, sign_extend: bool) -> LLVMValueRef {
|
||||
unsafe { core::LLVMConstInt(int_type, n, if sign_extend { 1 } else { 0 }) }
|
||||
}
|
||||
|
||||
pub fn BuildAdd(builder: LLVMBuilderRef,
|
||||
lhs: LLVMValueRef,
|
||||
rhs: LLVMValueRef,
|
||||
reg_name: &str)
|
||||
-> LLVMValueRef {
|
||||
let name = CString::new(reg_name).unwrap();
|
||||
unsafe { core::LLVMBuildAdd(builder, lhs, rhs, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn BuildSub(builder: LLVMBuilderRef,
|
||||
lhs: LLVMValueRef,
|
||||
rhs: LLVMValueRef,
|
||||
reg_name: &str)
|
||||
-> LLVMValueRef {
|
||||
let name = CString::new(reg_name).unwrap();
|
||||
unsafe { core::LLVMBuildSub(builder, lhs, rhs, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn BuildMul(builder: LLVMBuilderRef,
|
||||
lhs: LLVMValueRef,
|
||||
rhs: LLVMValueRef,
|
||||
reg_name: &str)
|
||||
-> LLVMValueRef {
|
||||
let name = CString::new(reg_name).unwrap();
|
||||
unsafe { core::LLVMBuildMul(builder, lhs, rhs, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn BuildUDiv(builder: LLVMBuilderRef,
|
||||
lhs: LLVMValueRef,
|
||||
rhs: LLVMValueRef,
|
||||
reg_name: &str)
|
||||
-> LLVMValueRef {
|
||||
let name = CString::new(reg_name).unwrap();
|
||||
unsafe { core::LLVMBuildUDiv(builder, lhs, rhs, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn BuildSRem(builder: LLVMBuilderRef,
|
||||
lhs: LLVMValueRef,
|
||||
rhs: LLVMValueRef,
|
||||
reg_name: &str)
|
||||
-> LLVMValueRef {
|
||||
let name = CString::new(reg_name).unwrap();
|
||||
unsafe { core::LLVMBuildSRem(builder, lhs, rhs, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn BuildCondBr(builder: LLVMBuilderRef,
|
||||
if_expr: LLVMValueRef,
|
||||
then_expr: LLVMBasicBlockRef,
|
||||
else_expr: LLVMBasicBlockRef) -> LLVMValueRef {
|
||||
|
||||
|
||||
unsafe { core::LLVMBuildCondBr(builder, if_expr, then_expr, else_expr) }
|
||||
}
|
||||
|
||||
pub fn BuildBr(builder: LLVMBuilderRef,
|
||||
dest: LLVMBasicBlockRef) -> LLVMValueRef {
|
||||
unsafe { core::LLVMBuildBr(builder, dest) }
|
||||
}
|
||||
|
||||
pub fn GetInsertBlock(builder: LLVMBuilderRef) -> LLVMBasicBlockRef {
|
||||
unsafe { core::LLVMGetInsertBlock(builder) }
|
||||
}
|
||||
|
||||
pub fn BuildPhi(builder: LLVMBuilderRef, ty: LLVMTypeRef, name: &str) -> LLVMValueRef {
|
||||
let name = CString::new(name).unwrap();
|
||||
unsafe { core::LLVMBuildPhi(builder, ty, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn SetValueName(value: LLVMValueRef, name: &str) {
|
||||
let name = CString::new(name).unwrap();
|
||||
unsafe {
|
||||
core::LLVMSetValueName(value, name.as_ptr())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn GetValueName(value: LLVMValueRef) -> String {
|
||||
unsafe {
|
||||
let name_ptr: *const c_char = core::LLVMGetValueName(value);
|
||||
CStr::from_ptr(name_ptr).to_string_lossy().into_owned()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn GetParams(function: LLVMValueRef) -> Vec<LLVMValueRef> {
|
||||
let size = CountParams(function);
|
||||
unsafe {
|
||||
let mut container = Vec::with_capacity(size);
|
||||
container.set_len(size);
|
||||
core::LLVMGetParams(function, container.as_mut_ptr());
|
||||
container
|
||||
}
|
||||
}
|
||||
|
||||
pub fn CountParams(function: LLVMValueRef) -> usize {
|
||||
unsafe { core::LLVMCountParams(function) as usize }
|
||||
}
|
||||
|
||||
pub fn BuildFCmp(builder: LLVMBuilderRef,
|
||||
op: LLVMRealPredicate,
|
||||
lhs: LLVMValueRef,
|
||||
rhs: LLVMValueRef,
|
||||
name: &str) -> LLVMValueRef {
|
||||
let name = CString::new(name).unwrap();
|
||||
unsafe { core::LLVMBuildFCmp(builder, op, lhs, rhs, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn BuildZExt(builder: LLVMBuilderRef,
|
||||
val: LLVMValueRef,
|
||||
dest_type: LLVMTypeRef,
|
||||
name: &str) -> LLVMValueRef {
|
||||
let name = CString::new(name).unwrap();
|
||||
unsafe { core::LLVMBuildZExt(builder, val, dest_type, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn BuildUIToFP(builder: LLVMBuilderRef,
|
||||
val: LLVMValueRef,
|
||||
dest_type: LLVMTypeRef,
|
||||
name: &str) -> LLVMValueRef {
|
||||
|
||||
let name = CString::new(name).unwrap();
|
||||
unsafe { core::LLVMBuildUIToFP(builder, val, dest_type, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn BuildICmp(builder: LLVMBuilderRef,
|
||||
op: LLVMIntPredicate,
|
||||
lhs: LLVMValueRef,
|
||||
rhs: LLVMValueRef,
|
||||
name: &str) -> LLVMValueRef {
|
||||
let name = CString::new(name).unwrap();
|
||||
unsafe { core::LLVMBuildICmp(builder, op, lhs, rhs, name.as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn GetBasicBlockParent(block: LLVMBasicBlockRef) -> LLVMValueRef {
|
||||
unsafe { core::LLVMGetBasicBlockParent(block) }
|
||||
}
|
||||
|
||||
pub fn GetBasicBlocks(function: LLVMValueRef) -> Vec<LLVMBasicBlockRef> {
|
||||
let size = CountBasicBlocks(function);
|
||||
unsafe {
|
||||
let mut container = Vec::with_capacity(size);
|
||||
container.set_len(size);
|
||||
core::LLVMGetBasicBlocks(function, container.as_mut_ptr());
|
||||
container
|
||||
}
|
||||
}
|
||||
|
||||
pub fn CountBasicBlocks(function: LLVMValueRef) -> usize {
|
||||
unsafe { core::LLVMCountBasicBlocks(function) as usize }
|
||||
}
|
||||
|
||||
pub fn PrintModuleToString(module: LLVMModuleRef) -> String {
|
||||
unsafe {
|
||||
let str_ptr: *const c_char = core::LLVMPrintModuleToString(module);
|
||||
CStr::from_ptr(str_ptr).to_string_lossy().into_owned()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn AddIncoming(phi_node: LLVMValueRef, mut incoming_values: Vec<LLVMValueRef>,
|
||||
mut incoming_blocks: Vec<LLVMBasicBlockRef>) {
|
||||
|
||||
let count = incoming_blocks.len() as u32;
|
||||
if incoming_values.len() as u32 != count {
|
||||
panic!("Bad invocation of AddIncoming");
|
||||
}
|
||||
|
||||
unsafe {
|
||||
let vals = incoming_values.as_mut_ptr();
|
||||
let blocks = incoming_blocks.as_mut_ptr();
|
||||
core::LLVMAddIncoming(phi_node, vals, blocks, count)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn PrintModuleToFile(module: LLVMModuleRef, filename: &str) -> LLVMBool {
|
||||
let out_file = CString::new(filename).unwrap();
|
||||
unsafe { core::LLVMPrintModuleToFile(module, out_file.as_ptr(), ptr::null_mut()) }
|
||||
}
|
||||
@@ -1,23 +1,34 @@
|
||||
use super::{Repl, InterpreterDirectiveOutput};
|
||||
use crate::repl::directive_actions::DirectiveAction;
|
||||
use colored::*;
|
||||
|
||||
/// A CommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first
|
||||
/// Terminal, it will use the `DirectiveAction` found there to find an appropriate function to execute,
|
||||
/// and then execute it with any remaining arguments
|
||||
#[derive(Clone)]
|
||||
pub enum CommandTree {
|
||||
Terminal {
|
||||
name: String,
|
||||
children: Vec<CommandTree>,
|
||||
help_msg: Option<String>,
|
||||
function: Option<Box<(fn() -> Option<String>)>>,
|
||||
action: DirectiveAction,
|
||||
},
|
||||
NonTerminal {
|
||||
name: String,
|
||||
children: Vec<CommandTree>,
|
||||
help_msg: Option<String>,
|
||||
function: Option<Box<(fn() -> Option<String>)>>,
|
||||
action: DirectiveAction,
|
||||
},
|
||||
Top(Vec<CommandTree>),
|
||||
}
|
||||
|
||||
impl CommandTree {
|
||||
pub fn term(s: &str, help: Option<&str>) -> CommandTree {
|
||||
CommandTree::Terminal {name: s.to_string(), help_msg: help.map(|x| x.to_string()), function: None }
|
||||
pub fn nonterm_no_further_tab_completions(s: &str, help: Option<&str>) -> CommandTree {
|
||||
CommandTree::NonTerminal {name: s.to_string(), help_msg: help.map(|x| x.to_string()), children: vec![], action: DirectiveAction::Null }
|
||||
}
|
||||
|
||||
pub fn terminal(s: &str, help: Option<&str>, children: Vec<CommandTree>, action: DirectiveAction) -> CommandTree {
|
||||
CommandTree::Terminal {name: s.to_string(), help_msg: help.map(|x| x.to_string()), children, action}
|
||||
}
|
||||
|
||||
pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree {
|
||||
@@ -25,7 +36,7 @@ impl CommandTree {
|
||||
name: s.to_string(),
|
||||
help_msg: help.map(|x| x.to_string()),
|
||||
children,
|
||||
function: None,
|
||||
action: DirectiveAction::Null
|
||||
}
|
||||
}
|
||||
|
||||
@@ -38,16 +49,51 @@ impl CommandTree {
|
||||
}
|
||||
pub fn get_help(&self) -> &str {
|
||||
match self {
|
||||
CommandTree::Terminal { help_msg, ..} => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
|
||||
CommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
|
||||
CommandTree::Terminal { help_msg, ..} => help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
|
||||
CommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
|
||||
CommandTree::Top(_) => ""
|
||||
}
|
||||
}
|
||||
pub fn get_children(&self) -> Vec<&str> {
|
||||
pub fn get_children(&self) -> &Vec<CommandTree> {
|
||||
use CommandTree::*;
|
||||
match self {
|
||||
CommandTree::Terminal { .. } => vec![],
|
||||
CommandTree::NonTerminal { children, .. } => children.iter().map(|x| x.get_cmd()).collect(),
|
||||
CommandTree::Top(children) => children.iter().map(|x| x.get_cmd()).collect(),
|
||||
Terminal { children, .. } |
|
||||
NonTerminal { children, .. } |
|
||||
Top(children) => children
|
||||
}
|
||||
}
|
||||
pub fn get_subcommands(&self) -> Vec<&str> {
|
||||
self.get_children().iter().map(|x| x.get_cmd()).collect()
|
||||
}
|
||||
|
||||
pub fn perform(&self, repl: &mut Repl, arguments: &Vec<&str>) -> InterpreterDirectiveOutput {
|
||||
let mut dir_pointer: &CommandTree = self;
|
||||
let mut idx = 0;
|
||||
|
||||
let res: Result<(DirectiveAction, usize), String> = loop {
|
||||
match dir_pointer {
|
||||
CommandTree::Top(subcommands) | CommandTree::NonTerminal { children: subcommands, .. } => {
|
||||
let next_command = match arguments.get(idx) {
|
||||
Some(cmd) => cmd,
|
||||
None => break Err(format!("Command requires arguments"))
|
||||
};
|
||||
idx += 1;
|
||||
match subcommands.iter().find(|sc| sc.get_cmd() == *next_command) {
|
||||
Some(command_tree) => {
|
||||
dir_pointer = command_tree;
|
||||
},
|
||||
None => break Err(format!("Command {} not found", next_command))
|
||||
};
|
||||
},
|
||||
CommandTree::Terminal { action, .. } => {
|
||||
break Ok((action.clone(), idx));
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
match res {
|
||||
Ok((action, idx)) => action.perform(repl, &arguments[idx..]),
|
||||
Err(err) => Some(err.red().to_string())
|
||||
}
|
||||
}
|
||||
}
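// A minimal sketch of the parsing described in the CommandTree doc comment above, using
// the constructors and `perform` defined in this file (the `repl` value and the example
// argument vector are assumptions):
//
//     let tree = CommandTree::Top(vec![
//         CommandTree::nonterm("debug", Some("Configure debug information"), vec![
//             CommandTree::terminal("list-passes", Some("List all registered compiler passes"),
//                                   vec![], DirectiveAction::ListPasses),
//         ]),
//     ]);
//     // For `:debug list-passes`, `perform` matches "debug" against the NonTerminal's name,
//     // descends to the Terminal, clones its DirectiveAction, and runs it with the rest:
//     let output = tree.perform(&mut repl, &vec!["debug", "list-passes"]);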
schala-repl/src/repl/directive_actions.rs (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
use super::{Repl, InterpreterDirectiveOutput};
|
||||
use crate::repl::help::help;
|
||||
use crate::language::{LangMetaRequest, LangMetaResponse, DebugAsk, DebugResponse};
|
||||
use itertools::Itertools;
|
||||
use std::fmt::Write as FmtWrite;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum DirectiveAction {
|
||||
Null,
|
||||
Help,
|
||||
QuitProgram,
|
||||
ListPasses,
|
||||
ShowImmediate,
|
||||
Show,
|
||||
Hide,
|
||||
TotalTimeOff,
|
||||
TotalTimeOn,
|
||||
StageTimeOff,
|
||||
StageTimeOn,
|
||||
Doc,
|
||||
}
|
||||
|
||||
impl DirectiveAction {
|
||||
pub fn perform(&self, repl: &mut Repl, arguments: &[&str]) -> InterpreterDirectiveOutput {
|
||||
use DirectiveAction::*;
|
||||
match self {
|
||||
Null => None,
|
||||
Help => help(repl, arguments),
|
||||
QuitProgram => {
|
||||
repl.save_before_exit();
|
||||
::std::process::exit(0)
|
||||
},
|
||||
ListPasses => {
|
||||
let language_state = repl.get_cur_language_state();
|
||||
let pass_names = match language_state.request_meta(LangMetaRequest::StageNames) {
|
||||
LangMetaResponse::StageNames(names) => names,
|
||||
_ => vec![],
|
||||
};
|
||||
|
||||
let mut buf = String::new();
|
||||
for pass in pass_names.iter().map(|name| Some(name)).intersperse(None) {
|
||||
match pass {
|
||||
Some(pass) => write!(buf, "{}", pass).unwrap(),
|
||||
None => write!(buf, " -> ").unwrap(),
|
||||
}
|
||||
}
|
||||
Some(buf)
|
||||
},
|
||||
ShowImmediate => {
|
||||
let cur_state = repl.get_cur_language_state();
|
||||
let stage_name = match arguments.get(0) {
|
||||
Some(s) => s.to_string(),
|
||||
None => return Some(format!("Must specify a thing to debug")),
|
||||
};
|
||||
let meta = LangMetaRequest::ImmediateDebug(DebugAsk::ByStage { stage_name: stage_name.clone(), token: None });
|
||||
let meta_response = cur_state.request_meta(meta);
|
||||
|
||||
let response = match meta_response {
|
||||
LangMetaResponse::ImmediateDebug(DebugResponse { ask, value }) => match ask {
|
||||
DebugAsk::ByStage { stage_name: ref this_stage_name, ..} if *this_stage_name == stage_name => value,
|
||||
_ => return Some(format!("Wrong debug stage"))
|
||||
},
|
||||
_ => return Some(format!("Invalid language meta response")),
|
||||
};
|
||||
Some(response)
|
||||
},
|
||||
Show => {
|
||||
let this_stage_name = match arguments.get(0) {
|
||||
Some(s) => s.to_string(),
|
||||
None => return Some(format!("Must specify a stage to show")),
|
||||
};
|
||||
let token = arguments.get(1).map(|s| s.to_string());
|
||||
repl.options.debug_asks.retain(|ask| match ask {
|
||||
DebugAsk::ByStage { stage_name, .. } if *stage_name == this_stage_name => false,
|
||||
_ => true
|
||||
});
|
||||
|
||||
let ask = DebugAsk::ByStage { stage_name: this_stage_name, token };
|
||||
repl.options.debug_asks.insert(ask);
|
||||
None
|
||||
},
|
||||
Hide => {
|
||||
let stage_name_to_remove = match arguments.get(0) {
|
||||
Some(s) => s.to_string(),
|
||||
None => return Some(format!("Must specify a stage to hide")),
|
||||
};
|
||||
repl.options.debug_asks.retain(|ask| match ask {
|
||||
DebugAsk::ByStage { stage_name, .. } if *stage_name == stage_name_to_remove => false,
|
||||
_ => true
|
||||
});
|
||||
None
|
||||
},
|
||||
TotalTimeOff => total_time_off(repl, arguments),
|
||||
TotalTimeOn => total_time_on(repl, arguments),
|
||||
StageTimeOff => stage_time_off(repl, arguments),
|
||||
StageTimeOn => stage_time_on(repl, arguments),
|
||||
Doc => doc(repl, arguments),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn total_time_on(repl: &mut Repl, _: &[&str]) -> InterpreterDirectiveOutput {
|
||||
repl.options.show_total_time = true;
|
||||
None
|
||||
}
|
||||
|
||||
fn total_time_off(repl: &mut Repl, _: &[&str]) -> InterpreterDirectiveOutput {
|
||||
repl.options.show_total_time = false;
|
||||
None
|
||||
}
|
||||
|
||||
fn stage_time_on(repl: &mut Repl, _: &[&str]) -> InterpreterDirectiveOutput {
|
||||
repl.options.show_stage_times = true;
|
||||
None
|
||||
}
|
||||
|
||||
fn stage_time_off(repl: &mut Repl, _: &[&str]) -> InterpreterDirectiveOutput {
|
||||
repl.options.show_stage_times = false;
|
||||
None
|
||||
}
|
||||
|
||||
fn doc(repl: &mut Repl, arguments: &[&str]) -> InterpreterDirectiveOutput {
|
||||
arguments.get(0).map(|cmd| {
|
||||
let source = cmd.to_string();
|
||||
let meta = LangMetaRequest::Docs { source };
|
||||
let cur_state = repl.get_cur_language_state();
|
||||
match cur_state.request_meta(meta) {
|
||||
LangMetaResponse::Docs { doc_string } => Some(doc_string),
|
||||
_ => Some(format!("Invalid doc response"))
|
||||
}
|
||||
}).unwrap_or(Some(format!(":docs needs an argument")))
|
||||
}
schala-repl/src/repl/directives.rs (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
use crate::repl::command_tree::CommandTree;
|
||||
use crate::repl::directive_actions::DirectiveAction;
|
||||
|
||||
pub fn directives_from_pass_names(pass_names: &Vec<String>) -> CommandTree {
|
||||
let passes_directives: Vec<CommandTree> = pass_names.iter()
|
||||
.map(|pass_name| {
|
||||
if pass_name == "parsing" {
|
||||
CommandTree::nonterm(pass_name, None, vec![
|
||||
CommandTree::nonterm_no_further_tab_completions("compact", None),
|
||||
CommandTree::nonterm_no_further_tab_completions("expanded", None),
|
||||
CommandTree::nonterm_no_further_tab_completions("trace", None),
|
||||
])
|
||||
} else {
|
||||
CommandTree::nonterm_no_further_tab_completions(pass_name, None)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
CommandTree::Top(get_list(&passes_directives, true))
|
||||
}
|
||||
|
||||
fn get_list(passes_directives: &Vec<CommandTree>, include_help: bool) -> Vec<CommandTree> {
|
||||
use DirectiveAction::*;
|
||||
|
||||
vec![
|
||||
CommandTree::terminal("exit", Some("exit the REPL"), vec![], QuitProgram),
|
||||
CommandTree::terminal("quit", Some("exit the REPL"), vec![], QuitProgram),
|
||||
CommandTree::terminal("help", Some("Print this help message"), if include_help { get_list(passes_directives, false) } else { vec![] }, Help),
|
||||
CommandTree::nonterm("debug",
|
||||
Some("Configure debug information"),
|
||||
vec![
|
||||
CommandTree::terminal("list-passes", Some("List all registered compiler passes"), vec![], ListPasses),
|
||||
CommandTree::terminal("show-immediate", None, passes_directives.clone(), ShowImmediate),
|
||||
CommandTree::terminal("show", Some("Show debug output for a specific pass"), passes_directives.clone(), Show),
|
||||
CommandTree::terminal("hide", Some("Hide debug output for a specific pass"), passes_directives.clone(), Hide),
|
||||
CommandTree::nonterm("total-time", None, vec![
|
||||
CommandTree::terminal("on", None, vec![], TotalTimeOn),
|
||||
CommandTree::terminal("off", None, vec![], TotalTimeOff),
|
||||
]),
|
||||
CommandTree::nonterm("stage-times", Some("Computation time per-stage"), vec![
|
||||
CommandTree::terminal("on", None, vec![], StageTimeOn),
|
||||
CommandTree::terminal("off", None, vec![], StageTimeOff),
|
||||
])
|
||||
]
|
||||
),
|
||||
CommandTree::nonterm("lang",
|
||||
Some("switch between languages, or go directly to a langauge by name"),
|
||||
vec![
|
||||
CommandTree::nonterm_no_further_tab_completions("next", None),
|
||||
CommandTree::nonterm_no_further_tab_completions("prev", None),
|
||||
CommandTree::nonterm("go", None, vec![]),
|
||||
]
|
||||
),
|
||||
CommandTree::terminal("doc", Some("Get language-specific help for an item"), vec![], Doc),
|
||||
]
|
||||
}
|
||||
schala-repl/src/repl/help.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
use std::fmt::Write as FmtWrite;
|
||||
|
||||
use colored::*;
|
||||
use super::command_tree::CommandTree;
|
||||
use super::{Repl, InterpreterDirectiveOutput};
|
||||
|
||||
pub fn help(repl: &mut Repl, arguments: &[&str]) -> InterpreterDirectiveOutput {
|
||||
match arguments {
|
||||
[] => return global_help(repl),
|
||||
commands => {
|
||||
let dirs = repl.get_directives();
|
||||
Some(match get_directive_from_commands(commands, &dirs) {
|
||||
None => format!("Directive `{}` not found", commands.last().unwrap()),
|
||||
Some(dir) => {
|
||||
let mut buf = String::new();
|
||||
let cmd = dir.get_cmd();
|
||||
let children = dir.get_children();
|
||||
writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
|
||||
for sub in children.iter() {
|
||||
writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help()).unwrap();
|
||||
}
|
||||
buf
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_directive_from_commands<'a>(commands: &[&str], dirs: &'a CommandTree) -> Option<&'a CommandTree> {
|
||||
let mut directive_list = dirs.get_children();
|
||||
let mut matched_directive = None;
|
||||
for cmd in commands {
|
||||
let found = directive_list.iter().find(|directive| directive.get_cmd() == *cmd);
|
||||
if let Some(dir) = found {
|
||||
directive_list = dir.get_children();
|
||||
}
|
||||
|
||||
matched_directive = found;
|
||||
}
|
||||
matched_directive
|
||||
}
|
||||
|
||||
fn global_help(repl: &mut Repl) -> InterpreterDirectiveOutput {
|
||||
let mut buf = String::new();
|
||||
let sigil = repl.interpreter_directive_sigil;
|
||||
|
||||
writeln!(buf, "{} version {}", "Schala REPL".bright_red().bold(), crate::VERSION_STRING).unwrap();
|
||||
writeln!(buf, "-----------------------").unwrap();
|
||||
|
||||
for directive in repl.get_directives().get_children() {
|
||||
writeln!(buf, "{}{} - {}", sigil, directive.get_cmd(), directive.get_help()).unwrap();
|
||||
}
|
||||
|
||||
let ref lang = repl.get_cur_language_state();
|
||||
writeln!(buf, "").unwrap();
|
||||
writeln!(buf, "Language-specific help for {}", lang.get_language_name()).unwrap();
|
||||
writeln!(buf, "-----------------------").unwrap();
|
||||
Some(buf)
|
||||
}
|
||||
@@ -1,297 +1,193 @@
|
||||
use std::fmt::Write as FmtWrite;
|
||||
use std::io::{Read, Write};
|
||||
use std::fs::File;
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use crate::language::{ProgrammingLanguageInterface,
|
||||
ComputationRequest, LangMetaResponse, LangMetaRequest};
|
||||
|
||||
use colored::*;
|
||||
use itertools::Itertools;
|
||||
use language::{ProgrammingLanguageInterface, EvalOptions,
|
||||
PassDebugOptionsDescriptor};
|
||||
mod command_tree;
|
||||
use self::command_tree::CommandTree;
|
||||
mod repl_options;
|
||||
use repl_options::ReplOptions;
|
||||
mod directive_actions;
|
||||
mod directives;
|
||||
use directives::directives_from_pass_names;
|
||||
mod help;
|
||||
mod response;
|
||||
use response::ReplResponse;
|
||||
|
||||
const HISTORY_SAVE_FILE: &'static str = ".schala_history";
|
||||
const OPTIONS_SAVE_FILE: &'static str = ".schala_repl";
|
||||
|
||||
type InterpreterDirectiveOutput = Option<String>;
|
||||
|
||||
pub struct Repl {
|
||||
options: EvalOptions,
|
||||
languages: Vec<Box<ProgrammingLanguageInterface>>,
|
||||
current_language_index: usize,
|
||||
interpreter_directive_sigil: char,
|
||||
pub interpreter_directive_sigil: char,
|
||||
line_reader: ::linefeed::interface::Interface<::linefeed::terminal::DefaultTerminal>,
|
||||
language_states: Vec<Box<dyn ProgrammingLanguageInterface>>,
|
||||
options: ReplOptions,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum PromptStyle {
|
||||
Normal,
|
||||
Multiline
|
||||
}
|
||||
|
||||
impl Repl {
|
||||
pub fn new(languages: Vec<Box<ProgrammingLanguageInterface>>, initial_index: usize) -> Repl {
|
||||
pub fn new(initial_states: Vec<Box<dyn ProgrammingLanguageInterface>>) -> Repl {
|
||||
use linefeed::Interface;
|
||||
let i = if initial_index < languages.len() { initial_index } else { 0 };
|
||||
|
||||
let line_reader = Interface::new("schala-repl").unwrap();
|
||||
let interpreter_directive_sigil = ':';
|
||||
|
||||
Repl {
|
||||
options: Repl::get_options(),
|
||||
languages: languages,
|
||||
current_language_index: i,
|
||||
interpreter_directive_sigil: ':',
|
||||
line_reader
|
||||
interpreter_directive_sigil,
|
||||
line_reader,
|
||||
language_states: initial_states,
|
||||
options: ReplOptions::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_cur_language(&self) -> &ProgrammingLanguageInterface {
|
||||
self.languages[self.current_language_index].as_ref()
|
||||
}
|
||||
|
||||
fn get_options() -> EvalOptions {
|
||||
File::open(OPTIONS_SAVE_FILE)
|
||||
.and_then(|mut file| {
|
||||
let mut contents = String::new();
|
||||
file.read_to_string(&mut contents)?;
|
||||
Ok(contents)
|
||||
})
|
||||
.and_then(|contents| {
|
||||
let options: EvalOptions = ::serde_json::from_str(&contents)?;
|
||||
Ok(options)
|
||||
}).unwrap_or(EvalOptions::default())
|
||||
}
|
||||
|
||||
fn save_options(&self) {
|
||||
let ref options = self.options;
|
||||
let read = File::create(OPTIONS_SAVE_FILE)
|
||||
.and_then(|mut file| {
|
||||
let buf = ::serde_json::to_string(options).unwrap();
|
||||
file.write_all(buf.as_bytes())
|
||||
});
|
||||
|
||||
if let Err(err) = read {
|
||||
println!("Error saving {} file {}", OPTIONS_SAVE_FILE, err);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run(&mut self) {
|
||||
use linefeed::ReadResult;
|
||||
|
||||
println!("Schala MetaInterpreter version {}", ::VERSION_STRING);
|
||||
pub fn run_repl(&mut self) {
|
||||
println!("Schala MetaInterpreter version {}", crate::VERSION_STRING);
|
||||
println!("Type {}help for help with the REPL", self.interpreter_directive_sigil);
|
||||
|
||||
self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(());
|
||||
|
||||
loop {
|
||||
let language_name = self.get_cur_language().get_language_name();
|
||||
let directives = self.get_directives();
|
||||
let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, directives);
|
||||
self.line_reader.set_completer(Arc::new(tab_complete_handler));
|
||||
|
||||
let prompt_str = format!("{} >> ", language_name);
|
||||
self.line_reader.set_prompt(&prompt_str).unwrap();
|
||||
|
||||
match self.line_reader.read_line() {
|
||||
Err(e) => {
|
||||
println!("Terminal read error: {}", e);
|
||||
},
|
||||
Ok(ReadResult::Eof) => break,
|
||||
Ok(ReadResult::Signal(_)) => break,
|
||||
Ok(ReadResult::Input(ref input)) => {
|
||||
self.line_reader.add_history_unique(input.to_string());
|
||||
let output = match input.chars().nth(0) {
|
||||
Some(ch) if ch == self.interpreter_directive_sigil => self.handle_interpreter_directive(input),
|
||||
_ => Some(self.input_handler(input)),
|
||||
};
|
||||
if let Some(o) = output {
|
||||
println!("=> {}", o);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(());
|
||||
self.save_options();
|
||||
self.load_options();
|
||||
self.handle_repl_loop();
|
||||
self.save_before_exit();
|
||||
println!("Exiting...");
|
||||
}
|
||||
|
||||
fn input_handler(&mut self, input: &str) -> String {
|
||||
let ref mut language = self.languages[self.current_language_index];
|
||||
let interpreter_output = language.execute_pipeline(input, &self.options);
|
||||
interpreter_output.to_repl()
|
||||
fn load_options(&mut self) {
|
||||
self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(());
|
||||
match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) {
|
||||
Ok(options) => {
|
||||
self.options = options;
|
||||
},
|
||||
Err(()) => ()
|
||||
};
|
||||
}
|
||||
|
||||
fn get_directives(&self) -> CommandTree {
|
||||
let ref passes = self.get_cur_language().get_passes();
|
||||
fn handle_repl_loop(&mut self) {
|
||||
use linefeed::ReadResult::*;
|
||||
let sigil = self.interpreter_directive_sigil;
|
||||
|
||||
let passes_directives: Vec<CommandTree> = passes.iter()
|
||||
.map(|pass_descriptor| {
|
||||
let name = &pass_descriptor.name;
|
||||
if pass_descriptor.debug_options.len() == 0 {
|
||||
CommandTree::term(name, None)
|
||||
} else {
|
||||
let children: Vec<CommandTree> = pass_descriptor.debug_options.iter()
|
||||
.map(|o| CommandTree::term(o, None)).collect();
|
||||
CommandTree::NonTerminal {
|
||||
name: name.clone(),
|
||||
children,
|
||||
help_msg: None,
|
||||
function: None,
|
||||
'main: loop {
|
||||
macro_rules! match_or_break {
|
||||
($line:expr) => {
|
||||
match $line {
|
||||
Err(e) => {
|
||||
println!("readline IO Error: {}", e);
|
||||
break 'main;
|
||||
},
|
||||
Ok(Eof) | Ok(Signal(_)) => break 'main,
|
||||
Ok(Input(ref input)) => input,
|
||||
}
|
||||
}
|
||||
}).collect();
|
||||
}
|
||||
self.update_line_reader();
|
||||
let line = self.line_reader.read_line();
|
||||
let input: &str = match_or_break!(line);
|
||||
|
||||
CommandTree::Top(vec![
|
||||
CommandTree::term("exit", Some("exit the REPL")),
|
||||
CommandTree::term("quit", Some("exit the REPL")),
|
||||
CommandTree::term("help", Some("Print this help message")),
|
||||
CommandTree::nonterm("debug",
|
||||
Some("show or hide pass debug info for a given pass, or display the names of all passes, or turn timing on/off"),
|
||||
vec![
|
||||
CommandTree::term("passes", None),
|
||||
CommandTree::nonterm("show", None, passes_directives.clone()),
|
||||
CommandTree::nonterm("hide", None, passes_directives.clone()),
|
||||
CommandTree::nonterm("timing", None, vec![
|
||||
CommandTree::term("on", None),
|
||||
CommandTree::term("off", None),
|
||||
])
|
||||
]
|
||||
),
|
||||
CommandTree::nonterm("lang",
|
||||
Some("switch between languages, or go directly to a langauge by name"),
|
||||
vec![
|
||||
CommandTree::term("next", None),
|
||||
CommandTree::term("prev", None),
|
||||
CommandTree::nonterm("go", None, vec![]),
|
||||
]
|
||||
),
|
||||
CommandTree::term("doc", Some("Get language-specific help for an item")),
|
||||
])
|
||||
self.line_reader.add_history_unique(input.to_string());
|
||||
let mut chars = input.chars().peekable();
|
||||
let repl_responses = match chars.nth(0) {
|
||||
Some(ch) if ch == sigil => {
|
||||
if chars.peek() == Some(&'{') {
|
||||
let mut buf = String::new();
|
||||
buf.push_str(input.get(2..).unwrap());
|
||||
'multiline: loop {
|
||||
self.set_prompt(PromptStyle::Multiline);
|
||||
let new_line = self.line_reader.read_line();
|
||||
let new_input = match_or_break!(new_line);
|
||||
if new_input.starts_with(":}") {
|
||||
break 'multiline;
|
||||
} else {
|
||||
buf.push_str(new_input);
|
||||
buf.push_str("\n");
|
||||
}
|
||||
}
|
||||
self.handle_input(&buf)
|
||||
} else {
|
||||
match self.handle_interpreter_directive(input) {
|
||||
Some(directive_output) => println!("<> {}", directive_output),
|
||||
None => (),
|
||||
}
|
||||
continue
|
||||
}
|
||||
},
|
||||
_ => self.handle_input(input)
|
||||
};
|
||||
|
||||
for repl_response in repl_responses.iter() {
|
||||
println!("{}", repl_response);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_interpreter_directive(&mut self, input: &str) -> Option<String> {
|
||||
fn update_line_reader(&mut self) {
|
||||
let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, self.get_directives());
|
||||
self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
|
||||
self.set_prompt(PromptStyle::Normal);
|
||||
}
|
||||
|
||||
fn set_prompt(&mut self, prompt_style: PromptStyle) {
|
||||
let prompt_str = match prompt_style {
|
||||
PromptStyle::Normal => ">> ".to_string(),
|
||||
PromptStyle::Multiline => ">| ".to_string(),
|
||||
};
|
||||
|
||||
self.line_reader.set_prompt(&prompt_str).unwrap();
|
||||
}
|
||||
|
||||
fn save_before_exit(&self) {
|
||||
self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(());
|
||||
self.options.save_to_file(OPTIONS_SAVE_FILE);
|
||||
}
|
||||
|
||||
fn handle_interpreter_directive(&mut self, input: &str) -> InterpreterDirectiveOutput {
|
||||
let mut iter = input.chars();
|
||||
iter.next();
|
||||
let commands: Vec<&str> = iter
|
||||
let arguments: Vec<&str> = iter
|
||||
.as_str()
|
||||
.split_whitespace()
|
||||
.collect();
|
||||
|
||||
let initial_cmd: &str = match commands.get(0).clone() {
|
||||
None => return None,
|
||||
Some(s) => s
|
||||
if arguments.len() < 1 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let directives = self.get_directives();
|
||||
directives.perform(self, &arguments)
|
||||
}
|
||||
|
||||
fn get_cur_language_state(&mut self) -> &mut Box<dyn ProgrammingLanguageInterface> {
|
||||
//TODO this is obviously not complete
|
||||
&mut self.language_states[0]
|
||||
}
|
||||
|
||||
fn handle_input(&mut self, input: &str) -> Vec<ReplResponse> {
|
||||
let mut debug_requests = HashSet::new();
|
||||
for ask in self.options.debug_asks.iter() {
|
||||
debug_requests.insert(ask.clone());
|
||||
}
|
||||
|
||||
let request = ComputationRequest { source: input, debug_requests };
|
||||
let ref mut language_state = self.get_cur_language_state();
|
||||
let response = language_state.run_computation(request);
|
||||
response::handle_computation_response(response, &self.options)
|
||||
}
|
||||
|
||||
fn get_directives(&mut self) -> CommandTree {
|
||||
let language_state = self.get_cur_language_state();
|
||||
let pass_names = match language_state.request_meta(LangMetaRequest::StageNames) {
|
||||
LangMetaResponse::StageNames(names) => names,
|
||||
_ => vec![],
|
||||
};
|
||||
|
||||
match initial_cmd {
|
||||
"exit" | "quit" => {
|
||||
self.save_options();
|
||||
::std::process::exit(0)
|
||||
},
|
||||
"lang" | "language" => match commands.get(1) {
|
||||
Some(&"show") => {
|
||||
let mut buf = String::new();
|
||||
for (i, lang) in self.languages.iter().enumerate() {
|
||||
write!(buf, "{}{}\n", if i == self.current_language_index { "* "} else { "" }, lang.get_language_name()).unwrap();
|
||||
}
|
||||
Some(buf)
|
||||
},
|
||||
Some(&"go") => match commands.get(2) {
|
||||
None => Some(format!("Must specify a language name")),
|
||||
Some(&desired_name) => {
|
||||
for (i, _) in self.languages.iter().enumerate() {
|
||||
let lang_name = self.languages[i].get_language_name();
|
||||
if lang_name.to_lowercase() == desired_name.to_lowercase() {
|
||||
self.current_language_index = i;
|
||||
return Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()));
|
||||
}
|
||||
}
|
||||
Some(format!("Language {} not found", desired_name))
|
||||
}
|
||||
},
|
||||
Some(&"next") | Some(&"n") => {
|
||||
self.current_language_index = (self.current_language_index + 1) % self.languages.len();
|
||||
Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()))
|
||||
},
|
||||
Some(&"previous") | Some(&"p") | Some(&"prev") => {
|
||||
self.current_language_index = if self.current_language_index == 0 { self.languages.len() - 1 } else { self.current_language_index - 1 };
|
||||
Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()))
|
||||
},
|
||||
Some(e) => Some(format!("Bad `lang(uage)` argument: {}", e)),
|
||||
None => Some(format!("Valid arguments for `lang(uage)` are `show`, `next`|`n`, `previous`|`prev`|`n`"))
|
||||
},
|
||||
"help" => {
|
||||
let mut buf = String::new();
|
||||
let ref lang = self.languages[self.current_language_index];
|
||||
let directives = match self.get_directives() {
|
||||
CommandTree::Top(children) => children,
|
||||
_ => panic!("Top-level CommandTree not Top")
|
||||
};
|
||||
|
||||
writeln!(buf, "MetaInterpreter options").unwrap();
|
||||
writeln!(buf, "-----------------------").unwrap();
|
||||
|
||||
for directive in directives {
|
||||
let trailer = " ";
|
||||
writeln!(buf, "{}{}- {}", directive.get_cmd(), trailer, directive.get_help()).unwrap();
|
||||
}
|
||||
|
||||
writeln!(buf, "").unwrap();
|
||||
writeln!(buf, "Language-specific help for {}", lang.get_language_name()).unwrap();
|
||||
writeln!(buf, "-----------------------").unwrap();
|
||||
writeln!(buf, "{}", lang.custom_interpreter_directives_help()).unwrap();
|
||||
Some(buf)
|
||||
},
|
||||
"debug" => self.handle_debug(commands),
|
||||
"doc" => self.languages[self.current_language_index]
|
||||
.get_doc(&commands)
|
||||
.or(Some(format!("No docs implemented"))),
|
||||
e => {
|
||||
self.languages[self.current_language_index]
|
||||
.handle_custom_interpreter_directives(&commands)
|
||||
.or(Some(format!("Unknown command: {}", e)))
|
||||
}
|
||||
}
|
||||
}
|
||||
fn handle_debug(&mut self, commands: Vec<&str>) -> Option<String> {
|
||||
let passes = self.get_cur_language().get_passes();
|
||||
match commands.get(1) {
|
||||
Some(&"timing") => match commands.get(2) {
|
||||
Some(&"on") => { self.options.debug_timing = true; None }
|
||||
Some(&"off") => { self.options.debug_timing = false; None }
|
||||
_ => return Some(format!(r#"Argument to "timing" must be "on" or "off""#)),
|
||||
},
|
||||
Some(&"passes") => Some(
|
||||
passes.into_iter()
|
||||
.map(|desc| {
|
||||
if self.options.debug_passes.contains_key(&desc.name) {
|
||||
let color = "green";
|
||||
format!("*{}", desc.name.color(color))
|
||||
} else {
|
||||
desc.name
|
||||
}
|
||||
})
|
||||
.intersperse(format!(" -> "))
|
||||
.collect()),
|
||||
b @ Some(&"show") | b @ Some(&"hide") => {
|
||||
let show = b == Some(&"show");
|
||||
let debug_pass: String = match commands.get(2) {
|
||||
Some(s) => s.to_string(),
|
||||
None => return Some(format!("Must specify a stage to debug")),
|
||||
};
|
||||
let pass_opt = commands.get(3);
|
||||
if let Some(desc) = passes.iter().find(|desc| desc.name == debug_pass) {
|
||||
let mut opts = vec![];
|
||||
if let Some(opt) = pass_opt {
|
||||
opts.push(opt.to_string());
|
||||
}
|
||||
let msg = format!("{} debug for pass {}", if show { "Enabling" } else { "Disabling" }, debug_pass);
|
||||
if show {
|
||||
self.options.debug_passes.insert(desc.name.clone(), PassDebugOptionsDescriptor { opts });
|
||||
} else {
|
||||
self.options.debug_passes.remove(&desc.name);
|
||||
}
|
||||
Some(msg)
|
||||
} else {
|
||||
Some(format!("Couldn't find stage: {}", debug_pass))
|
||||
}
|
||||
},
|
||||
_ => Some(format!("Unknown debug command"))
|
||||
}
|
||||
directives_from_pass_names(&pass_names)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
struct TabCompleteHandler {
|
||||
sigil: char,
|
||||
top_level_commands: CommandTree,
|
||||
@@ -313,43 +209,43 @@ impl<T: Terminal> Completer<T> for TabCompleteHandler {
|
||||
fn complete(&self, word: &str, prompter: &::linefeed::prompter::Prompter<T>, start: usize, _end: usize) -> Option<Vec<Completion>> {
|
||||
let line = prompter.buffer();
|
||||
|
||||
if line.starts_with(&format!("{}", self.sigil)) {
|
||||
let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace();
|
||||
let mut completions = Vec::new();
|
||||
let mut command_tree: Option<&CommandTree> = Some(&self.top_level_commands);
|
||||
if !line.starts_with(self.sigil) {
|
||||
return None;
|
||||
}
|
||||
|
||||
loop {
|
||||
match words.next() {
|
||||
None => {
|
||||
let top = match command_tree {
|
||||
Some(CommandTree::Top(_)) => true,
|
||||
_ => false
|
||||
};
|
||||
let word = if top { word.get(1..).unwrap() } else { word };
|
||||
for cmd in command_tree.map(|x| x.get_children()).unwrap_or(vec![]).into_iter() {
|
||||
if cmd.starts_with(word) {
|
||||
completions.push(Completion {
|
||||
completion: format!("{}{}", if top { ":" } else { "" }, cmd),
|
||||
display: Some(cmd.to_string()),
|
||||
suffix: ::linefeed::complete::Suffix::Some(' ')
|
||||
})
|
||||
}
|
||||
let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace();
|
||||
let mut completions = Vec::new();
|
||||
let mut command_tree: Option<&CommandTree> = Some(&self.top_level_commands);
|
||||
|
||||
loop {
|
||||
match words.next() {
|
||||
None => {
|
||||
let top = match command_tree {
|
||||
Some(CommandTree::Top(_)) => true,
|
||||
_ => false
|
||||
};
|
||||
let word = if top { word.get(1..).unwrap() } else { word };
|
||||
for cmd in command_tree.map(|x| x.get_subcommands()).unwrap_or(vec![]).into_iter() {
|
||||
if cmd.starts_with(word) {
|
||||
completions.push(Completion {
|
||||
completion: format!("{}{}", if top { ":" } else { "" }, cmd),
|
||||
display: Some(cmd.to_string()),
|
||||
suffix: ::linefeed::complete::Suffix::Some(' ')
|
||||
})
|
||||
}
|
||||
break;
|
||||
},
|
||||
Some(s) => {
|
||||
let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
|
||||
CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
|
||||
CommandTree::NonTerminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
|
||||
CommandTree::Terminal { .. } => None,
|
||||
});
|
||||
command_tree = new_ptr;
|
||||
}
|
||||
break;
|
||||
},
|
||||
Some(s) => {
|
||||
let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
|
||||
CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
|
||||
CommandTree::NonTerminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
|
||||
CommandTree::Terminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
|
||||
});
|
||||
command_tree = new_ptr;
|
||||
}
|
||||
}
|
||||
Some(completions)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
Some(completions)
|
||||
}
|
||||
}
|
||||
|
||||
schala-repl/src/repl/repl_options.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
use crate::language::DebugAsk;
|
||||
|
||||
use std::io::{Read, Write};
|
||||
use std::collections::HashSet;
|
||||
use std::fs::File;
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct ReplOptions {
|
||||
pub debug_asks: HashSet<DebugAsk>,
|
||||
pub show_total_time: bool,
|
||||
pub show_stage_times: bool,
|
||||
}
|
||||
|
||||
impl ReplOptions {
|
||||
pub fn new() -> ReplOptions {
|
||||
ReplOptions {
|
||||
debug_asks: HashSet::new(),
|
||||
show_total_time: true,
|
||||
show_stage_times: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save_to_file(&self, filename: &str) {
|
||||
let res = File::create(filename)
|
||||
.and_then(|mut file| {
|
||||
let buf = crate::serde_json::to_string(self).unwrap();
|
||||
file.write_all(buf.as_bytes())
|
||||
});
|
||||
if let Err(err) = res {
|
||||
println!("Error saving {} file {}", filename, err);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_from_file(filename: &str) -> Result<ReplOptions, ()> {
|
||||
File::open(filename)
|
||||
.and_then(|mut file| {
|
||||
let mut contents = String::new();
|
||||
file.read_to_string(&mut contents)?;
|
||||
Ok(contents)
|
||||
})
|
||||
.and_then(|contents| {
|
||||
let output: ReplOptions = crate::serde_json::from_str(&contents)?;
|
||||
Ok(output)
|
||||
})
|
||||
.map_err(|_| ())
|
||||
}
|
||||
}
|
||||
schala-repl/src/repl/response.rs (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
use colored::*;
|
||||
use std::fmt;
|
||||
use std::fmt::Write;
|
||||
|
||||
use super::ReplOptions;
|
||||
use crate::language::{ DebugAsk, ComputationResponse};
|
||||
|
||||
pub struct ReplResponse {
|
||||
label: Option<String>,
|
||||
text: String,
|
||||
color: Option<Color>
|
||||
}
|
||||
|
||||
impl fmt::Display for ReplResponse {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut buf = String::new();
|
||||
if let Some(ref label) = self.label {
|
||||
write!(buf, "({})", label).unwrap();
|
||||
}
|
||||
write!(buf, "=> {}", self.text).unwrap();
|
||||
write!(f, "{}", match self.color {
|
||||
Some(c) => buf.color(c),
|
||||
None => buf.normal()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn handle_computation_response(response: ComputationResponse, options: &ReplOptions) -> Vec<ReplResponse> {
|
||||
let mut responses = vec![];
|
||||
|
||||
if options.show_total_time {
|
||||
responses.push(ReplResponse {
|
||||
label: Some("Total time".to_string()),
|
||||
text: format!("{:?}", response.global_output_stats.total_duration),
|
||||
color: None,
|
||||
});
|
||||
}
|
||||
|
||||
if options.show_stage_times {
|
||||
responses.push(ReplResponse {
|
||||
label: Some("Stage times".to_string()),
|
||||
text: format!("{:?}", response.global_output_stats.stage_durations),
|
||||
color: None,
|
||||
});
|
||||
}
|
||||
|
||||
for debug_resp in response.debug_responses {
|
||||
let stage_name = match debug_resp.ask {
|
||||
DebugAsk::ByStage { stage_name, .. } => stage_name,
|
||||
_ => continue,
|
||||
};
|
||||
responses.push(ReplResponse {
|
||||
label: Some(stage_name.to_string()),
|
||||
text: debug_resp.value,
|
||||
color: Some(Color::Red),
|
||||
});
|
||||
}
|
||||
|
||||
responses.push(match response.main_output {
|
||||
Ok(s) => ReplResponse { label: None, text: s, color: None },
|
||||
Err(e) => ReplResponse { label: Some("Error".to_string()), text: e, color: Some(Color::Red) },
|
||||
});
|
||||
|
||||
responses
|
||||
}
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
use rocket;
|
||||
use rocket::State;
|
||||
use rocket::response::Content;
|
||||
use rocket::http::ContentType;
|
||||
use rocket_contrib::Json;
|
||||
use language::{ProgrammingLanguageInterface, EvalOptions};
|
||||
use WEBFILES;
|
||||
use ::PLIGenerator;
|
||||
|
||||
#[get("/")]
|
||||
fn index() -> Content<String> {
|
||||
let path = "static/index.html";
|
||||
let html_contents = String::from_utf8(WEBFILES.get(path).unwrap().into_owned()).unwrap();
|
||||
Content(ContentType::HTML, html_contents)
|
||||
}
|
||||
|
||||
#[get("/bundle.js")]
|
||||
fn js_bundle() -> Content<String> {
|
||||
let path = "static/bundle.js";
|
||||
let js_contents = String::from_utf8(WEBFILES.get(path).unwrap().into_owned()).unwrap();
|
||||
Content(ContentType::JavaScript, js_contents)
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct Input {
|
||||
source: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct Output {
|
||||
text: String,
|
||||
}
|
||||
|
||||
#[post("/input", format = "application/json", data = "<input>")]
|
||||
fn interpreter_input(input: Json<Input>, generators: State<Vec<PLIGenerator>>) -> Json<Output> {
|
||||
let schala_gen = generators.get(0).unwrap();
|
||||
let mut schala: Box<ProgrammingLanguageInterface> = schala_gen();
|
||||
let code_output = schala.execute_pipeline(&input.source, &EvalOptions::default());
|
||||
Json(Output { text: code_output.to_repl() })
|
||||
}
|
||||
|
||||
pub fn web_main(language_generators: Vec<PLIGenerator>) {
|
||||
rocket::ignite().manage(language_generators).mount("/", routes![index, js_bundle, interpreter_input]).launch();
|
||||
}
|
||||
src/main.rs (17 lines changed)
@@ -1,20 +1,15 @@
|
||||
extern crate schala_repl;
|
||||
|
||||
extern crate maaru_lang;
|
||||
extern crate rukka_lang;
|
||||
extern crate robo_lang;
|
||||
//extern crate maaru_lang;
|
||||
//extern crate rukka_lang;
|
||||
//extern crate robo_lang;
|
||||
extern crate schala_lang;
|
||||
use schala_repl::{PLIGenerator, repl_main};
|
||||
use schala_repl::{ProgrammingLanguageInterface, start_repl};
|
||||
|
||||
extern { }
|
||||
|
||||
fn main() {
|
||||
let generators: Vec<PLIGenerator> = vec![
|
||||
Box::new(|| { Box::new(schala_lang::Schala::new())}),
|
||||
Box::new(|| { Box::new(maaru_lang::Maaru::new())}),
|
||||
Box::new(|| { Box::new(robo_lang::Robo::new())}),
|
||||
Box::new(|| { Box::new(rukka_lang::Rukka::new())}),
|
||||
];
|
||||
repl_main(generators);
|
||||
let langs: Vec<Box<dyn ProgrammingLanguageInterface>> = vec![Box::new(schala_lang::Schala::new())];
|
||||
start_repl(langs);
|
||||
}