Compare commits
132 Commits
54b33282ef
...
antiquated
Author | SHA1 | Date | |
---|---|---|---|
|
df2d882538 | ||
|
ab7d6af5f3 | ||
|
012c89b5c4 | ||
|
a6e1a6c36c | ||
|
52b2426f30 | ||
|
e7576b26e9 | ||
|
d9d6f7dc16 | ||
|
1daf23b129 | ||
|
c1b5fa392a | ||
|
4c99be700f | ||
|
2247d9b58e | ||
|
dcec8be307 | ||
|
934a390f2d | ||
|
8e8be1b449 | ||
|
5bfd79669e | ||
|
d913443e97 | ||
|
b5ec8116a2 | ||
|
b778428e98 | ||
|
354dd7d8c1 | ||
|
71911f0ab5 | ||
|
d6c5cd100b | ||
|
88dfa87e85 | ||
|
89a93d59c7 | ||
|
0eccceabd9 | ||
|
9d9331f4b0 | ||
|
1470e7fbdd | ||
|
d3866a1908 | ||
|
0a471ed71c | ||
|
9c0f60b6ce | ||
|
fc463d3807 | ||
|
7221d2cb11 | ||
|
032fe5fed9 | ||
|
bf55e6e82a | ||
|
9adae9c262 | ||
|
91985df449 | ||
|
4b11a6622a | ||
|
884c8e515f | ||
|
d2b5deb802 | ||
|
2ba0fb4869 | ||
|
a9afb6d24e | ||
|
8d5858d3d2 | ||
|
1a48e9b43a | ||
|
1797136156 | ||
|
3e422291f4 | ||
|
bba433c808 | ||
|
194cb2202a | ||
|
5a38ff8f41 | ||
|
de13e69769 | ||
|
8f3c982131 | ||
|
e5b6f2bc2f | ||
|
b760ec7eca | ||
|
94db2ea17f | ||
|
02ead69a44 | ||
|
c1ef0ee506 | ||
|
9a13848f80 | ||
|
1b6a7021e7 | ||
|
2c139df6dd | ||
|
7c3e924194 | ||
|
a41d808da3 | ||
|
eeec85c2b1 | ||
|
ec5bf12a65 | ||
|
bb26d9e674 | ||
|
219f5a183a | ||
|
69d857e94d | ||
|
8365690860 | ||
|
7ae7eaa07b | ||
|
88d2571401 | ||
|
721a499384 | ||
|
ec51659452 | ||
|
44cebec818 | ||
|
7e2b95593f | ||
|
58a1782162 | ||
|
6454cc5ad1 | ||
|
d5cd0dada7 | ||
|
65c745fb30 | ||
|
33573bf268 | ||
|
12a7fe3e3e | ||
|
7f3b4a727f | ||
|
7a8ab3d571 | ||
|
b7b4e75f01 | ||
|
7a9e43bf8e | ||
|
a666ac985b | ||
|
37e85c417e | ||
|
fc088923c0 | ||
|
8ace37c5cf | ||
|
c1e6bc8c4c | ||
|
d37be75478 | ||
|
f1ffeb155a | ||
|
222e0aad08 | ||
|
77030091bb | ||
|
b4b1a0cf63 | ||
|
a2d5f380a8 | ||
|
1cdaaee9a6 | ||
|
e6a9811ee5 | ||
|
ff1d4ef7bb | ||
|
f4029fe31a | ||
|
d38bb2278c | ||
|
96393604c3 | ||
|
29207876ae | ||
|
b0795f2dd4 | ||
|
aec3fd070e | ||
|
da4990107c | ||
|
94ee3e1897 | ||
|
05e1555a9b | ||
|
4b0aced11f | ||
|
205ab7179d | ||
|
abab667c43 | ||
|
869de8c033 | ||
|
ba8fb86e3f | ||
|
a00125d4a5 | ||
|
a93fc48ee8 | ||
|
8fe7fca88c | ||
|
6cd5a9353c | ||
|
671ce54dd3 | ||
|
c67adc3a38 | ||
|
13353f8801 | ||
|
10ea99e95c | ||
|
fa736f2dd4 | ||
|
b7f796322b | ||
|
f9349edf77 | ||
|
c5f7616303 | ||
|
5af42d0828 | ||
|
92c6d7f311 | ||
|
e618498881 | ||
|
a31735da88 | ||
|
96d12f3659 | ||
|
c3d36ab320 | ||
|
7bd6072dae | ||
|
08a4800175 | ||
|
8d7f8f555f | ||
|
fbb0269623 | ||
|
8c48f63a2d |
358
Cargo.lock
generated
358
Cargo.lock
generated
@ -13,9 +13,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aho-corasick"
|
name = "aho-corasick"
|
||||||
version = "0.7.6"
|
version = "0.7.20"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
|
checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"memchr",
|
"memchr",
|
||||||
]
|
]
|
||||||
@ -29,12 +29,6 @@ dependencies = [
|
|||||||
"winapi 0.3.8",
|
"winapi 0.3.8",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "approx"
|
|
||||||
version = "0.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "08abcc3b4e9339e33a3d0a5ed15d84a687350c05689d825e0f6655eef9e76a94"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrayref"
|
name = "arrayref"
|
||||||
version = "0.3.5"
|
version = "0.3.5"
|
||||||
@ -110,6 +104,12 @@ dependencies = [
|
|||||||
"constant_time_eq",
|
"constant_time_eq",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bytecount"
|
||||||
|
version = "0.6.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "byteorder"
|
name = "byteorder"
|
||||||
version = "1.3.2"
|
version = "1.3.2"
|
||||||
@ -134,17 +134,6 @@ version = "1.0.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cgmath"
|
|
||||||
version = "0.16.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "64a4b57c8f4e3a2e9ac07e0f6abc9c24b6fc9e1b54c3478cfb598f3d0023e51c"
|
|
||||||
dependencies = [
|
|
||||||
"approx",
|
|
||||||
"num-traits 0.1.43",
|
|
||||||
"rand 0.4.6",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cloudabi"
|
name = "cloudabi"
|
||||||
version = "0.0.3"
|
version = "0.0.3"
|
||||||
@ -156,12 +145,13 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "colored"
|
name = "colored"
|
||||||
version = "1.8.0"
|
version = "1.9.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "6cdb90b60f2927f8d76139c72dbde7e10c3a2bc47c8594c9c7a66529f2687c03"
|
checksum = "5a5f741c91823341bebf717d4c71bda820630ce065443b58bd1b7451af008355"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"is-terminal",
|
||||||
"lazy_static 1.4.0",
|
"lazy_static 1.4.0",
|
||||||
"winconsole",
|
"winapi 0.3.8",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -186,19 +176,19 @@ version = "0.1.21"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa"
|
checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"quote 1.0.10",
|
"quote",
|
||||||
"syn 1.0.80",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "derivative"
|
name = "derivative"
|
||||||
version = "1.0.3"
|
version = "2.2.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "942ca430eef7a3806595a6737bc388bf51adb888d3fc0dd1b50f1c170167ee3a"
|
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 0.4.30",
|
"proc-macro2",
|
||||||
"quote 0.6.13",
|
"quote",
|
||||||
"syn 0.15.44",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -241,9 +231,9 @@ checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "failure"
|
name = "failure"
|
||||||
version = "0.1.5"
|
version = "0.1.8"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2"
|
checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"backtrace",
|
"backtrace",
|
||||||
"failure_derive",
|
"failure_derive",
|
||||||
@ -251,13 +241,13 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "failure_derive"
|
name = "failure_derive"
|
||||||
version = "0.1.5"
|
version = "0.1.8"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1"
|
checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 0.4.30",
|
"proc-macro2",
|
||||||
"quote 0.6.13",
|
"quote",
|
||||||
"syn 0.15.44",
|
"syn",
|
||||||
"synstructure",
|
"synstructure",
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -292,6 +282,12 @@ dependencies = [
|
|||||||
"unicode-width",
|
"unicode-width",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "hermit-abi"
|
||||||
|
version = "0.3.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "includedir"
|
name = "includedir"
|
||||||
version = "0.2.2"
|
version = "0.2.2"
|
||||||
@ -314,19 +310,30 @@ dependencies = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itertools"
|
name = "is-terminal"
|
||||||
version = "0.10.1"
|
version = "0.4.12"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
|
checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b"
|
||||||
|
dependencies = [
|
||||||
|
"hermit-abi",
|
||||||
|
"libc",
|
||||||
|
"windows-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itertools"
|
||||||
|
version = "0.10.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"either",
|
"either",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itoa"
|
name = "itoa"
|
||||||
version = "0.4.4"
|
version = "1.0.11"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
|
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "kernel32-sys"
|
name = "kernel32-sys"
|
||||||
@ -376,7 +383,7 @@ dependencies = [
|
|||||||
"cc",
|
"cc",
|
||||||
"lazy_static 1.4.0",
|
"lazy_static 1.4.0",
|
||||||
"libc",
|
"libc",
|
||||||
"regex 1.3.1",
|
"regex 1.7.3",
|
||||||
"semver",
|
"semver",
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -397,9 +404,15 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memchr"
|
name = "memchr"
|
||||||
version = "2.2.1"
|
version = "2.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
|
checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "minimal-lexical"
|
||||||
|
version = "0.2.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "miniz-sys"
|
name = "miniz-sys"
|
||||||
@ -462,6 +475,27 @@ dependencies = [
|
|||||||
"version_check 0.1.5",
|
"version_check 0.1.5",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nom"
|
||||||
|
version = "7.1.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
"minimal-lexical",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nom_locate"
|
||||||
|
version = "4.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1e3c83c053b0713da60c5b8de47fe8e494fe3ece5267b2f23090a07a053ba8f3"
|
||||||
|
dependencies = [
|
||||||
|
"bytecount",
|
||||||
|
"memchr",
|
||||||
|
"nom 7.1.3",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "num"
|
name = "num"
|
||||||
version = "0.1.42"
|
version = "0.1.42"
|
||||||
@ -473,7 +507,7 @@ dependencies = [
|
|||||||
"num-integer",
|
"num-integer",
|
||||||
"num-iter",
|
"num-iter",
|
||||||
"num-rational",
|
"num-rational",
|
||||||
"num-traits 0.2.8",
|
"num-traits",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -483,7 +517,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "e63899ad0da84ce718c14936262a41cee2c79c981fc0a0e7c7beb47d5a07e8c1"
|
checksum = "e63899ad0da84ce718c14936262a41cee2c79c981fc0a0e7c7beb47d5a07e8c1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"num-integer",
|
"num-integer",
|
||||||
"num-traits 0.2.8",
|
"num-traits",
|
||||||
"rand 0.4.6",
|
"rand 0.4.6",
|
||||||
"rustc-serialize",
|
"rustc-serialize",
|
||||||
]
|
]
|
||||||
@ -494,7 +528,7 @@ version = "0.1.43"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b288631d7878aaf59442cffd36910ea604ecd7745c36054328595114001c9656"
|
checksum = "b288631d7878aaf59442cffd36910ea604ecd7745c36054328595114001c9656"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"num-traits 0.2.8",
|
"num-traits",
|
||||||
"rustc-serialize",
|
"rustc-serialize",
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -505,7 +539,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
|
checksum = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"autocfg",
|
"autocfg",
|
||||||
"num-traits 0.2.8",
|
"num-traits",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -516,7 +550,7 @@ checksum = "76bd5272412d173d6bf9afdf98db8612bbabc9a7a830b7bfc9c188911716132e"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"autocfg",
|
"autocfg",
|
||||||
"num-integer",
|
"num-integer",
|
||||||
"num-traits 0.2.8",
|
"num-traits",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -527,19 +561,10 @@ checksum = "ee314c74bd753fc86b4780aa9475da469155f3848473a261d2d18e35245a784e"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"num-bigint",
|
"num-bigint",
|
||||||
"num-integer",
|
"num-integer",
|
||||||
"num-traits 0.2.8",
|
"num-traits",
|
||||||
"rustc-serialize",
|
"rustc-serialize",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "num-traits"
|
|
||||||
version = "0.1.43"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31"
|
|
||||||
dependencies = [
|
|
||||||
"num-traits 0.2.8",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "num-traits"
|
name = "num-traits"
|
||||||
version = "0.2.8"
|
version = "0.2.8"
|
||||||
@ -560,8 +585,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "peg"
|
name = "peg"
|
||||||
version = "0.7.0"
|
version = "0.8.4"
|
||||||
source = "git+https://github.com/kevinmehall/rust-peg?rev=960222580c8da25b17d32c2aae6f52f902728b62#960222580c8da25b17d32c2aae6f52f902728b62"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "295283b02df346d1ef66052a757869b2876ac29a6bb0ac3f5f7cd44aebe40e8f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"peg-macros",
|
"peg-macros",
|
||||||
"peg-runtime",
|
"peg-runtime",
|
||||||
@ -569,18 +595,20 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "peg-macros"
|
name = "peg-macros"
|
||||||
version = "0.7.0"
|
version = "0.8.4"
|
||||||
source = "git+https://github.com/kevinmehall/rust-peg?rev=960222580c8da25b17d32c2aae6f52f902728b62#960222580c8da25b17d32c2aae6f52f902728b62"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bdad6a1d9cf116a059582ce415d5f5566aabcd4008646779dab7fdc2a9a9d426"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"peg-runtime",
|
"peg-runtime",
|
||||||
"proc-macro2 1.0.30",
|
"proc-macro2",
|
||||||
"quote 1.0.10",
|
"quote",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "peg-runtime"
|
name = "peg-runtime"
|
||||||
version = "0.7.0"
|
version = "0.8.3"
|
||||||
source = "git+https://github.com/kevinmehall/rust-peg?rev=960222580c8da25b17d32c2aae6f52f902728b62#960222580c8da25b17d32c2aae6f52f902728b62"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e3aeb8f54c078314c2065ee649a7241f46b9d8e418e1a9581ba0546657d7aa3a"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "phf"
|
name = "phf"
|
||||||
@ -632,31 +660,13 @@ dependencies = [
|
|||||||
"output_vt100",
|
"output_vt100",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proc-macro2"
|
|
||||||
version = "0.4.30"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
|
|
||||||
dependencies = [
|
|
||||||
"unicode-xid 0.1.0",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.30"
|
version = "1.0.30"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70"
|
checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-xid 0.2.0",
|
"unicode-xid",
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "quote"
|
|
||||||
version = "0.6.13"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2 0.4.30",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -665,14 +675,14 @@ version = "1.0.10"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
|
checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.30",
|
"proc-macro2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "radix_trie"
|
name = "radix_trie"
|
||||||
version = "0.1.5"
|
version = "0.1.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "deafbff7bda0a58975ce92723aa18be10eae2e9271f3c3f48de52d131d9f248c"
|
checksum = "3d3681b28cd95acfb0560ea9441f82d6a4504fa3b15b97bd7b6e952131820e95"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"endian-type",
|
"endian-type",
|
||||||
"nibble_vec",
|
"nibble_vec",
|
||||||
@ -839,14 +849,13 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "1.3.1"
|
version = "1.7.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd"
|
checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick 0.7.6",
|
"aho-corasick 0.7.20",
|
||||||
"memchr",
|
"memchr",
|
||||||
"regex-syntax 0.6.12",
|
"regex-syntax 0.6.29",
|
||||||
"thread_local",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -860,15 +869,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.6.12"
|
version = "0.6.29"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
|
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rgb"
|
|
||||||
version = "0.8.14"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "2089e4031214d129e201f8c3c8c2fe97cd7322478a0d1cdf78e7029b0042efdb"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rust-argon2"
|
name = "rust-argon2"
|
||||||
@ -889,9 +892,9 @@ checksum = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustc-serialize"
|
name = "rustc-serialize"
|
||||||
version = "0.3.24"
|
version = "0.3.25"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
|
checksum = "fe834bc780604f4674073badbad26d7219cadfb4a2275802db12cbae17498401"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ryu"
|
name = "ryu"
|
||||||
@ -919,24 +922,17 @@ dependencies = [
|
|||||||
"ena",
|
"ena",
|
||||||
"failure",
|
"failure",
|
||||||
"itertools",
|
"itertools",
|
||||||
|
"nom 7.1.3",
|
||||||
|
"nom_locate",
|
||||||
"peg",
|
"peg",
|
||||||
"pretty_assertions",
|
"pretty_assertions",
|
||||||
"radix_trie",
|
"radix_trie",
|
||||||
"schala-lang-codegen",
|
|
||||||
"schala-repl",
|
"schala-repl",
|
||||||
"stopwatch",
|
"stopwatch",
|
||||||
"take_mut",
|
"take_mut",
|
||||||
"test-case",
|
"test-case",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "schala-lang-codegen"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"quote 0.6.13",
|
|
||||||
"syn 0.15.44",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "schala-repl"
|
name = "schala-repl"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
@ -974,26 +970,26 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde"
|
name = "serde"
|
||||||
version = "1.0.101"
|
version = "1.0.185"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9796c9b7ba2ffe7a9ce53c2287dfc48080f4b2b362fcc245a259b3a7201119dd"
|
checksum = "be9b6f69f1dfd54c3b568ffa45c310d6973a5e5148fd40cf515acaf38cf5bc31"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_derive"
|
name = "serde_derive"
|
||||||
version = "1.0.101"
|
version = "1.0.136"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4b133a43a1ecd55d4086bd5b4dc6c1751c68b1bfbeba7a5040442022c7e7c02e"
|
checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.30",
|
"proc-macro2",
|
||||||
"quote 1.0.10",
|
"quote",
|
||||||
"syn 1.0.80",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_json"
|
name = "serde_json"
|
||||||
version = "1.0.40"
|
version = "1.0.109"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "051c49229f282f7c6f3813f8286cc1e3323e8051823fce42c7ea80fe13521704"
|
checksum = "cb0652c533506ad7a2e353cce269330d6afd8bdfb6d75e0ace5b35aacbd7b9e9"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"itoa",
|
"itoa",
|
||||||
"ryu",
|
"ryu",
|
||||||
@ -1030,38 +1026,27 @@ dependencies = [
|
|||||||
"num",
|
"num",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "syn"
|
|
||||||
version = "0.15.44"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2 0.4.30",
|
|
||||||
"quote 0.6.13",
|
|
||||||
"unicode-xid 0.1.0",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "1.0.80"
|
version = "1.0.80"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194"
|
checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.30",
|
"proc-macro2",
|
||||||
"quote 1.0.10",
|
"quote",
|
||||||
"unicode-xid 0.2.0",
|
"unicode-xid",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "synstructure"
|
name = "synstructure"
|
||||||
version = "0.10.2"
|
version = "0.12.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f"
|
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 0.4.30",
|
"proc-macro2",
|
||||||
"quote 0.6.13",
|
"quote",
|
||||||
"syn 0.15.44",
|
"syn",
|
||||||
"unicode-xid 0.1.0",
|
"unicode-xid",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -1077,7 +1062,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "8e51065bafd2abe106b6036483b69d1741f4a1ec56ce8a2378de341637de689e"
|
checksum = "8e51065bafd2abe106b6036483b69d1741f4a1ec56ce8a2378de341637de689e"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"fnv",
|
"fnv",
|
||||||
"nom",
|
"nom 4.2.3",
|
||||||
"phf",
|
"phf",
|
||||||
"phf_codegen",
|
"phf_codegen",
|
||||||
]
|
]
|
||||||
@ -1089,9 +1074,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "3b114ece25254e97bf48dd4bfc2a12bad0647adacfe4cae1247a9ca6ad302cec"
|
checksum = "3b114ece25254e97bf48dd4bfc2a12bad0647adacfe4cae1247a9ca6ad302cec"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if 1.0.0",
|
"cfg-if 1.0.0",
|
||||||
"proc-macro2 1.0.30",
|
"proc-macro2",
|
||||||
"quote 1.0.10",
|
"quote",
|
||||||
"syn 1.0.80",
|
"syn",
|
||||||
"version_check 0.9.3",
|
"version_check 0.9.3",
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -1125,12 +1110,6 @@ version = "0.1.6"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20"
|
checksum = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unicode-xid"
|
|
||||||
version = "0.1.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-xid"
|
name = "unicode-xid"
|
||||||
version = "0.2.0"
|
version = "0.2.0"
|
||||||
@ -1206,13 +1185,74 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "winconsole"
|
name = "windows-sys"
|
||||||
version = "0.10.0"
|
version = "0.52.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3ef84b96d10db72dd980056666d7f1e7663ce93d82fa33b63e71c966f4cf5032"
|
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cgmath",
|
"windows-targets",
|
||||||
"lazy_static 1.4.0",
|
|
||||||
"rgb",
|
|
||||||
"winapi 0.3.8",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-targets"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||||
|
dependencies = [
|
||||||
|
"windows_aarch64_gnullvm",
|
||||||
|
"windows_aarch64_msvc",
|
||||||
|
"windows_i686_gnu",
|
||||||
|
"windows_i686_gnullvm",
|
||||||
|
"windows_i686_msvc",
|
||||||
|
"windows_x86_64_gnu",
|
||||||
|
"windows_x86_64_gnullvm",
|
||||||
|
"windows_x86_64_msvc",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_gnullvm"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_msvc"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_gnu"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_gnullvm"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_msvc"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnu"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnullvm"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_msvc"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||||
|
@ -9,7 +9,7 @@ resolver = "2"
|
|||||||
getopts = "0.2.21"
|
getopts = "0.2.21"
|
||||||
|
|
||||||
schala-repl = { path = "schala-repl" }
|
schala-repl = { path = "schala-repl" }
|
||||||
schala-lang = { path = "schala-lang/language" }
|
schala-lang = { path = "schala-lang" }
|
||||||
# maaru-lang = { path = "maaru" }
|
# maaru-lang = { path = "maaru" }
|
||||||
# rukka-lang = { path = "rukka" }
|
# rukka-lang = { path = "rukka" }
|
||||||
# robo-lang = { path = "robo" }
|
# robo-lang = { path = "robo" }
|
||||||
|
20
README.md
20
README.md
@ -20,6 +20,26 @@ environment. Type `:help` for more information, or type in text in any
|
|||||||
supported programming language (currently only `schala-lang`) to evaluate it in
|
supported programming language (currently only `schala-lang`) to evaluate it in
|
||||||
the REPL.
|
the REPL.
|
||||||
|
|
||||||
|
### Examples
|
||||||
|
|
||||||
|
Try running the following `schala-lang` code example in the REPL:
|
||||||
|
|
||||||
|
```
|
||||||
|
>> 1 + 1
|
||||||
|
(Total time)=> 736.368µs
|
||||||
|
=> 2
|
||||||
|
>> fn foo(x) { x + 10 }
|
||||||
|
(Total time)=> 772.496µs
|
||||||
|
=>
|
||||||
|
>> foo(0)
|
||||||
|
(Total time)=> 593.591µs
|
||||||
|
=> 10
|
||||||
|
>> 5 + foo(1)
|
||||||
|
(Total time)=> 1.119916ms
|
||||||
|
=> 16
|
||||||
|
>>
|
||||||
|
```
|
||||||
|
|
||||||
## History
|
## History
|
||||||
|
|
||||||
Schala started out life as an experiment in writing a Javascript-like
|
Schala started out life as an experiment in writing a Javascript-like
|
||||||
|
17
TODO.md
17
TODO.md
@ -1,5 +1,16 @@
|
|||||||
# Immediate TODOs / General Code Cleanup
|
# Immediate TODOs / General Code Cleanup
|
||||||
|
|
||||||
|
## Parsing
|
||||||
|
|
||||||
|
* cf. https://siraben.dev/2022/03/22/tree-sitter-linter.html write a tree-sitter parser for Schala
|
||||||
|
|
||||||
|
* Create a macro system, perhaps c.f. Crystal's?
|
||||||
|
* Macro system should be able to implement:
|
||||||
|
* printf-style variadic arguments
|
||||||
|
* something like the Rust/Haskell `Derive` construct
|
||||||
|
* doing useful things with all variants of an enum
|
||||||
|
* (e.g. what https://matklad.github.io//2022/03/26/self-modifying-code.html tries to solve)
|
||||||
|
|
||||||
|
|
||||||
## Testing
|
## Testing
|
||||||
|
|
||||||
@ -23,9 +34,6 @@
|
|||||||
|
|
||||||
2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away
|
2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away
|
||||||
|
|
||||||
## Parser
|
|
||||||
* I think I can restructure the parser to get rid of most instances of expect!, at least at the beginning of a rule
|
|
||||||
|
|
||||||
## Typechecking
|
## Typechecking
|
||||||
|
|
||||||
* make a type to represent types rather than relying on string comparisons
|
* make a type to represent types rather than relying on string comparisons
|
||||||
@ -44,9 +52,8 @@
|
|||||||
|
|
||||||
## Language Syntax
|
## Language Syntax
|
||||||
|
|
||||||
* a type like `type Klewos = Klewos { <fields> }` (i.e. a type with exactly one record-like variant) should be writeable as
|
|
||||||
`type Klewos = { <fields> }` as a shorthand, and should not require explicit matching.
|
|
||||||
* the `type` declaration should have some kind of GADT-like syntax
|
* the `type` declaration should have some kind of GADT-like syntax
|
||||||
|
* syntactic sugar for typestates? (cf. https://rustype.github.io/notes/notes/rust-typestate-series/rust-typestate-part-1.html )
|
||||||
* use `let` sigil to indicate a variable in a pattern explicitly:
|
* use `let` sigil to indicate a variable in a pattern explicitly:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
2
rust-toolchain.toml
Normal file
2
rust-toolchain.toml
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
[toolchain]
|
||||||
|
channel = "nightly"
|
@ -10,16 +10,17 @@ take_mut = "0.2.2"
|
|||||||
failure = "0.1.5"
|
failure = "0.1.5"
|
||||||
ena = "0.11.0"
|
ena = "0.11.0"
|
||||||
stopwatch = "0.0.7"
|
stopwatch = "0.0.7"
|
||||||
derivative = "1.0.3"
|
derivative = "2.2.0"
|
||||||
colored = "1.8"
|
colored = "1.8"
|
||||||
radix_trie = "0.1.5"
|
radix_trie = "0.1.5"
|
||||||
assert_matches = "1.5"
|
assert_matches = "1.5"
|
||||||
#peg = "0.7.0"
|
#peg = "0.7.0"
|
||||||
peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "960222580c8da25b17d32c2aae6f52f902728b62" }
|
peg = "0.8.1"
|
||||||
|
nom = "7.1.0"
|
||||||
|
nom_locate = "4.0.0"
|
||||||
|
|
||||||
|
|
||||||
schala-lang-codegen = { path = "../codegen" }
|
schala-repl = { path = "../schala-repl" }
|
||||||
schala-repl = { path = "../../schala-repl" }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
test-case = "1.2.0"
|
test-case = "1.2.0"
|
@ -1,13 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "schala-lang-codegen"
|
|
||||||
version = "0.1.0"
|
|
||||||
authors = ["greg <greg.shuflin@protonmail.com>"]
|
|
||||||
edition = "2018"
|
|
||||||
resolver = "2"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
proc-macro = true
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
syn = { version = "0.15.12", features = ["full", "extra-traits", "fold"] }
|
|
||||||
quote = "0.6.8"
|
|
@ -1,54 +0,0 @@
|
|||||||
#![feature(box_patterns)]
|
|
||||||
#![recursion_limit="128"]
|
|
||||||
extern crate proc_macro;
|
|
||||||
#[macro_use]
|
|
||||||
extern crate quote;
|
|
||||||
#[macro_use]
|
|
||||||
extern crate syn;
|
|
||||||
|
|
||||||
use self::proc_macro::TokenStream;
|
|
||||||
use self::syn::fold::Fold;
|
|
||||||
|
|
||||||
struct RecursiveDescentFn {
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Fold for RecursiveDescentFn {
|
|
||||||
fn fold_item_fn(&mut self, mut i: syn::ItemFn) -> syn::ItemFn {
|
|
||||||
let box block = i.block;
|
|
||||||
let ident = &i.ident;
|
|
||||||
|
|
||||||
let new_block: syn::Block = parse_quote! {
|
|
||||||
{
|
|
||||||
let next_token_before_parse = self.token_handler.peek();
|
|
||||||
let record = ParseRecord {
|
|
||||||
production_name: stringify!(#ident).to_string(),
|
|
||||||
next_token: format!("{}", next_token_before_parse.to_string_with_metadata()),
|
|
||||||
level: self.parse_level,
|
|
||||||
};
|
|
||||||
self.parse_level += 1;
|
|
||||||
self.parse_record.push(record);
|
|
||||||
let result = { #block };
|
|
||||||
|
|
||||||
if self.parse_level != 0 {
|
|
||||||
self.parse_level -= 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
result.map_err(|mut parse_error: ParseError| {
|
|
||||||
parse_error.production_name = Some(stringify!(#ident).to_string());
|
|
||||||
parse_error
|
|
||||||
})
|
|
||||||
}
|
|
||||||
};
|
|
||||||
i.block = Box::new(new_block);
|
|
||||||
i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[proc_macro_attribute]
|
|
||||||
pub fn recursive_descent_method(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
|
||||||
|
|
||||||
let input: syn::ItemFn = parse_macro_input!(item as syn::ItemFn);
|
|
||||||
let mut folder = RecursiveDescentFn {};
|
|
||||||
let output = folder.fold_item_fn(input);
|
|
||||||
TokenStream::from(quote!(#output))
|
|
||||||
}
|
|
@ -1,112 +0,0 @@
|
|||||||
use crate::{
|
|
||||||
parsing::ParseError,
|
|
||||||
schala::{SourceReference, Stage},
|
|
||||||
symbol_table::SymbolError,
|
|
||||||
tokenizing::{Location, Token, TokenKind},
|
|
||||||
type_inference::TypeError,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub struct SchalaError {
|
|
||||||
errors: Vec<Error>,
|
|
||||||
//TODO unify these sometime
|
|
||||||
formatted_parse_error: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SchalaError {
|
|
||||||
pub(crate) fn display(&self) -> String {
|
|
||||||
if let Some(ref err) = self.formatted_parse_error {
|
|
||||||
err.clone()
|
|
||||||
} else {
|
|
||||||
self.errors[0].text.as_ref().cloned().unwrap_or_default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub(crate) fn from_type_error(err: TypeError) -> Self {
|
|
||||||
Self {
|
|
||||||
formatted_parse_error: None,
|
|
||||||
errors: vec![Error { location: None, text: Some(err.msg), stage: Stage::Typechecking }],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_symbol_table(symbol_errs: Vec<SymbolError>) -> Self {
|
|
||||||
//TODO this could be better
|
|
||||||
let errors = symbol_errs
|
|
||||||
.into_iter()
|
|
||||||
.map(|_symbol_err| Error {
|
|
||||||
location: None,
|
|
||||||
text: Some("symbol table error".to_string()),
|
|
||||||
stage: Stage::Symbols,
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
Self { errors, formatted_parse_error: None }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_string(text: String, stage: Stage) -> Self {
|
|
||||||
Self { formatted_parse_error: None, errors: vec![Error { location: None, text: Some(text), stage }] }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_parse_error(parse_error: ParseError, source_reference: &SourceReference) -> Self {
|
|
||||||
Self {
|
|
||||||
formatted_parse_error: Some(format_parse_error(parse_error, source_reference)),
|
|
||||||
errors: vec![],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_tokens(tokens: &[Token]) -> Option<SchalaError> {
|
|
||||||
let token_errors: Vec<Error> = tokens
|
|
||||||
.iter()
|
|
||||||
.filter_map(|tok| match tok.kind {
|
|
||||||
TokenKind::Error(ref err) => Some(Error {
|
|
||||||
location: Some(tok.location),
|
|
||||||
text: Some(err.clone()),
|
|
||||||
stage: Stage::Tokenizing,
|
|
||||||
}),
|
|
||||||
_ => None,
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
if token_errors.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(SchalaError { errors: token_errors, formatted_parse_error: None })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
struct Error {
|
|
||||||
location: Option<Location>,
|
|
||||||
text: Option<String>,
|
|
||||||
stage: Stage,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
|
|
||||||
let line_num = error.token.location.line_num;
|
|
||||||
let ch = error.token.location.char_num;
|
|
||||||
let line_from_program = source_reference.get_line(line_num as usize);
|
|
||||||
let location_pointer = format!("{}^", " ".repeat(ch.into()));
|
|
||||||
|
|
||||||
let line_num_digits = format!("{}", line_num).chars().count();
|
|
||||||
let space_padding = " ".repeat(line_num_digits);
|
|
||||||
|
|
||||||
let production = match error.production_name {
|
|
||||||
Some(n) => format!("\n(from production \"{}\")", n),
|
|
||||||
None => "".to_string(),
|
|
||||||
};
|
|
||||||
|
|
||||||
format!(
|
|
||||||
r#"
|
|
||||||
{error_msg}{production}
|
|
||||||
{space_padding} |
|
|
||||||
{line_num} | {}
|
|
||||||
{space_padding} | {}
|
|
||||||
"#,
|
|
||||||
line_from_program,
|
|
||||||
location_pointer,
|
|
||||||
error_msg = error.msg,
|
|
||||||
space_padding = space_padding,
|
|
||||||
line_num = line_num,
|
|
||||||
production = production
|
|
||||||
)
|
|
||||||
}
|
|
File diff suppressed because it is too large
Load Diff
@ -1,494 +0,0 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use crate::ast::*;
|
|
||||||
|
|
||||||
fn rc_string(s: &str) -> Rc<String> {
|
|
||||||
Rc::new(s.to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
peg::parser! {
|
|
||||||
pub grammar schala_parser() for str {
|
|
||||||
|
|
||||||
rule whitespace() = [' ' | '\t' | '\n']*
|
|
||||||
|
|
||||||
rule _ = quiet!{ whitespace() }
|
|
||||||
|
|
||||||
rule __ = quiet!{ [' ' | '\t' ]* }
|
|
||||||
|
|
||||||
pub rule program() -> AST =
|
|
||||||
n:(statement() ** delimiter() ) { AST { id: Default::default(), statements: n.into() } }
|
|
||||||
|
|
||||||
rule delimiter() = (";" / "\n")+
|
|
||||||
|
|
||||||
//Note - this is a hack, ideally the rule `rule block() -> Block = "{" _ items:(statement() **
|
|
||||||
//delimiter()) _ "}" { items.into() }` would've worked, but it doesn't.
|
|
||||||
pub rule block() -> Block = "{" _ items:block_item()* _ "}" { items.into() } /
|
|
||||||
"{" _ stmt:statement() _ "}" { vec![stmt].into() }
|
|
||||||
|
|
||||||
rule block_item() -> Statement =
|
|
||||||
stmt:statement() delimiter()+ { stmt }
|
|
||||||
|
|
||||||
rule statement() -> Statement =
|
|
||||||
kind:statement_kind() { Statement { id: Default::default(), location: Default::default(), kind } }
|
|
||||||
|
|
||||||
rule statement_kind() -> StatementKind =
|
|
||||||
_ decl:declaration() { StatementKind::Declaration(decl) } /
|
|
||||||
_ expr:expression() { StatementKind::Expression(expr) }
|
|
||||||
|
|
||||||
rule declaration() -> Declaration =
|
|
||||||
binding() / type_decl() / annotation() / func()
|
|
||||||
|
|
||||||
rule func() -> Declaration =
|
|
||||||
sig:func_signature() __ body:block() { Declaration::FuncDecl(sig, body) } /
|
|
||||||
sig:func_signature() { Declaration::FuncSig(sig) }
|
|
||||||
|
|
||||||
//TODO handle operators
|
|
||||||
rule func_signature() -> Signature =
|
|
||||||
"fn" _ name:identifier() "(" _ params:formal_params() _ ")" _ type_anno:type_anno()? { Signature {
|
|
||||||
name: rc_string(name), operator: false, params, type_anno
|
|
||||||
} }
|
|
||||||
|
|
||||||
rule formal_params() -> Vec<FormalParam> = params:(formal_param() ** (_ "," _)) {? if params.len() < 256 { Ok(params) } else {
|
|
||||||
Err("function-too-long") }
|
|
||||||
}
|
|
||||||
|
|
||||||
rule formal_param() -> FormalParam =
|
|
||||||
name:identifier() _ anno:type_anno()? _ "=" expr:expression() { FormalParam { name: rc_string(name),
|
|
||||||
default: Some(expr), anno } } /
|
|
||||||
name:identifier() _ anno:type_anno()? { FormalParam { name: rc_string(name), default: None, anno } }
|
|
||||||
|
|
||||||
|
|
||||||
rule annotation() -> Declaration =
|
|
||||||
"@" name:identifier() args:annotation_args()? delimiter() _ inner:statement() { Declaration::Annotation {
|
|
||||||
name: rc_string(name), arguments: if let Some(args) = args { args } else { vec![] }, inner: Box::new(inner) }
|
|
||||||
}
|
|
||||||
|
|
||||||
rule annotation_args() -> Vec<Expression> =
|
|
||||||
"(" _ args:(expression() ** (_ "," _)) _ ")" { args }
|
|
||||||
|
|
||||||
|
|
||||||
rule binding() -> Declaration =
|
|
||||||
"let" _ mutable:"mut"? _ ident:identifier() _ type_anno:type_anno()? _ "=" _ expr:expression() {
|
|
||||||
Declaration::Binding { name: Rc::new(ident.to_string()), constant: mutable.is_none(),
|
|
||||||
type_anno, expr }
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
rule type_decl() -> Declaration =
|
|
||||||
"type" _ "alias" _ alias:type_alias() { alias } /
|
|
||||||
"type" _ mutable:"mut"? _ name:type_singleton_name() _ "=" _ body:type_body() {
|
|
||||||
Declaration::TypeDecl { name, body, mutable: mutable.is_some() }
|
|
||||||
}
|
|
||||||
|
|
||||||
rule type_singleton_name() -> TypeSingletonName =
|
|
||||||
name:identifier() params:type_params()? { TypeSingletonName { name: rc_string(name), params: if let Some(params) = params { params } else { vec![] } } }
|
|
||||||
|
|
||||||
rule type_params() -> Vec<TypeIdentifier> =
|
|
||||||
"<" _ idents:(type_identifier() ** (_ "," _)) _ ">" { idents }
|
|
||||||
|
|
||||||
rule type_identifier() -> TypeIdentifier =
|
|
||||||
"(" _ items:(type_identifier() ** (_ "," _)) _ ")" { TypeIdentifier::Tuple(items) } /
|
|
||||||
singleton:type_singleton_name() { TypeIdentifier::Singleton(singleton) }
|
|
||||||
|
|
||||||
rule type_body() -> TypeBody =
|
|
||||||
"{" _ items:(record_variant_item() ++ (_ "," _)) _ "}" { TypeBody::ImmediateRecord(Default::default(), items) } /
|
|
||||||
variants:(variant_spec() ** (_ "|" _)) { TypeBody::Variants(variants) }
|
|
||||||
|
|
||||||
rule variant_spec() -> Variant =
|
|
||||||
name:identifier() _ "{" _ typed_identifier_list:(record_variant_item() ++ (_ "," _)) _ "}" { Variant {
|
|
||||||
id: Default::default(), name: rc_string(name), kind: VariantKind::Record(typed_identifier_list)
|
|
||||||
} } /
|
|
||||||
name:identifier() "(" tuple_members:(type_identifier() ++ (_ "," _)) ")" { Variant {
|
|
||||||
id: Default::default(), name: rc_string(name), kind: VariantKind::TupleStruct(tuple_members) } } /
|
|
||||||
name:identifier() { Variant { id: Default::default(), name: rc_string(name), kind: VariantKind::UnitStruct } }
|
|
||||||
|
|
||||||
rule record_variant_item() -> (Rc<String>, TypeIdentifier) =
|
|
||||||
name:identifier() _ ":" _ ty:type_identifier() { (rc_string(name), ty) }
|
|
||||||
|
|
||||||
rule type_alias() -> Declaration =
|
|
||||||
alias:identifier() _ "=" _ name:identifier() { Declaration::TypeAlias { alias: rc_string(alias), original: rc_string(name), } }
|
|
||||||
|
|
||||||
rule type_anno() -> TypeIdentifier =
|
|
||||||
":" _ ident:identifier() { TypeIdentifier::Singleton(TypeSingletonName { name: Rc::new(ident.to_string()), params: vec![] }) }
|
|
||||||
|
|
||||||
pub rule expression() -> Expression =
|
|
||||||
_ kind:expression_kind() { Expression { id: Default::default(), type_anno: None, kind: kind } }
|
|
||||||
|
|
||||||
rule expression_no_struct() -> Expression =
|
|
||||||
_ kind:expression_kind_no_struct() { Expression { id: Default::default(), type_anno: None, kind: kind } }
|
|
||||||
|
|
||||||
rule expression_kind() -> ExpressionKind =
|
|
||||||
precedence_expr(true)
|
|
||||||
|
|
||||||
rule expression_kind_no_struct() -> ExpressionKind =
|
|
||||||
precedence_expr(false)
|
|
||||||
|
|
||||||
rule precedence_expr(struct_ok: bool) -> ExpressionKind =
|
|
||||||
first:prefix_expr(struct_ok) _ next:(precedence_continuation(struct_ok))* {
|
|
||||||
let next = next.into_iter().map(|(sigil, expr)| (BinOp::from_sigil(sigil), expr)).collect();
|
|
||||||
BinopSequence { first, next }.do_precedence()
|
|
||||||
}
|
|
||||||
|
|
||||||
rule precedence_continuation(struct_ok: bool) -> (&'input str, ExpressionKind) =
|
|
||||||
op:operator() _ expr:prefix_expr(struct_ok) _ { (op, expr) }
|
|
||||||
|
|
||||||
rule prefix_expr(struct_ok: bool) -> ExpressionKind =
|
|
||||||
prefix:prefix()? expr:extended_expr(struct_ok) {
|
|
||||||
if let Some(p) = prefix {
|
|
||||||
let expr = Expression::new(Default::default(), expr);
|
|
||||||
let prefix = PrefixOp::from_sigil(p);
|
|
||||||
ExpressionKind::PrefixExp(prefix, Box::new(expr))
|
|
||||||
} else {
|
|
||||||
expr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
rule prefix() -> &'input str =
|
|
||||||
$(['+' | '-' | '!' ])
|
|
||||||
|
|
||||||
//TODO make the definition of operators more complex
|
|
||||||
rule operator() -> &'input str =
|
|
||||||
quiet!{$( ['+' | '-' | '*' | '/' | '%' | '<' | '>' | '=' | '!' | '$' | '&' | '|' | '?' | '^' | '`']+ )} /
|
|
||||||
expected!("operator")
|
|
||||||
|
|
||||||
|
|
||||||
rule extended_expr(struct_ok: bool) -> ExpressionKind =
|
|
||||||
item:extended_expr_ok_struct() {? if struct_ok { Ok(item) } else { Err("no-struct-allowed") } } /
|
|
||||||
item:extended_expr_no_struct() {? if !struct_ok { Ok(item) } else { Err("!no-struct-allowed") } }
|
|
||||||
|
|
||||||
#[cache_left_rec]
|
|
||||||
rule extended_expr_ok_struct() -> ExpressionKind =
|
|
||||||
indexee:extended_expr_ok_struct() indexers:index_part() {
|
|
||||||
ExpressionKind::Index {
|
|
||||||
indexee: Box::new(Expression::new(Default::default(), indexee)),
|
|
||||||
indexers,
|
|
||||||
}
|
|
||||||
} /
|
|
||||||
f:extended_expr_ok_struct() arguments:call_part() {
|
|
||||||
ExpressionKind::Call {
|
|
||||||
f: Box::new(Expression::new(Default::default(), f)),
|
|
||||||
arguments,
|
|
||||||
}
|
|
||||||
|
|
||||||
} /
|
|
||||||
expr:extended_expr_ok_struct() "." name:identifier() { ExpressionKind::Access {
|
|
||||||
name: Rc::new(name.to_string()),
|
|
||||||
expr: Box::new(Expression::new(Default::default(),expr)),
|
|
||||||
} } /
|
|
||||||
primary(true)
|
|
||||||
|
|
||||||
#[cache_left_rec]
|
|
||||||
rule extended_expr_no_struct() -> ExpressionKind =
|
|
||||||
indexee:extended_expr_no_struct() indexers:index_part() {
|
|
||||||
ExpressionKind::Index {
|
|
||||||
indexee: Box::new(Expression::new(Default::default(), indexee)),
|
|
||||||
indexers,
|
|
||||||
}
|
|
||||||
} /
|
|
||||||
f:extended_expr_no_struct() arguments:call_part() {
|
|
||||||
ExpressionKind::Call {
|
|
||||||
f: Box::new(Expression::new(Default::default(), f)),
|
|
||||||
arguments,
|
|
||||||
}
|
|
||||||
|
|
||||||
} /
|
|
||||||
expr:extended_expr_no_struct() "." name:identifier() { ExpressionKind::Access {
|
|
||||||
name: Rc::new(name.to_string()),
|
|
||||||
expr: Box::new(Expression::new(Default::default(),expr)),
|
|
||||||
} } /
|
|
||||||
primary(false)
|
|
||||||
|
|
||||||
rule index_part() -> Vec<Expression> =
|
|
||||||
"[" indexers:(expression() ++ ",") "]" { indexers }
|
|
||||||
|
|
||||||
rule call_part() -> Vec<InvocationArgument> =
|
|
||||||
"(" arguments:(invocation_argument() ** ",") ")" { arguments }
|
|
||||||
|
|
||||||
//TODO this shouldn't be an expression b/c type annotations disallowed here
|
|
||||||
rule invocation_argument() -> InvocationArgument =
|
|
||||||
_ "_" _ { InvocationArgument::Ignored } /
|
|
||||||
_ ident:identifier() _ "=" _ expr:expression() { InvocationArgument::Keyword {
|
|
||||||
name: Rc::new(ident.to_string()),
|
|
||||||
expr
|
|
||||||
} } /
|
|
||||||
_ expr:expression() _ { InvocationArgument::Positional(expr) }
|
|
||||||
|
|
||||||
|
|
||||||
rule primary(struct_ok: bool) -> ExpressionKind =
|
|
||||||
while_expr() / for_expr() / float_literal() / nat_literal() / bool_literal() / string_literal() / paren_expr() /
|
|
||||||
list_expr() / if_expr() /
|
|
||||||
item:named_struct() {? if struct_ok { Ok(item) } else { Err("no-struct-allowed") } } /
|
|
||||||
identifier_expr()
|
|
||||||
|
|
||||||
rule for_expr() -> ExpressionKind =
|
|
||||||
"for" _ enumerators:for_enumerators() _ body:for_body() {
|
|
||||||
ExpressionKind::ForExpression { enumerators, body }
|
|
||||||
}
|
|
||||||
|
|
||||||
rule for_enumerators() -> Vec<Enumerator> =
|
|
||||||
"{" _ enumerators:(enumerator() ++ ",") _ "}" { enumerators } /
|
|
||||||
enumerator:enumerator() { vec![enumerator] }
|
|
||||||
|
|
||||||
//TODO add guards, etc.
|
|
||||||
rule enumerator() -> Enumerator =
|
|
||||||
ident:identifier() _ "<-" _ generator:expression_no_struct() {
|
|
||||||
Enumerator { id: Rc::new(ident.to_string()), generator }
|
|
||||||
} /
|
|
||||||
//TODO need to distinguish these two cases in AST
|
|
||||||
ident:identifier() _ "=" _ generator:expression_no_struct() {
|
|
||||||
Enumerator { id: Rc::new(ident.to_string()), generator }
|
|
||||||
}
|
|
||||||
|
|
||||||
rule for_body() -> Box<ForBody> =
|
|
||||||
"return" _ expr:expression() { Box::new(ForBody::MonadicReturn(expr)) } /
|
|
||||||
body:block() { Box::new(ForBody::StatementBlock(body)) }
|
|
||||||
|
|
||||||
rule while_expr() -> ExpressionKind =
|
|
||||||
"while" _ cond:expression_kind_no_struct()? _ body:block() {
|
|
||||||
ExpressionKind::WhileExpression {
|
|
||||||
condition: cond.map(|kind| Box::new(Expression::new(Default::default(), kind))),
|
|
||||||
body,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
rule identifier_expr() -> ExpressionKind =
|
|
||||||
qn:qualified_identifier() { ExpressionKind::Value(qn) }
|
|
||||||
|
|
||||||
rule named_struct() -> ExpressionKind =
|
|
||||||
name:qualified_identifier() _ fields:record_block() {
|
|
||||||
ExpressionKind::NamedStruct {
|
|
||||||
name,
|
|
||||||
fields: fields.into_iter().map(|(n, exp)| (Rc::new(n.to_string()), exp)).collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
//TODO anonymous structs, update syntax for structs
|
|
||||||
rule record_block() -> Vec<(&'input str, Expression)> =
|
|
||||||
"{" _ entries:(record_entry() ** ",") _ "}" { entries }
|
|
||||||
|
|
||||||
rule record_entry() -> (&'input str, Expression) =
|
|
||||||
_ name:identifier() _ ":" _ expr:expression() _ { (name, expr) }
|
|
||||||
|
|
||||||
rule qualified_identifier() -> QualifiedName =
|
|
||||||
names:(identifier() ++ "::") { QualifiedName { id: Default::default(), components: names.into_iter().map(|name| Rc::new(name.to_string())).collect() } }
|
|
||||||
|
|
||||||
//TODO improve the definition of identifiers
|
|
||||||
rule identifier() -> &'input str =
|
|
||||||
$(['a'..='z' | 'A'..='Z' | '_'] ['a'..='z' | 'A'..='Z' | '0'..='9' | '_']*)
|
|
||||||
|
|
||||||
|
|
||||||
rule if_expr() -> ExpressionKind =
|
|
||||||
"if" _ discriminator:(expression()?) _ body:if_expr_body() {
|
|
||||||
ExpressionKind::IfExpression {
|
|
||||||
discriminator: discriminator.map(Box::new),
|
|
||||||
body: Box::new(body),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
rule if_expr_body() -> IfExpressionBody =
|
|
||||||
cond_block() / simple_pattern_match() / simple_conditional()
|
|
||||||
|
|
||||||
rule simple_conditional() -> IfExpressionBody =
|
|
||||||
"then" _ then_case:expr_or_block() _ else_case:else_case() {
|
|
||||||
IfExpressionBody::SimpleConditional { then_case, else_case }
|
|
||||||
}
|
|
||||||
|
|
||||||
rule simple_pattern_match() -> IfExpressionBody =
|
|
||||||
"is" _ pattern:pattern() _ "then" _ then_case:expr_or_block() _ else_case:else_case() {
|
|
||||||
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case }
|
|
||||||
}
|
|
||||||
|
|
||||||
rule cond_block() -> IfExpressionBody =
|
|
||||||
"{" _ cond_arms:(cond_arm() ++ ",") _ "}" { IfExpressionBody::CondList(cond_arms) }
|
|
||||||
|
|
||||||
rule cond_arm() -> ConditionArm =
|
|
||||||
_ "else" _ body:expr_or_block() { ConditionArm { condition: Condition::Else, guard: None, body } } /
|
|
||||||
_ condition:condition() _ guard:condition_guard() _ "then" _ body:expr_or_block()
|
|
||||||
{ ConditionArm { condition, guard, body } }
|
|
||||||
|
|
||||||
rule condition() -> Condition =
|
|
||||||
"is" _ pat:pattern() { Condition::Pattern(pat) } /
|
|
||||||
op:operator() _ expr:expression() { Condition::TruncatedOp(BinOp::from_sigil(op), expr) }
|
|
||||||
|
|
||||||
rule condition_guard() -> Option<Expression> =
|
|
||||||
("if" _ expr:expression() { expr } )?
|
|
||||||
|
|
||||||
|
|
||||||
rule expr_or_block() -> Block = block() / ex:expression() {
|
|
||||||
Statement {
|
|
||||||
id: Default::default(), location: Default::default(),
|
|
||||||
kind: StatementKind::Expression(ex)
|
|
||||||
}.into()
|
|
||||||
}
|
|
||||||
|
|
||||||
rule else_case() -> Option<Block> =
|
|
||||||
("else" _ eorb:expr_or_block() { eorb })?
|
|
||||||
|
|
||||||
rule pattern() -> Pattern =
|
|
||||||
"(" _ variants:(pattern() ++ ",") _ ")" { Pattern::TuplePattern(variants) } /
|
|
||||||
_ pat:simple_pattern() { pat }
|
|
||||||
|
|
||||||
rule simple_pattern() -> Pattern =
|
|
||||||
pattern_literal() /
|
|
||||||
qn:qualified_identifier() "(" members:(pattern() ** ",") ")" {
|
|
||||||
Pattern::TupleStruct(qn, members)
|
|
||||||
} /
|
|
||||||
qn:qualified_identifier() _ "{" _ items:(record_pattern_entry() ** ",") "}" _ {
|
|
||||||
let items = items.into_iter().map(|(name, pat)| (Rc::new(name.to_string()), pat)).collect();
|
|
||||||
Pattern::Record(qn, items)
|
|
||||||
} /
|
|
||||||
qn:qualified_identifier() { Pattern::VarOrName(qn) }
|
|
||||||
|
|
||||||
rule record_pattern_entry() -> (&'input str, Pattern) =
|
|
||||||
_ name:identifier() _ ":" _ pat:pattern() _ { (name, pat) } /
|
|
||||||
_ name:identifier() _ {
|
|
||||||
let qn = QualifiedName {
|
|
||||||
id: Default::default(),
|
|
||||||
components: vec![Rc::new(name.to_string())],
|
|
||||||
};
|
|
||||||
(name, Pattern::VarOrName(qn))
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
rule pattern_literal() -> Pattern =
|
|
||||||
"true" { Pattern::Literal(PatternLiteral::BoolPattern(true)) } /
|
|
||||||
"false" { Pattern::Literal(PatternLiteral::BoolPattern(false)) } /
|
|
||||||
s:bare_string_literal() { Pattern::Literal(PatternLiteral::StringPattern(Rc::new(s.to_string()))) } /
|
|
||||||
sign:("-"?) num:nat_literal() {
|
|
||||||
let neg = sign.is_some();
|
|
||||||
Pattern::Literal(PatternLiteral::NumPattern { neg, num })
|
|
||||||
} /
|
|
||||||
"_" { Pattern::Ignored }
|
|
||||||
|
|
||||||
|
|
||||||
rule list_expr() -> ExpressionKind =
|
|
||||||
"[" exprs:(expression() ** ",") "]" {
|
|
||||||
let mut exprs = exprs;
|
|
||||||
ExpressionKind::ListLiteral(exprs)
|
|
||||||
}
|
|
||||||
|
|
||||||
rule paren_expr() -> ExpressionKind =
|
|
||||||
"(" exprs:(expression() ** ",") ")" {
|
|
||||||
let mut exprs = exprs;
|
|
||||||
match exprs.len() {
|
|
||||||
1 => exprs.pop().unwrap().kind,
|
|
||||||
_ => ExpressionKind::TupleLiteral(exprs),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
rule string_literal() -> ExpressionKind =
|
|
||||||
s:bare_string_literal(){ ExpressionKind::StringLiteral(Rc::new(s.to_string())) }
|
|
||||||
|
|
||||||
//TODO string escapes, prefixes
|
|
||||||
rule bare_string_literal() -> &'input str =
|
|
||||||
"\"" items:$([^ '"' ]*) "\"" { items }
|
|
||||||
|
|
||||||
rule bool_literal() -> ExpressionKind =
|
|
||||||
"true" { ExpressionKind::BoolLiteral(true) } / "false" { ExpressionKind::BoolLiteral(false) }
|
|
||||||
|
|
||||||
rule nat_literal() -> ExpressionKind =
|
|
||||||
bin_literal() / hex_literal() / unmarked_literal()
|
|
||||||
|
|
||||||
rule unmarked_literal() -> ExpressionKind =
|
|
||||||
digits:digits() { ExpressionKind::NatLiteral(digits.parse().unwrap()) }
|
|
||||||
|
|
||||||
rule bin_literal() -> ExpressionKind =
|
|
||||||
"0b" digits:bin_digits() { ExpressionKind::NatLiteral(parse_binary(digits)) }
|
|
||||||
|
|
||||||
rule hex_literal() -> ExpressionKind =
|
|
||||||
"0x" digits:hex_digits() { ExpressionKind::NatLiteral(parse_hex(digits)) }
|
|
||||||
|
|
||||||
rule float_literal() -> ExpressionKind =
|
|
||||||
ds:$( digits() "." digits()? / "." digits() ) { ExpressionKind::FloatLiteral(ds.parse().unwrap()) }
|
|
||||||
|
|
||||||
rule digits() -> &'input str = $((digit_group() "_"*)+)
|
|
||||||
rule bin_digits() -> &'input str = $((bin_digit_group() "_"*)+)
|
|
||||||
rule hex_digits() -> &'input str = $((hex_digit_group() "_"*)+)
|
|
||||||
|
|
||||||
rule digit_group() -> &'input str = $(['0'..='9']+)
|
|
||||||
rule bin_digit_group() -> &'input str = $(['0' | '1']+)
|
|
||||||
rule hex_digit_group() -> &'input str = $(['0'..='9' | 'a'..='f' | 'A'..='F']+)
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_binary(digits: &str /*, tok: Token*/) -> u64 {
|
|
||||||
let mut result: u64 = 0;
|
|
||||||
let mut multiplier = 1;
|
|
||||||
for d in digits.chars().rev() {
|
|
||||||
match d {
|
|
||||||
'1' => result += multiplier,
|
|
||||||
'0' => (),
|
|
||||||
'_' => continue,
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
multiplier = match multiplier.checked_mul(2) {
|
|
||||||
Some(m) => m,
|
|
||||||
None =>
|
|
||||||
/*return ParseError::new_with_token("This binary expression will overflow", tok),*/
|
|
||||||
panic!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
//Ok(result)
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
//TODO fix these two functions
|
|
||||||
fn parse_hex(digits: &str) -> u64 {
|
|
||||||
let mut result: u64 = 0;
|
|
||||||
let mut multiplier: u64 = 1;
|
|
||||||
for d in digits.chars().rev() {
|
|
||||||
if d == '_' {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
match d.to_digit(16) {
|
|
||||||
Some(n) => result += n as u64 * multiplier,
|
|
||||||
None => panic!(),
|
|
||||||
}
|
|
||||||
multiplier = match multiplier.checked_mul(16) {
|
|
||||||
Some(m) => m,
|
|
||||||
None => panic!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct BinopSequence {
|
|
||||||
first: ExpressionKind,
|
|
||||||
next: Vec<(BinOp, ExpressionKind)>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BinopSequence {
    /// Fold the flat operator sequence into a properly nested expression
    /// tree via precedence climbing: each recursive `helper` call consumes
    /// only operators that bind tighter than its `precedence` argument, so
    /// higher-precedence operators end up deeper in the tree.
    fn do_precedence(self) -> ExpressionKind {
        fn helper(
            precedence: i32,
            lhs: ExpressionKind,
            rest: &mut Vec<(BinOp, ExpressionKind)>,
        ) -> Expression {
            // Expression ids/metadata are defaulted here — presumably filled
            // in (or ignored) by a later pass; TODO confirm.
            let mut lhs = Expression::new(Default::default(), lhs);
            loop {
                let (next_op, next_rhs) = match rest.pop() {
                    Some((a, b)) => (a, b),
                    None => break,
                };
                let new_precedence = next_op.get_precedence();
                if precedence >= new_precedence {
                    // The next operator binds no tighter than the caller's
                    // context: push it back and let an outer call claim it.
                    rest.push((next_op, next_rhs));
                    break;
                }
                // The next operator binds tighter: recursively build its
                // right-hand side before combining into a BinExp node.
                let rhs = helper(new_precedence, next_rhs, rest);
                lhs = Expression::new(
                    Default::default(),
                    ExpressionKind::BinExp(next_op, Box::new(lhs), Box::new(rhs)),
                );
            }
            lhs
        }
        // Reverse the pairs so `pop` yields them in source order.
        let mut as_stack = self.next.into_iter().rev().collect();
        helper(BinOp::min_precedence(), self.first, &mut as_stack).kind
    }
}
|
|
@ -1,460 +0,0 @@
|
|||||||
#![allow(clippy::upper_case_acronyms)]
|
|
||||||
|
|
||||||
use std::{
|
|
||||||
convert::{TryFrom, TryInto},
|
|
||||||
fmt,
|
|
||||||
iter::{Iterator, Peekable},
|
|
||||||
rc::Rc,
|
|
||||||
};
|
|
||||||
|
|
||||||
use itertools::Itertools;
|
|
||||||
|
|
||||||
/// A location in a particular source file. Note that the
/// sizes of the internal unsigned integer types limit
/// the size of a source file to 2^32 lines of
/// at most 2^16 characters, which should be plenty big.
#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub struct Location {
    // 0-based line index (tokenize stores `enumerate()` indices directly).
    pub(crate) line_num: u32,
    // 0-based character (column) index within the line.
    pub(crate) char_num: u16,
}
|
|
||||||
|
|
||||||
impl fmt::Display for Location {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
write!(f, "{}:{}", self.line_num, self.char_num)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Every kind of token the tokenizer can emit, along with any payload
/// (identifier text, literal contents, etc.).
#[derive(Debug, PartialEq, Clone)]
pub enum TokenKind {
    Newline,
    Semicolon,

    LParen,
    RParen,
    LSquareBracket,
    RSquareBracket,
    LAngleBracket,
    RAngleBracket,
    LCurlyBrace,
    RCurlyBrace,
    Pipe,
    Backslash,
    AtSign,

    Comma,
    Period,
    Colon,
    Underscore,
    Slash,
    Equals,

    // A run of operator characters (e.g. `++`) or a backtick-quoted word.
    Operator(Rc<String>),
    // A run of decimal digits; numeric parsing happens later.
    DigitGroup(Rc<String>),
    // The hex digits following `0x` (prefix not included).
    HexLiteral(Rc<String>),
    // The `0b` prefix alone; the binary digits follow as separate tokens.
    BinNumberSigil,
    // String literal contents (escapes already processed) plus an optional
    // prefix such as the `b` in `b"bytes"`.
    StrLiteral { s: Rc<String>, prefix: Option<Rc<String>> },
    Identifier(Rc<String>),
    Keyword(Kw),

    // End-of-input marker; not produced by `tokenize` in this file —
    // TODO confirm who emits it.
    EOF,

    // Tokenization failure, carrying a human-readable message.
    Error(String),
}
|
|
||||||
use self::TokenKind::*;
|
|
||||||
|
|
||||||
impl fmt::Display for TokenKind {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
&Operator(ref s) => write!(f, "Operator({})", **s),
|
|
||||||
&DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
|
|
||||||
&HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
|
|
||||||
&StrLiteral { ref s, .. } => write!(f, "StrLiteral({})", s),
|
|
||||||
&Identifier(ref s) => write!(f, "Identifier({})", s),
|
|
||||||
&Error(ref s) => write!(f, "Error({})", s),
|
|
||||||
other => write!(f, "{:?}", other),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Reserved keywords of the language. The `TryFrom<&str>` impl below gives
/// the concrete source spelling for each variant.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Kw {
    If,
    Then,
    Else,
    Is,
    // Spelled `fn` in source.
    Func,
    For,
    While,
    Let,
    In,
    Mut,
    Return,
    Continue,
    Break,
    Alias,
    Type,
    // `Self` (capitalized) — the self type.
    SelfType,
    // `self` (lowercase) — the self value.
    SelfIdent,
    Interface,
    Impl,
    True,
    False,
    Module,
    Import,
}
|
|
||||||
|
|
||||||
impl TryFrom<&str> for Kw {
|
|
||||||
type Error = ();
|
|
||||||
|
|
||||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
|
||||||
Ok(match value {
|
|
||||||
"if" => Kw::If,
|
|
||||||
"then" => Kw::Then,
|
|
||||||
"else" => Kw::Else,
|
|
||||||
"is" => Kw::Is,
|
|
||||||
"fn" => Kw::Func,
|
|
||||||
"for" => Kw::For,
|
|
||||||
"while" => Kw::While,
|
|
||||||
"let" => Kw::Let,
|
|
||||||
"in" => Kw::In,
|
|
||||||
"mut" => Kw::Mut,
|
|
||||||
"return" => Kw::Return,
|
|
||||||
"break" => Kw::Break,
|
|
||||||
"continue" => Kw::Continue,
|
|
||||||
"alias" => Kw::Alias,
|
|
||||||
"type" => Kw::Type,
|
|
||||||
"Self" => Kw::SelfType,
|
|
||||||
"self" => Kw::SelfIdent,
|
|
||||||
"interface" => Kw::Interface,
|
|
||||||
"impl" => Kw::Impl,
|
|
||||||
"true" => Kw::True,
|
|
||||||
"false" => Kw::False,
|
|
||||||
"module" => Kw::Module,
|
|
||||||
"import" => Kw::Import,
|
|
||||||
_ => return Err(()),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A single token: its kind plus where it appeared in the source.
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
    pub kind: TokenKind,
    pub(crate) location: Location,
}
|
|
||||||
|
|
||||||
impl Token {
    /// Debug-style rendering that includes the source location,
    /// e.g. `Identifier(foo)(3:7)`.
    pub fn to_string_with_metadata(&self) -> String {
        format!("{}({})", self.kind, self.location)
    }

    /// Returns a clone of this token's kind.
    // NOTE(review): cloning is cheap for most variants (payloads are `Rc`),
    // but a borrowing accessor would avoid it entirely.
    pub fn get_kind(&self) -> TokenKind {
        self.kind.clone()
    }
}
|
|
||||||
|
|
||||||
/// Characters that may appear in (multi-character) operator tokens.
const OPERATOR_CHARS: [char; 17] =
    ['!', '$', '%', '&', '*', '+', '-', '.', ':', '<', '>', '=', '?', '^', '|', '~', '`'];

/// True if `c` can be part of an operator token.
fn is_operator(c: &char) -> bool {
    // `slice::contains` states the membership test directly, replacing the
    // equivalent hand-written `iter().any(|x| x == c)`.
    OPERATOR_CHARS.contains(c)
}
|
|
||||||
|
|
||||||
// (line index, character index, character): the per-character stream fed to
// the token handlers.
type CharData = (usize, usize, char);

/// Convert raw source text into a flat list of tokens.
///
/// Whitespace other than newlines is dropped, `//` comments run to end of
/// line, and `/* */` comments nest. Unknown characters and unclosed block
/// comments become `Error` tokens rather than aborting tokenization.
pub fn tokenize(input: &str) -> Vec<Token> {
    let mut tokens: Vec<Token> = Vec::new();

    // Re-insert a '\n' between lines (stripped by `lines()`) so Newline
    // tokens are produced, then flatten to (line, column, char) triples.
    // NOTE(review): the interspersed separator carries line index 0, so a
    // Newline token's line_num is always 0 — confirm this is intended.
    let mut input = Iterator::intersperse(input.lines().enumerate(), (0, "\n"))
        .flat_map(|(line_idx, line)| line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch)))
        .peekable();

    while let Some((line_num, char_num, c)) = input.next() {
        let cur_tok_kind = match c {
            '/' => match input.peek().map(|t| t.2) {
                // `//` line comment: discard through the next newline.
                Some('/') => {
                    for (_, _, c) in input.by_ref() {
                        if c == '\n' {
                            break;
                        }
                    }
                    continue;
                }
                // `/*` block comment: discard, tracking nesting depth.
                Some('*') => {
                    input.next();
                    let mut comment_level = 1;
                    while let Some((_, _, c)) = input.next() {
                        if c == '*' && input.peek().map(|t| t.2) == Some('/') {
                            input.next();
                            comment_level -= 1;
                        } else if c == '/' && input.peek().map(|t| t.2) == Some('*') {
                            input.next();
                            comment_level += 1;
                        }
                        if comment_level == 0 {
                            break;
                        }
                    }
                    if comment_level != 0 {
                        // Input ended with unbalanced `/*`.
                        Error("Unclosed comment".to_string())
                    } else {
                        continue;
                    }
                }
                // A lone `/` is its own token kind.
                _ => Slash,
            },
            c if c.is_whitespace() && c != '\n' => continue,
            '\n' => Newline,
            ';' => Semicolon,
            ':' => Colon,
            ',' => Comma,
            '(' => LParen,
            ')' => RParen,
            '{' => LCurlyBrace,
            '}' => RCurlyBrace,
            '[' => LSquareBracket,
            ']' => RSquareBracket,
            '"' => handle_quote(&mut input, None),
            '\\' => Backslash,
            '@' => AtSign,
            c if c.is_digit(10) => handle_digit(c, &mut input),
            c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input),
            c if is_operator(&c) => handle_operator(c, &mut input),
            unknown => Error(format!("Unexpected character: {}", unknown)),
        };
        let location =
            Location { line_num: line_num.try_into().unwrap(), char_num: char_num.try_into().unwrap() };
        tokens.push(Token { kind: cur_tok_kind, location });
    }
    tokens
}
|
|
||||||
|
|
||||||
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item = CharData>>) -> TokenKind {
|
|
||||||
let next_ch = input.peek().map(|&(_, _, c)| c);
|
|
||||||
|
|
||||||
if c == '0' && next_ch == Some('x') {
|
|
||||||
input.next();
|
|
||||||
let rest: String = input
|
|
||||||
.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_')
|
|
||||||
.map(|(_, _, c)| c)
|
|
||||||
.collect();
|
|
||||||
HexLiteral(Rc::new(rest))
|
|
||||||
} else if c == '0' && next_ch == Some('b') {
|
|
||||||
input.next();
|
|
||||||
BinNumberSigil
|
|
||||||
} else {
|
|
||||||
let mut buf = c.to_string();
|
|
||||||
buf.extend(input.peeking_take_while(|&(_, _, ref c)| c.is_digit(10)).map(|(_, _, c)| c));
|
|
||||||
DigitGroup(Rc::new(buf))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Consume the body of a string literal, the opening `"` having already
/// been read. `quote_prefix` is the word glued to the opening quote
/// (e.g. the `b` in `b"…"`), if any.
///
/// Supported escapes are `\n`, `\"`, and `\t`. For any other character the
/// backslash is silently dropped (the next char is pushed on the following
/// iteration, so `\x` yields `x`). Reaching end of input before a closing
/// quote produces an `Error` token.
fn handle_quote(
    input: &mut Peekable<impl Iterator<Item = CharData>>,
    quote_prefix: Option<&str>,
) -> TokenKind {
    let mut buf = String::new();
    loop {
        match input.next().map(|(_, _, c)| c) {
            Some('"') => break,
            Some('\\') => {
                let next = input.peek().map(|&(_, _, c)| c);
                if next == Some('n') {
                    input.next();
                    buf.push('\n')
                } else if next == Some('"') {
                    input.next();
                    buf.push('"');
                } else if next == Some('t') {
                    input.next();
                    buf.push('\t');
                }
            }
            Some(c) => buf.push(c),
            None => return TokenKind::Error("Unclosed string".to_string()),
        }
    }
    TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
}
|
|
||||||
|
|
||||||
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item = CharData>>) -> TokenKind {
|
|
||||||
let mut buf = String::new();
|
|
||||||
buf.push(c);
|
|
||||||
let next_is_alphabetic = input.peek().map(|&(_, _, c)| !c.is_alphabetic()).unwrap_or(true);
|
|
||||||
if c == '_' && next_is_alphabetic {
|
|
||||||
return TokenKind::Underscore;
|
|
||||||
}
|
|
||||||
|
|
||||||
loop {
|
|
||||||
match input.peek().map(|&(_, _, c)| c) {
|
|
||||||
Some(c) if c == '"' => {
|
|
||||||
input.next();
|
|
||||||
return handle_quote(input, Some(&buf));
|
|
||||||
}
|
|
||||||
Some(c) if c.is_alphanumeric() || c == '_' => {
|
|
||||||
input.next();
|
|
||||||
buf.push(c);
|
|
||||||
}
|
|
||||||
_ => break,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
match Kw::try_from(buf.as_str()) {
|
|
||||||
Ok(kw) => TokenKind::Keyword(kw),
|
|
||||||
Err(()) => TokenKind::Identifier(Rc::new(buf)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Tokenize an operator starting with `c`.
///
/// A single `< >| . =` NOT followed by another operator character gets its
/// own dedicated token kind (so `<` is `LAngleBracket` but `<=` is an
/// `Operator`); a backtick starts a backtick-quoted word operator
/// (`` `plus` ``); anything else greedily consumes the run of operator
/// characters into one `Operator` token.
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item = CharData>>) -> TokenKind {
    match c {
        '<' | '>' | '|' | '.' | '=' => {
            let next = &input.peek().map(|&(_, _, c)| c);
            let next_is_op = next.map(|n| is_operator(&n)).unwrap_or(false);
            if !next_is_op {
                // Standalone punctuation, not the start of a longer operator.
                return match c {
                    '<' => LAngleBracket,
                    '>' => RAngleBracket,
                    '|' => Pipe,
                    '.' => Period,
                    '=' => Equals,
                    _ => unreachable!(),
                };
            }
        }
        _ => (),
    };

    let mut buf = String::new();

    if c == '`' {
        // Backtick operator: collect the quoted word, dropping both ticks.
        loop {
            match input.peek().map(|&(_, _, c)| c) {
                Some(c) if c.is_alphabetic() || c == '_' => {
                    input.next();
                    buf.push(c);
                }
                Some('`') => {
                    input.next();
                    break;
                }
                _ => break,
            }
        }
    } else {
        // Greedily consume the run of operator characters.
        buf.push(c);
        loop {
            match input.peek().map(|&(_, _, c)| c) {
                Some(c) if is_operator(&c) => {
                    input.next();
                    buf.push(c);
                }
                _ => break,
            }
        }
    }
    TokenKind::Operator(Rc::new(buf))
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod schala_tokenizer_tests {
    use super::{Kw::*, *};

    // Shorthand constructors for expected token kinds.
    macro_rules! digit {
        ($ident:expr) => {
            DigitGroup(Rc::new($ident.to_string()))
        };
    }
    macro_rules! ident {
        ($ident:expr) => {
            Identifier(Rc::new($ident.to_string()))
        };
    }
    macro_rules! op {
        ($ident:expr) => {
            Operator(Rc::new($ident.to_string()))
        };
    }

    // Tokenize and drop locations, leaving only kinds for comparison.
    fn token_kinds(input: &str) -> Vec<TokenKind> {
        tokenize(input).into_iter().map(move |tok| tok.kind).collect()
    }

    #[test]
    fn tokens() {
        let output = token_kinds("let a: A<B> = c ++ d");
        assert_eq!(
            output,
            vec![
                Keyword(Let),
                ident!("a"),
                Colon,
                ident!("A"),
                LAngleBracket,
                ident!("B"),
                RAngleBracket,
                Equals,
                ident!("c"),
                op!("++"),
                ident!("d")
            ]
        );
    }

    #[test]
    fn underscores() {
        // A `_` between digits is its own token...
        let output = token_kinds("4_8");
        assert_eq!(output, vec![digit!("4"), Underscore, digit!("8")]);

        // ...but inside a word it is part of the identifier.
        let output = token_kinds("aba_yo");
        assert_eq!(output, vec![ident!("aba_yo")]);
    }

    #[test]
    fn comments() {
        // Nested block comments are skipped entirely.
        let output = token_kinds("1 + /* hella /* bro */ */ 2");
        assert_eq!(output, vec![digit!("1"), op!("+"), digit!("2")]);

        // An unbalanced `/*` consumes the rest and yields an Error token.
        let output = token_kinds("1 + /* hella /* bro */ 2");
        assert_eq!(output, vec![digit!("1"), op!("+"), Error("Unclosed comment".to_string())]);

        //TODO not sure if I want this behavior
        // A stray `*/` after a closed comment is tokenized literally.
        let output = token_kinds("1 + /* hella */ bro */ 2");
        assert_eq!(
            output,
            vec![
                digit!("1"),
                op!("+"),
                Identifier(Rc::new("bro".to_string())),
                Operator(Rc::new("*".to_string())),
                Slash,
                DigitGroup(Rc::new("2".to_string()))
            ]
        );
    }

    #[test]
    fn backtick_operators() {
        // Backtick-quoted words become Operator tokens without the ticks.
        let output = token_kinds("1 `plus` 2");
        assert_eq!(output, vec![digit!("1"), op!("plus"), digit!("2")]);
    }

    #[test]
    fn string_literals() {
        let output = token_kinds(r#""some string""#);
        assert_eq!(output, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);

        // A word glued to the opening quote becomes the literal's prefix.
        let output = token_kinds(r#"b"some bytestring""#);
        assert_eq!(
            output,
            vec![StrLiteral {
                s: Rc::new("some bytestring".to_string()),
                prefix: Some(Rc::new("b".to_string()))
            }]
        );

        // Escapes are resolved during tokenization.
        let output = token_kinds(r#""Do \n \" escapes work\t""#);
        assert_eq!(
            output,
            vec![StrLiteral { s: Rc::new("Do \n \" escapes work\t".to_string()), prefix: None }]
        );
    }
}
|
|
@ -14,8 +14,8 @@ fn getline(arg) { }
|
|||||||
|
|
||||||
fn map(input: Option<T>, func: Func): Option<T> {
|
fn map(input: Option<T>, func: Func): Option<T> {
|
||||||
if input {
|
if input {
|
||||||
is Option::Some(x) then Option::Some(func(x)),
|
is Option::Some(x) then Option::Some(func(x))
|
||||||
is Option::None then Option::None,
|
is Option::None then Option::None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -17,7 +17,8 @@ pub use visitor::*;
|
|||||||
use crate::{
|
use crate::{
|
||||||
derivative::Derivative,
|
derivative::Derivative,
|
||||||
identifier::{define_id_kind, Id},
|
identifier::{define_id_kind, Id},
|
||||||
tokenizing::Location,
|
parsing::Location,
|
||||||
|
util::delim_wrapped,
|
||||||
};
|
};
|
||||||
|
|
||||||
define_id_kind!(ASTItem);
|
define_id_kind!(ASTItem);
|
||||||
@ -40,12 +41,12 @@ impl fmt::Display for AST {
|
|||||||
|
|
||||||
#[derive(Derivative, Debug, Clone)]
|
#[derive(Derivative, Debug, Clone)]
|
||||||
#[derivative(PartialEq)]
|
#[derivative(PartialEq)]
|
||||||
pub struct Statement {
|
pub struct Statement<K> {
|
||||||
#[derivative(PartialEq = "ignore")]
|
#[derivative(PartialEq = "ignore")]
|
||||||
pub id: ItemId,
|
pub id: ItemId,
|
||||||
#[derivative(PartialEq = "ignore")]
|
#[derivative(PartialEq = "ignore")]
|
||||||
pub location: Location,
|
pub location: Location,
|
||||||
pub kind: StatementKind,
|
pub kind: K,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
@ -65,23 +66,23 @@ pub enum FlowControl {
|
|||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Default)]
|
#[derive(Debug, Clone, PartialEq, Default)]
|
||||||
pub struct Block {
|
pub struct Block {
|
||||||
pub statements: Vec<Statement>,
|
pub statements: Vec<Statement<StatementKind>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Vec<Statement>> for Block {
|
impl From<Vec<Statement<StatementKind>>> for Block {
|
||||||
fn from(statements: Vec<Statement>) -> Self {
|
fn from(statements: Vec<Statement<StatementKind>>) -> Self {
|
||||||
Self { statements }
|
Self { statements }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Statement> for Block {
|
impl From<Statement<StatementKind>> for Block {
|
||||||
fn from(statement: Statement) -> Self {
|
fn from(statement: Statement<StatementKind>) -> Self {
|
||||||
Self { statements: vec![statement] }
|
Self { statements: vec![statement] }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsRef<[Statement]> for Block {
|
impl AsRef<[Statement<StatementKind>]> for Block {
|
||||||
fn as_ref(&self) -> &[Statement] {
|
fn as_ref(&self) -> &[Statement<StatementKind>] {
|
||||||
self.statements.as_ref()
|
self.statements.as_ref()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -123,15 +124,41 @@ pub struct FormalParam {
|
|||||||
pub enum Declaration {
|
pub enum Declaration {
|
||||||
FuncSig(Signature),
|
FuncSig(Signature),
|
||||||
FuncDecl(Signature, Block),
|
FuncDecl(Signature, Block),
|
||||||
TypeDecl { name: TypeSingletonName, body: TypeBody, mutable: bool },
|
TypeDecl {
|
||||||
|
name: TypeSingletonName,
|
||||||
|
body: TypeBody,
|
||||||
|
mutable: bool,
|
||||||
|
},
|
||||||
//TODO TypeAlias `original` needs to be a more complex type definition
|
//TODO TypeAlias `original` needs to be a more complex type definition
|
||||||
TypeAlias { alias: Rc<String>, original: Rc<String> },
|
TypeAlias {
|
||||||
Binding { name: Rc<String>, constant: bool, type_anno: Option<TypeIdentifier>, expr: Expression },
|
alias: Rc<String>,
|
||||||
Impl { type_name: TypeIdentifier, interface_name: Option<TypeSingletonName>, block: Vec<Declaration> },
|
original: Rc<String>,
|
||||||
Interface { name: Rc<String>, signatures: Vec<Signature> },
|
},
|
||||||
|
Binding {
|
||||||
|
name: Rc<String>,
|
||||||
|
constant: bool,
|
||||||
|
type_anno: Option<TypeIdentifier>,
|
||||||
|
expr: Expression,
|
||||||
|
},
|
||||||
|
Impl {
|
||||||
|
type_name: TypeIdentifier,
|
||||||
|
interface_name: Option<TypeSingletonName>,
|
||||||
|
block: Vec<Statement<Declaration>>,
|
||||||
|
},
|
||||||
|
Interface {
|
||||||
|
name: Rc<String>,
|
||||||
|
signatures: Vec<Signature>,
|
||||||
|
},
|
||||||
//TODO need to limit the types of statements that can be annotated
|
//TODO need to limit the types of statements that can be annotated
|
||||||
Annotation { name: Rc<String>, arguments: Vec<Expression>, inner: Box<Statement> },
|
Annotation {
|
||||||
Module { name: Rc<String>, items: Block },
|
name: Rc<String>,
|
||||||
|
arguments: Vec<Expression>,
|
||||||
|
inner: Box<Statement<StatementKind>>,
|
||||||
|
},
|
||||||
|
Module {
|
||||||
|
name: Rc<String>,
|
||||||
|
items: Block,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
@ -143,10 +170,15 @@ pub struct Signature {
|
|||||||
}
|
}
|
||||||
|
|
||||||
//TODO I can probably get rid of TypeBody
|
//TODO I can probably get rid of TypeBody
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, Derivative, Clone)]
|
||||||
|
#[derivative(PartialEq)]
|
||||||
pub enum TypeBody {
|
pub enum TypeBody {
|
||||||
Variants(Vec<Variant>),
|
Variants(Vec<Variant>),
|
||||||
ImmediateRecord(ItemId, Vec<(Rc<String>, TypeIdentifier)>),
|
ImmediateRecord {
|
||||||
|
#[derivative(PartialEq = "ignore")]
|
||||||
|
id: ItemId,
|
||||||
|
fields: Vec<(Rc<String>, TypeIdentifier)>,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Derivative, Clone)]
|
#[derive(Debug, Derivative, Clone)]
|
||||||
@ -187,6 +219,22 @@ pub enum TypeIdentifier {
|
|||||||
Singleton(TypeSingletonName),
|
Singleton(TypeSingletonName),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for TypeIdentifier {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
TypeIdentifier::Tuple(items) =>
|
||||||
|
write!(f, "{}", delim_wrapped('(', ')', items.iter().map(|item| item.to_string()))),
|
||||||
|
TypeIdentifier::Singleton(tsn) => {
|
||||||
|
write!(f, "{}", tsn.name)?;
|
||||||
|
if !tsn.params.is_empty() {
|
||||||
|
write!(f, "{}", delim_wrapped('<', '>', tsn.params.iter().map(|item| item.to_string())))?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub struct TypeSingletonName {
|
pub struct TypeSingletonName {
|
||||||
pub name: Rc<String>,
|
pub name: Rc<String>,
|
||||||
@ -197,12 +245,13 @@ pub struct TypeSingletonName {
|
|||||||
pub enum ExpressionKind {
|
pub enum ExpressionKind {
|
||||||
NatLiteral(u64),
|
NatLiteral(u64),
|
||||||
FloatLiteral(f64),
|
FloatLiteral(f64),
|
||||||
StringLiteral(Rc<String>),
|
StringLiteral { prefix: Option<Rc<String>>, s: Rc<String> },
|
||||||
BoolLiteral(bool),
|
BoolLiteral(bool),
|
||||||
BinExp(BinOp, Box<Expression>, Box<Expression>),
|
BinExp(BinOp, Box<Expression>, Box<Expression>),
|
||||||
PrefixExp(PrefixOp, Box<Expression>),
|
PrefixExp(PrefixOp, Box<Expression>),
|
||||||
TupleLiteral(Vec<Expression>),
|
TupleLiteral(Vec<Expression>),
|
||||||
Value(QualifiedName),
|
Value(QualifiedName),
|
||||||
|
SelfValue,
|
||||||
NamedStruct { name: QualifiedName, fields: Vec<(Rc<String>, Expression)> },
|
NamedStruct { name: QualifiedName, fields: Vec<(Rc<String>, Expression)> },
|
||||||
Call { f: Box<Expression>, arguments: Vec<InvocationArgument> },
|
Call { f: Box<Expression>, arguments: Vec<InvocationArgument> },
|
||||||
Index { indexee: Box<Expression>, indexers: Vec<Expression> },
|
Index { indexee: Box<Expression>, indexers: Vec<Expression> },
|
||||||
@ -262,8 +311,9 @@ pub enum PatternLiteral {
|
|||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub struct Enumerator {
|
pub struct Enumerator {
|
||||||
pub id: Rc<String>, //TODO rename this field
|
pub identifier: Rc<String>,
|
||||||
pub generator: Expression,
|
pub generator: Expression,
|
||||||
|
pub assignment: bool, //true if `=`, false if `<-`
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
@ -1,7 +1,5 @@
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use crate::tokenizing::TokenKind;
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub struct PrefixOp {
|
pub struct PrefixOp {
|
||||||
sigil: Rc<String>,
|
sigil: Rc<String>,
|
||||||
@ -15,10 +13,6 @@ impl PrefixOp {
|
|||||||
pub fn sigil(&self) -> &str {
|
pub fn sigil(&self) -> &str {
|
||||||
&self.sigil
|
&self.sigil
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_prefix(op: &str) -> bool {
|
|
||||||
matches!(op, "+" | "-" | "!")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
@ -35,38 +29,14 @@ impl BinOp {
|
|||||||
&self.sigil
|
&self.sigil
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
|
|
||||||
let s = token_kind_to_sigil(tok)?;
|
|
||||||
Some(BinOp::from_sigil(s))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn min_precedence() -> i32 {
|
pub fn min_precedence() -> i32 {
|
||||||
i32::min_value()
|
i32::min_value()
|
||||||
}
|
}
|
||||||
pub fn get_precedence_from_token(op_tok: &TokenKind) -> Option<i32> {
|
|
||||||
let s = token_kind_to_sigil(op_tok)?;
|
|
||||||
Some(binop_precedences(s))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_precedence(&self) -> i32 {
|
pub fn get_precedence(&self) -> i32 {
|
||||||
binop_precedences(self.sigil.as_ref())
|
binop_precedences(self.sigil.as_ref())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn token_kind_to_sigil(tok: &TokenKind) -> Option<&str> {
|
|
||||||
use self::TokenKind::*;
|
|
||||||
Some(match tok {
|
|
||||||
Operator(op) => op.as_str(),
|
|
||||||
Period => ".",
|
|
||||||
Pipe => "|",
|
|
||||||
Slash => "/",
|
|
||||||
LAngleBracket => "<",
|
|
||||||
RAngleBracket => ">",
|
|
||||||
Equals => "=",
|
|
||||||
_ => return None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn binop_precedences(s: &str) -> i32 {
|
fn binop_precedences(s: &str) -> i32 {
|
||||||
let default = 10_000_000;
|
let default = 10_000_000;
|
||||||
match s {
|
match s {
|
@ -39,12 +39,10 @@ pub fn walk_block<V: ASTVisitor>(v: &mut V, block: &Block) {
|
|||||||
Import(ref import_spec) => {
|
Import(ref import_spec) => {
|
||||||
v.import(import_spec);
|
v.import(import_spec);
|
||||||
}
|
}
|
||||||
Flow(ref flow_control) => match flow_control {
|
Flow(ref flow_control) =>
|
||||||
FlowControl::Return(Some(ref retval)) => {
|
if let FlowControl::Return(Some(ref retval)) = flow_control {
|
||||||
walk_expression(v, retval);
|
walk_expression(v, retval);
|
||||||
}
|
},
|
||||||
_ => (),
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -73,7 +71,12 @@ pub fn walk_expression<V: ASTVisitor>(v: &mut V, expr: &Expression) {
|
|||||||
|
|
||||||
if let Recursion::Continue = v.expression(expr) {
|
if let Recursion::Continue = v.expression(expr) {
|
||||||
match &expr.kind {
|
match &expr.kind {
|
||||||
NatLiteral(_) | FloatLiteral(_) | StringLiteral(_) | BoolLiteral(_) | Value(_) => (),
|
NatLiteral(_)
|
||||||
|
| FloatLiteral(_)
|
||||||
|
| StringLiteral { .. }
|
||||||
|
| BoolLiteral(_)
|
||||||
|
| Value(_)
|
||||||
|
| SelfValue => (),
|
||||||
BinExp(_, lhs, rhs) => {
|
BinExp(_, lhs, rhs) => {
|
||||||
walk_expression(v, lhs);
|
walk_expression(v, lhs);
|
||||||
walk_expression(v, rhs);
|
walk_expression(v, rhs);
|
@ -29,7 +29,7 @@ pub(super) fn render_ast(ast: &AST) -> String {
|
|||||||
buf
|
buf
|
||||||
}
|
}
|
||||||
|
|
||||||
fn render_statement(stmt: &Statement, indent: usize, buf: &mut String) {
|
fn render_statement(stmt: &Statement<StatementKind>, indent: usize, buf: &mut String) {
|
||||||
use StatementKind::*;
|
use StatementKind::*;
|
||||||
do_indent(indent, buf);
|
do_indent(indent, buf);
|
||||||
match stmt.kind {
|
match stmt.kind {
|
||||||
@ -45,9 +45,10 @@ fn render_expression(expr: &Expression, indent: usize, buf: &mut String) {
|
|||||||
|
|
||||||
buf.push_str("(Expr ");
|
buf.push_str("(Expr ");
|
||||||
match &expr.kind {
|
match &expr.kind {
|
||||||
|
SelfValue => write!(buf, "(SelfValue)").unwrap(),
|
||||||
NatLiteral(n) => buf.push_str(&format!("(NatLiteral {})", n)),
|
NatLiteral(n) => buf.push_str(&format!("(NatLiteral {})", n)),
|
||||||
FloatLiteral(f) => buf.push_str(&format!("(FloatLiteral {})", f)),
|
FloatLiteral(f) => buf.push_str(&format!("(FloatLiteral {})", f)),
|
||||||
StringLiteral(s) => buf.push_str(&format!("(StringLiteral {})", s)),
|
StringLiteral { s, prefix } => buf.push_str(&format!("(StringLiteral prefix: {:?} {})", prefix, s)),
|
||||||
BoolLiteral(b) => buf.push_str(&format!("(BoolLiteral {})", b)),
|
BoolLiteral(b) => buf.push_str(&format!("(BoolLiteral {})", b)),
|
||||||
BinExp(binop, lhs, rhs) => {
|
BinExp(binop, lhs, rhs) => {
|
||||||
let new_indent = indent + LEVEL;
|
let new_indent = indent + LEVEL;
|
79
schala-lang/src/error.rs
Normal file
79
schala-lang/src/error.rs
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
use crate::{
|
||||||
|
parsing::{Location, ParseError},
|
||||||
|
schala::{SourceReference, Stage},
|
||||||
|
symbol_table::SymbolError,
|
||||||
|
type_inference::TypeError,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Top-level error type for the compiler driver: an ordered collection of
/// errors gathered from any pipeline stage.
pub struct SchalaError {
    errors: Vec<Error>,
}
|
||||||
|
|
||||||
|
impl SchalaError {
    /// User-facing rendering. Only the first recorded error is displayed,
    /// even if several were collected.
    pub(crate) fn display(&self) -> String {
        match self.errors[0] {
            Error::Parse(ref parse_err) => parse_err.to_string(),
            Error::Standard { ref text, .. } => text.as_ref().cloned().unwrap_or_default(),
        }
    }

    /// Wrap a single type-inference error.
    #[allow(dead_code)]
    pub(crate) fn from_type_error(err: TypeError) -> Self {
        Self {
            errors: vec![Error::Standard { location: None, text: Some(err.msg), stage: Stage::Typechecking }],
        }
    }

    /// Wrap symbol-table errors. Individual error details are currently
    /// discarded in favor of a generic message.
    pub(crate) fn from_symbol_table(symbol_errs: Vec<SymbolError>) -> Self {
        //TODO this could be better
        let errors = symbol_errs
            .into_iter()
            .map(|_symbol_err| Error::Standard {
                location: None,
                text: Some("symbol table error".to_string()),
                stage: Stage::Symbols,
            })
            .collect();
        Self { errors }
    }

    /// Wrap a free-form error message tagged with the pipeline stage it
    /// arose in.
    pub(crate) fn from_string(text: String, stage: Stage) -> Self {
        Self { errors: vec![Error::Standard { location: None, text: Some(text), stage }] }
    }

    /// Wrap a parse error, pre-formatting it against the original source so
    /// the message can point at the offending line.
    pub(crate) fn from_parse_error(parse_error: ParseError, source_reference: &SourceReference) -> Self {
        let formatted_parse_error = format_parse_error(parse_error, source_reference);
        Self { errors: vec![Error::Parse(formatted_parse_error)] }
    }
}
|
||||||
|
|
||||||
|
// Internal per-stage error representation for SchalaError.
#[allow(dead_code)]
enum Error {
    // An error with optional location and text, tagged with the pipeline
    // stage that produced it.
    Standard { location: Option<Location>, text: Option<String>, stage: Stage },
    // A parse error, already formatted for display.
    Parse(String),
}
|
||||||
|
|
||||||
|
/// Render a parse error with the offending source line and a caret pointing
/// at the error column, rustc-diagnostic style.
fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
    let offset = error.location.offset;
    let (line_start, line_num, line_from_program) = source_reference.get_line(offset);
    // Column within the line. Assumes offsets count characters, not bytes —
    // TODO confirm for multi-byte input.
    let ch = offset - line_start;

    // A caret positioned under the offending column.
    let location_pointer = format!("{}^", " ".repeat(ch));

    // Pad the gutter to the width of the line number.
    let line_num_digits = format!("{}", line_num).chars().count();
    let space_padding = " ".repeat(line_num_digits);

    format!(
        r#"
{error_msg}
{space_padding} |
{line_num} | {}
{space_padding} | {}
"#,
        line_from_program,
        location_pointer,
        error_msg = error.msg,
        space_padding = space_padding,
        line_num = line_num,
    )
}
|
@ -38,6 +38,7 @@ where T: IdKind
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct IdStore<T>
|
pub struct IdStore<T>
|
||||||
where T: IdKind
|
where T: IdKind
|
||||||
{
|
{
|
@ -1,15 +1,13 @@
|
|||||||
#![feature(trace_macros)]
|
#![feature(trace_macros)]
|
||||||
//#![feature(unrestricted_attribute_tokens)]
|
//#![feature(unrestricted_attribute_tokens)]
|
||||||
#![feature(box_patterns, box_syntax, iter_intersperse)]
|
#![feature(box_patterns, iter_intersperse)]
|
||||||
|
|
||||||
//! `schala-lang` is where the Schala programming language is actually implemented.
|
//! `schala-lang` is where the Schala programming language is actually implemented.
|
||||||
//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
|
//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
|
||||||
//! `ProgrammingLanguageInterface` and the chain of compiler passes for it.
|
//! `ProgrammingLanguageInterface` and the chain of compiler passes for it.
|
||||||
|
|
||||||
extern crate schala_repl;
|
|
||||||
#[macro_use]
|
|
||||||
extern crate schala_lang_codegen;
|
|
||||||
extern crate derivative;
|
extern crate derivative;
|
||||||
|
extern crate schala_repl;
|
||||||
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
mod util;
|
mod util;
|
||||||
@ -19,7 +17,6 @@ mod type_inference;
|
|||||||
|
|
||||||
mod ast;
|
mod ast;
|
||||||
mod parsing;
|
mod parsing;
|
||||||
mod tokenizing;
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
mod symbol_table;
|
mod symbol_table;
|
||||||
mod builtin;
|
mod builtin;
|
1098
schala-lang/src/parsing/combinator.rs
Normal file
1098
schala-lang/src/parsing/combinator.rs
Normal file
File diff suppressed because it is too large
Load Diff
126
schala-lang/src/parsing/mod.rs
Normal file
126
schala-lang/src/parsing/mod.rs
Normal file
@ -0,0 +1,126 @@
|
|||||||
|
#![allow(clippy::upper_case_acronyms)]
|
||||||
|
|
||||||
|
pub mod combinator;
|
||||||
|
mod peg_parser;
|
||||||
|
mod test;
|
||||||
|
|
||||||
|
use std::{cell::RefCell, fmt, rc::Rc};
|
||||||
|
|
||||||
|
use combinator::Span;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
use crate::ast::{Block, Expression};
|
||||||
|
use crate::{
|
||||||
|
ast::{ASTItem, AST},
|
||||||
|
identifier::{Id, IdStore},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub(crate) type StoreRef = Rc<RefCell<IdStore<ASTItem>>>;
|
||||||
|
pub struct Parser {
|
||||||
|
id_store: StoreRef,
|
||||||
|
use_combinator: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parser {
|
||||||
|
pub(crate) fn new() -> Self {
|
||||||
|
let id_store: IdStore<ASTItem> = IdStore::new();
|
||||||
|
Self { id_store: Rc::new(RefCell::new(id_store)), use_combinator: true }
|
||||||
|
}
|
||||||
|
pub(crate) fn parse(&mut self, input: &str) -> Result<AST, ParseError> {
|
||||||
|
if self.use_combinator {
|
||||||
|
self.parse_comb(input)
|
||||||
|
} else {
|
||||||
|
self.parse_peg(input)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn parse_peg(&mut self, input: &str) -> Result<AST, ParseError> {
|
||||||
|
peg_parser::schala_parser::program(input, self).map_err(ParseError::from_peg)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn parse_comb(&mut self, input: &str) -> Result<AST, ParseError> {
|
||||||
|
let span = Span::new_extra(input, self.id_store.clone());
|
||||||
|
convert(input, combinator::program(span))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
fn expression(&mut self, input: &str) -> Result<Expression, ParseError> {
|
||||||
|
peg_parser::schala_parser::expression(input, self).map_err(ParseError::from_peg)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
fn expression_comb(&mut self, input: &str) -> Result<Expression, ParseError> {
|
||||||
|
let span = Span::new_extra(input, self.id_store.clone());
|
||||||
|
convert(input, combinator::expression(span))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
fn block(&mut self, input: &str) -> Result<Block, ParseError> {
|
||||||
|
peg_parser::schala_parser::block(input, self).map_err(ParseError::from_peg)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
fn block_comb(&mut self, input: &str) -> Result<Block, ParseError> {
|
||||||
|
let span = Span::new_extra(input, self.id_store.clone());
|
||||||
|
convert(input, combinator::block(span))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fresh(&mut self) -> Id<ASTItem> {
|
||||||
|
self.id_store.borrow_mut().fresh()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn convert<'a, O>(input: &'a str, result: combinator::ParseResult<'a, O>) -> Result<O, ParseError> {
|
||||||
|
use nom::{error::VerboseError, Finish};
|
||||||
|
|
||||||
|
match result.finish() {
|
||||||
|
Ok((rest, output)) => {
|
||||||
|
if rest.fragment() != &"" {
|
||||||
|
return Err(ParseError {
|
||||||
|
location: Default::default(),
|
||||||
|
msg: format!("Bad parse state, remaining text: `{}`", rest.fragment()),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(output)
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
let err = VerboseError {
|
||||||
|
errors: err.errors.into_iter().map(|(sp, kind)| (*sp.fragment(), kind)).collect(),
|
||||||
|
};
|
||||||
|
let msg = nom::error::convert_error(input, err);
|
||||||
|
Err(ParseError { msg, location: (0).into() })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Represents a parsing error
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct ParseError {
|
||||||
|
pub msg: String,
|
||||||
|
pub location: Location,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ParseError {
|
||||||
|
fn from_peg(err: peg::error::ParseError<peg::str::LineCol>) -> Self {
|
||||||
|
let msg = err.to_string();
|
||||||
|
Self { msg, location: err.location.offset.into() }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Default)]
|
||||||
|
pub struct Location {
|
||||||
|
pub(crate) offset: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<usize> for Location {
|
||||||
|
fn from(offset: usize) -> Self {
|
||||||
|
Self { offset }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for Location {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(f, "{}", self.offset)
|
||||||
|
}
|
||||||
|
}
|
567
schala-lang/src/parsing/peg_parser.rs
Normal file
567
schala-lang/src/parsing/peg_parser.rs
Normal file
@ -0,0 +1,567 @@
|
|||||||
|
use std::rc::Rc;
|
||||||
|
|
||||||
|
use super::Parser;
|
||||||
|
use crate::ast::*;
|
||||||
|
|
||||||
|
fn rc_string(s: &str) -> Rc<String> {
|
||||||
|
Rc::new(s.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
enum ExtendedPart<'a> {
|
||||||
|
Index(Vec<Expression>),
|
||||||
|
Accessor(&'a str),
|
||||||
|
Call(Vec<InvocationArgument>),
|
||||||
|
}
|
||||||
|
|
||||||
|
peg::parser! {
|
||||||
|
pub grammar schala_parser() for str {
|
||||||
|
|
||||||
|
rule whitespace() = [' ' | '\t' ]
|
||||||
|
rule whitespace_or_newline() = [' ' | '\t' | '\n' ]
|
||||||
|
|
||||||
|
rule _ = quiet!{ (block_comment() / line_comment() / whitespace())* }
|
||||||
|
|
||||||
|
rule __ = quiet!{ (block_comment() / line_comment() / whitespace_or_newline())* }
|
||||||
|
|
||||||
|
rule block_comment() = "/*" (block_comment() / !"*/" [_])* "*/"
|
||||||
|
rule line_comment() = "//" (!['\n'] [_])* &"\n"
|
||||||
|
|
||||||
|
|
||||||
|
pub rule program(parser: &mut Parser) -> AST =
|
||||||
|
__ statements:(statement(parser) ** (delimiter()+) ) __ { AST { id: parser.fresh(), statements: statements.into() } }
|
||||||
|
|
||||||
|
rule delimiter() = (";" / "\n")+
|
||||||
|
|
||||||
|
//Note - this is a hack, ideally the rule `rule block() -> Block = "{" _ items:(statement() **
|
||||||
|
//delimiter()) _ "}" { items.into() }` would've worked, but it doesn't.
|
||||||
|
pub rule block(parser: &mut Parser) -> Block =
|
||||||
|
"{" __ items:(statement(parser) ** delimiter()) delimiter()? __ "}" { items.into() } /
|
||||||
|
"{" __ stmt:statement(parser) __ "}" { vec![stmt].into() }
|
||||||
|
|
||||||
|
rule block_item(parser: &mut Parser) -> Statement<StatementKind> =
|
||||||
|
_ stmt:statement(parser) _ delimiter()+ { stmt }
|
||||||
|
|
||||||
|
rule statement(parser: &mut Parser) -> Statement<StatementKind> =
|
||||||
|
_ pos:position!() kind:statement_kind(parser) _ { Statement { id: parser.fresh(), location: pos.into(), kind } }
|
||||||
|
|
||||||
|
rule statement_kind(parser: &mut Parser) -> StatementKind =
|
||||||
|
__ import:import(parser) { StatementKind::Import(import) } /
|
||||||
|
__ decl:declaration(parser) { StatementKind::Declaration(decl) } /
|
||||||
|
__ flow:flow(parser) { StatementKind::Flow(flow) } /
|
||||||
|
__ expr:expression(parser) { StatementKind::Expression(expr) }
|
||||||
|
|
||||||
|
rule flow(parser: &mut Parser) -> FlowControl =
|
||||||
|
"continue" { FlowControl::Continue } /
|
||||||
|
"break" { FlowControl::Break } /
|
||||||
|
"return" _ expr:expression(parser)? { FlowControl::Return(expr) }
|
||||||
|
|
||||||
|
//TODO add the ability to rename and exclude imports
|
||||||
|
rule import(parser: &mut Parser) -> ImportSpecifier =
|
||||||
|
"import" _ path_components:path_components() suffix:import_suffix()? {
|
||||||
|
ImportSpecifier {
|
||||||
|
id: parser.fresh(),
|
||||||
|
path_components,
|
||||||
|
imported_names: suffix.unwrap_or(ImportedNames::LastOfPath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rule path_components() -> Vec<Rc<String>> =
|
||||||
|
"::"? name:identifier() rest:path_component()* {
|
||||||
|
let mut items = vec![rc_string(name)];
|
||||||
|
items.extend(rest.into_iter().map(rc_string));
|
||||||
|
items
|
||||||
|
}
|
||||||
|
|
||||||
|
rule path_component() -> &'input str = "::" ident:identifier() { ident }
|
||||||
|
|
||||||
|
rule import_suffix() -> ImportedNames =
|
||||||
|
"::*" { ImportedNames::All } /
|
||||||
|
"::{" __ names:(identifier() ** (_ "," _)) __ "}" {?
|
||||||
|
if names.is_empty() {
|
||||||
|
Err("import groups must have at least one item")
|
||||||
|
} else {
|
||||||
|
Ok(ImportedNames::List(names.into_iter().map(rc_string).collect()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rule declaration(parser: &mut Parser) -> Declaration =
|
||||||
|
binding(parser) / type_decl(parser) / annotation(parser) / func(parser) / interface(parser) /
|
||||||
|
implementation(parser) / module(parser)
|
||||||
|
|
||||||
|
rule module(parser: &mut Parser) -> Declaration =
|
||||||
|
"module" _ name:identifier() _ items:block(parser) { Declaration::Module { name: rc_string(name), items } }
|
||||||
|
|
||||||
|
rule implementation(parser: &mut Parser) -> Declaration =
|
||||||
|
"impl" _ interface:type_singleton_name() _ "for" _ type_name:type_identifier() _ block:decl_block(parser) {
|
||||||
|
Declaration::Impl { type_name, interface_name: Some(interface), block }
|
||||||
|
|
||||||
|
} /
|
||||||
|
"impl" _ type_name:type_identifier() _ block:decl_block(parser) {
|
||||||
|
Declaration::Impl { type_name, interface_name: None, block }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule decl_block(parser: &mut Parser) -> Vec<Statement<Declaration>> =
|
||||||
|
"{" __ decls:(func_declaration_stmt(parser) ** (delimiter()+)) delimiter()? __ "}" { decls }
|
||||||
|
|
||||||
|
rule func_declaration_stmt(parser: &mut Parser) -> Statement<Declaration> =
|
||||||
|
pos:position!() decl:func_declaration(parser) { Statement { id: parser.fresh(), location: pos.into(), kind: decl } }
|
||||||
|
|
||||||
|
rule interface(parser: &mut Parser) -> Declaration =
|
||||||
|
"interface" _ name:identifier() _ signatures:signature_block(parser) { Declaration::Interface { name: rc_string(name), signatures } }
|
||||||
|
|
||||||
|
rule signature_block(parser: &mut Parser) -> Vec<Signature> =
|
||||||
|
"{" __ signatures:(func_signature(parser) ** (delimiter()+)) __ "}" { signatures }
|
||||||
|
|
||||||
|
rule func(parser: &mut Parser) -> Declaration =
|
||||||
|
decl:func_declaration(parser) { decl } /
|
||||||
|
sig:func_signature(parser) { Declaration::FuncSig(sig) }
|
||||||
|
|
||||||
|
rule func_declaration(parser: &mut Parser) -> Declaration =
|
||||||
|
_ sig:func_signature(parser) __ body:block(parser) { Declaration::FuncDecl(sig, body) }
|
||||||
|
|
||||||
|
rule func_signature(parser: &mut Parser) -> Signature =
|
||||||
|
_ "fn" _ name:identifier() "(" _ params:formal_params(parser) _ ")" _ type_anno:type_anno()? { Signature {
|
||||||
|
name: rc_string(name), operator: false, params, type_anno
|
||||||
|
} } /
|
||||||
|
_ "fn" _ "(" op:operator() ")" _ "(" _ params:formal_params(parser) _ ")" _ type_anno:type_anno()? { Signature {
|
||||||
|
name: rc_string(op), operator: true, params, type_anno
|
||||||
|
} }
|
||||||
|
|
||||||
|
rule formal_params(parser: &mut Parser) -> Vec<FormalParam> =
|
||||||
|
params:(formal_param(parser) ** (_ "," _)) {? if params.len() < 256 { Ok(params) } else {
|
||||||
|
Err("function-too-long") }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule formal_param(parser: &mut Parser) -> FormalParam =
|
||||||
|
name:identifier() _ anno:type_anno()? _ "=" expr:expression(parser) { FormalParam { name: rc_string(name),
|
||||||
|
default: Some(expr), anno } } /
|
||||||
|
name:identifier() _ anno:type_anno()? { FormalParam { name: rc_string(name), default: None, anno } }
|
||||||
|
|
||||||
|
|
||||||
|
rule annotation(parser: &mut Parser) -> Declaration =
|
||||||
|
"@" name:identifier() args:annotation_args(parser)? delimiter()+ _ inner:statement(parser) { Declaration::Annotation {
|
||||||
|
name: rc_string(name), arguments: if let Some(args) = args { args } else { vec![] }, inner: Box::new(inner) }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule annotation_args(parser: &mut Parser) -> Vec<Expression> =
|
||||||
|
"(" _ args:(expression(parser) ** (_ "," _)) _ ")" { args }
|
||||||
|
|
||||||
|
|
||||||
|
rule binding(parser: &mut Parser) -> Declaration =
|
||||||
|
"let" _ mutable:"mut"? _ ident:identifier() _ type_anno:type_anno()? _ "=" _ expr:expression(parser) {
|
||||||
|
Declaration::Binding { name: Rc::new(ident.to_string()), constant: mutable.is_none(),
|
||||||
|
type_anno, expr }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
rule type_decl(parser: &mut Parser) -> Declaration =
|
||||||
|
"type" _ "alias" _ alias:type_alias() { alias } /
|
||||||
|
"type" _ mutable:"mut"? _ name:type_singleton_name() _ "=" _ body:type_body(parser) {
|
||||||
|
Declaration::TypeDecl { name, body, mutable: mutable.is_some() }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule type_singleton_name() -> TypeSingletonName =
|
||||||
|
name:identifier() params:type_params()? { TypeSingletonName {
|
||||||
|
name: rc_string(name), params: if let Some(params) = params { params } else { vec![] }
|
||||||
|
} }
|
||||||
|
|
||||||
|
rule type_params() -> Vec<TypeIdentifier> =
|
||||||
|
"<" _ idents:(type_identifier() ** (_ "," _)) _ ">" { idents }
|
||||||
|
|
||||||
|
rule type_identifier() -> TypeIdentifier =
|
||||||
|
"(" _ items:(type_identifier() ** (_ "," _)) _ ")" { TypeIdentifier::Tuple(items) } /
|
||||||
|
singleton:type_singleton_name() { TypeIdentifier::Singleton(singleton) }
|
||||||
|
|
||||||
|
rule type_body(parser: &mut Parser) -> TypeBody =
|
||||||
|
"{" _ items:(record_variant_item() ** (__ "," __)) __ "}" { TypeBody::ImmediateRecord { id: parser.fresh(), fields: items } } /
|
||||||
|
variants:(variant_spec(parser) ** (__ "|" __)) { TypeBody::Variants(variants) }
|
||||||
|
|
||||||
|
rule variant_spec(parser: &mut Parser) -> Variant =
|
||||||
|
name:identifier() __ "{" __ typed_identifier_list:(record_variant_item() ** (__ "," __)) __ ","? __ "}" { Variant {
|
||||||
|
id: parser.fresh(), name: rc_string(name), kind: VariantKind::Record(typed_identifier_list)
|
||||||
|
} } /
|
||||||
|
name:identifier() "(" tuple_members:(type_identifier() ++ (__ "," __)) ")" { Variant {
|
||||||
|
id: parser.fresh(), name: rc_string(name), kind: VariantKind::TupleStruct(tuple_members) } } /
|
||||||
|
name:identifier() { Variant { id: parser.fresh(), name: rc_string(name), kind: VariantKind::UnitStruct } }
|
||||||
|
|
||||||
|
rule record_variant_item() -> (Rc<String>, TypeIdentifier) =
|
||||||
|
name:identifier() _ ":" _ ty:type_identifier() { (rc_string(name), ty) }
|
||||||
|
|
||||||
|
rule type_alias() -> Declaration =
|
||||||
|
alias:identifier() _ "=" _ name:identifier() { Declaration::TypeAlias { alias: rc_string(alias), original: rc_string(name), } }
|
||||||
|
|
||||||
|
rule type_anno() -> TypeIdentifier =
|
||||||
|
":" _ identifier:type_identifier() { identifier }
|
||||||
|
|
||||||
|
pub rule expression(parser: &mut Parser) -> Expression =
|
||||||
|
__ kind:expression_kind(true, parser) _ type_anno:type_anno()? { Expression { id: parser.fresh(), type_anno, kind } }
|
||||||
|
|
||||||
|
rule expression_no_struct(parser: &mut Parser) -> Expression =
|
||||||
|
__ kind:expression_kind(false, parser) { Expression { id: parser.fresh(), type_anno: None, kind } }
|
||||||
|
|
||||||
|
rule expression_kind(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
precedence_expr(struct_ok, parser)
|
||||||
|
|
||||||
|
rule precedence_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
first:prefix_expr(struct_ok, parser) _ next:(precedence_continuation(struct_ok, parser))* {
|
||||||
|
let next = next.into_iter().map(|(sigil, expr)| (BinOp::from_sigil(sigil), expr)).collect();
|
||||||
|
BinopSequence { first, next }.do_precedence(parser)
|
||||||
|
}
|
||||||
|
|
||||||
|
rule precedence_continuation(struct_ok: bool, parser: &mut Parser) -> (&'input str, ExpressionKind) =
|
||||||
|
op:operator() _ expr:prefix_expr(struct_ok, parser) _ { (op, expr) }
|
||||||
|
|
||||||
|
rule prefix_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
prefix:prefix()? expr:extended_expr(struct_ok, parser) {
|
||||||
|
if let Some(p) = prefix {
|
||||||
|
let expr = Expression::new(parser.fresh(), expr);
|
||||||
|
let prefix = PrefixOp::from_sigil(p);
|
||||||
|
ExpressionKind::PrefixExp(prefix, Box::new(expr))
|
||||||
|
} else {
|
||||||
|
expr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
rule prefix() -> &'input str =
|
||||||
|
$(['+' | '-' | '!' ])
|
||||||
|
|
||||||
|
//TODO make the definition of operators more complex
|
||||||
|
rule operator() -> &'input str =
|
||||||
|
quiet!{!"*/" s:$( ['+' | '-' | '*' | '/' | '%' | '<' | '>' | '=' | '!' | '$' | '&' | '|' | '?' | '^' | '`']+ ) { s } } /
|
||||||
|
expected!("operator")
|
||||||
|
|
||||||
|
rule extended_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
primary:primary(struct_ok, parser) parts:(extended_expr_part(parser)*) {
|
||||||
|
let mut expression = Expression::new(parser.fresh(), primary);
|
||||||
|
for part in parts.into_iter() {
|
||||||
|
let kind = match part {
|
||||||
|
ExtendedPart::Index(indexers) => {
|
||||||
|
ExpressionKind::Index { indexee: Box::new(expression), indexers }
|
||||||
|
},
|
||||||
|
ExtendedPart::Accessor(name) => {
|
||||||
|
let name = rc_string(name);
|
||||||
|
ExpressionKind::Access { name, expr: Box::new(expression) }
|
||||||
|
},
|
||||||
|
ExtendedPart::Call(arguments) => {
|
||||||
|
ExpressionKind::Call { f: Box::new(expression), arguments }
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
expression = Expression::new(parser.fresh(), kind);
|
||||||
|
}
|
||||||
|
|
||||||
|
expression.kind
|
||||||
|
}
|
||||||
|
|
||||||
|
rule extended_expr_part(parser: &mut Parser) -> ExtendedPart<'input> =
|
||||||
|
indexers:index_part(parser) { ExtendedPart::Index(indexers) } /
|
||||||
|
arguments:call_part(parser) { ExtendedPart::Call(arguments) } /
|
||||||
|
"." name:identifier() { ExtendedPart::Accessor(name) }
|
||||||
|
|
||||||
|
rule index_part(parser: &mut Parser) -> Vec<Expression> =
|
||||||
|
"[" indexers:(expression(parser) ++ ",") "]" { indexers }
|
||||||
|
|
||||||
|
rule call_part(parser: &mut Parser) -> Vec<InvocationArgument> =
|
||||||
|
"(" arguments:(invocation_argument(parser) ** ",") ")" { arguments }
|
||||||
|
|
||||||
|
rule invocation_argument(parser: &mut Parser) -> InvocationArgument =
|
||||||
|
_ "_" _ { InvocationArgument::Ignored } /
|
||||||
|
_ ident:identifier() _ "=" _ expr:expression(parser) { InvocationArgument::Keyword {
|
||||||
|
name: Rc::new(ident.to_string()),
|
||||||
|
expr
|
||||||
|
} } /
|
||||||
|
_ expr:expression(parser) _ { InvocationArgument::Positional(expr) }
|
||||||
|
|
||||||
|
|
||||||
|
rule primary(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
|
||||||
|
while_expr(parser) / for_expr(parser) / float_literal() / nat_literal() / bool_literal() /
|
||||||
|
string_literal() / paren_expr(parser) /
|
||||||
|
list_expr(parser) / if_expr(parser) / lambda_expr(parser) /
|
||||||
|
item:named_struct(parser) {? if struct_ok { Ok(item) } else { Err("no-struct-allowed") } } /
|
||||||
|
identifier_expr(parser)
|
||||||
|
|
||||||
|
rule lambda_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
r#"\"# __ "(" _ params:formal_params(parser) _ ")" _ type_anno:(type_anno()?) _ body:block(parser) {
|
||||||
|
ExpressionKind::Lambda { params, type_anno, body }
|
||||||
|
} /
|
||||||
|
r#"\"# param:formal_param(parser) _ type_anno:(type_anno()?) _ body:block(parser) {
|
||||||
|
ExpressionKind::Lambda { params: vec![param], type_anno, body }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule for_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"for" _ enumerators:for_enumerators(parser) _ body:for_body(parser) {
|
||||||
|
ExpressionKind::ForExpression { enumerators, body }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule for_enumerators(parser: &mut Parser) -> Vec<Enumerator> =
|
||||||
|
"{" _ enumerators:(enumerator(parser) ++ ",") _ "}" { enumerators } /
|
||||||
|
enumerator:enumerator(parser) { vec![enumerator] }
|
||||||
|
|
||||||
|
//TODO add guards, etc.
|
||||||
|
rule enumerator(parser: &mut Parser) -> Enumerator =
|
||||||
|
ident:identifier() _ "<-" _ generator:expression_no_struct(parser) {
|
||||||
|
Enumerator { identifier: Rc::new(ident.to_string()), generator, assignment: false }
|
||||||
|
} /
|
||||||
|
//TODO need to distinguish these two cases in AST
|
||||||
|
ident:identifier() _ "=" _ generator:expression_no_struct(parser) {
|
||||||
|
Enumerator { identifier: Rc::new(ident.to_string()), generator, assignment: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule for_body(parser: &mut Parser) -> Box<ForBody> =
|
||||||
|
"return" _ expr:expression(parser) { Box::new(ForBody::MonadicReturn(expr)) } /
|
||||||
|
body:block(parser) { Box::new(ForBody::StatementBlock(body)) }
|
||||||
|
|
||||||
|
rule while_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"while" _ cond:expression_kind(false, parser)? _ body:block(parser) {
|
||||||
|
ExpressionKind::WhileExpression {
|
||||||
|
condition: cond.map(|kind| Box::new(Expression::new(parser.fresh(), kind))),
|
||||||
|
body,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
rule identifier_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
qn:qualified_identifier(parser) { ExpressionKind::Value(qn) }
|
||||||
|
|
||||||
|
rule named_struct(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
name:qualified_identifier(parser) _ fields:record_block(parser) {
|
||||||
|
ExpressionKind::NamedStruct {
|
||||||
|
name,
|
||||||
|
fields: fields.into_iter().map(|(n, exp)| (Rc::new(n.to_string()), exp)).collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//TODO support anonymous structs and Elm-style update syntax for structs
|
||||||
|
rule record_block(parser: &mut Parser) -> Vec<(&'input str, Expression)> =
|
||||||
|
"{" _ entries:(record_entry(parser) ** ",") _ "}" { entries }
|
||||||
|
|
||||||
|
rule record_entry(parser: &mut Parser) -> (&'input str, Expression) =
|
||||||
|
_ name:identifier() _ ":" _ expr:expression(parser) _ { (name, expr) }
|
||||||
|
|
||||||
|
rule qualified_identifier(parser: &mut Parser) -> QualifiedName =
|
||||||
|
names:(identifier() ++ "::") { QualifiedName { id: parser.fresh(), components: names.into_iter().map(|name| Rc::new(name.to_string())).collect() } }
|
||||||
|
|
||||||
|
//TODO improve the definition of identifiers
|
||||||
|
rule identifier() -> &'input str =
|
||||||
|
!(reserved() !(ident_continuation())) text:$(['a'..='z' | 'A'..='Z' | '_'] ident_continuation()*) { text }
|
||||||
|
|
||||||
|
rule ident_continuation() -> &'input str =
|
||||||
|
text:$(['a'..='z' | 'A'..='Z' | '0'..='9' | '_'])
|
||||||
|
|
||||||
|
rule reserved() = "if" / "then" / "else" / "is" / "fn" / "for" / "while" / "let" / "in" / "mut" / "return" /
|
||||||
|
"break" / "alias" / "type" / "self" / "Self" / "interface" / "impl" / "true" / "false" / "module" / "import"
|
||||||
|
|
||||||
|
|
||||||
|
rule if_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"if" _ discriminator:(expression(parser)?) _ body:if_expr_body(parser) {
|
||||||
|
ExpressionKind::IfExpression {
|
||||||
|
discriminator: discriminator.map(Box::new),
|
||||||
|
body: Box::new(body),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rule if_expr_body(parser: &mut Parser) -> IfExpressionBody =
|
||||||
|
cond_block(parser) / simple_pattern_match(parser) / simple_conditional(parser)
|
||||||
|
|
||||||
|
rule simple_conditional(parser: &mut Parser) -> IfExpressionBody =
|
||||||
|
"then" _ then_case:expr_or_block(parser) _ else_case:else_case(parser) {
|
||||||
|
IfExpressionBody::SimpleConditional { then_case, else_case }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule simple_pattern_match(parser: &mut Parser) -> IfExpressionBody =
|
||||||
|
"is" _ pattern:pattern(parser) _ "then" _ then_case:expr_or_block(parser) _ else_case:else_case(parser) {
|
||||||
|
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case }
|
||||||
|
}
|
||||||
|
|
||||||
|
rule cond_block(parser: &mut Parser) -> IfExpressionBody =
|
||||||
|
"{" __ cond_arms:(cond_arm(parser) ++ (delimiter()+)) __ "}" { IfExpressionBody::CondList(cond_arms) }
|
||||||
|
|
||||||
|
rule cond_arm(parser: &mut Parser) -> ConditionArm =
|
||||||
|
_ "else" _ body:expr_or_block(parser) { ConditionArm { condition: Condition::Else, guard: None, body } } /
|
||||||
|
_ condition:condition(parser) _ guard:condition_guard(parser) _ "then" _ body:expr_or_block(parser)
|
||||||
|
{ ConditionArm { condition, guard, body } }
|
||||||
|
|
||||||
|
rule condition(parser: &mut Parser) -> Condition =
|
||||||
|
"is" _ pat:pattern(parser) { Condition::Pattern(pat) } /
|
||||||
|
op:operator() _ expr:expression(parser) { Condition::TruncatedOp(BinOp::from_sigil(op), expr) }
|
||||||
|
|
||||||
|
rule condition_guard(parser: &mut Parser) -> Option<Expression> =
|
||||||
|
("if" _ expr:expression(parser) { expr } )?
|
||||||
|
|
||||||
|
rule expr_or_block(parser: &mut Parser) -> Block = block(parser) / pos:position!() ex:expression(parser) {
|
||||||
|
Statement {
|
||||||
|
id: parser.fresh() , location: pos.into(),
|
||||||
|
kind: StatementKind::Expression(ex)
|
||||||
|
}.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
rule else_case(parser: &mut Parser) -> Option<Block> =
|
||||||
|
("else" _ eorb:expr_or_block(parser) { eorb })?
|
||||||
|
|
||||||
|
rule pattern(parser: &mut Parser) -> Pattern =
|
||||||
|
"(" _ variants:(pattern(parser) ++ ",") _ ")" { Pattern::TuplePattern(variants) } /
|
||||||
|
_ pat:simple_pattern(parser) { pat }
|
||||||
|
|
||||||
|
rule simple_pattern(parser: &mut Parser) -> Pattern =
|
||||||
|
pattern_literal() /
|
||||||
|
qn:qualified_identifier(parser) "(" members:(pattern(parser) ** ",") ")" {
|
||||||
|
Pattern::TupleStruct(qn, members)
|
||||||
|
} /
|
||||||
|
qn:qualified_identifier(parser) _ "{" _ items:(record_pattern_entry(parser) ** ",") "}" _ {
|
||||||
|
let items = items.into_iter().map(|(name, pat)| (Rc::new(name.to_string()), pat)).collect();
|
||||||
|
Pattern::Record(qn, items)
|
||||||
|
} /
|
||||||
|
qn:qualified_identifier(parser) { Pattern::VarOrName(qn) }
|
||||||
|
|
||||||
|
rule record_pattern_entry(parser: &mut Parser) -> (&'input str, Pattern) =
|
||||||
|
_ name:identifier() _ ":" _ pat:pattern(parser) _ { (name, pat) } /
|
||||||
|
_ name:identifier() _ {
|
||||||
|
let qn = QualifiedName {
|
||||||
|
id: parser.fresh(),
|
||||||
|
components: vec![Rc::new(name.to_string())],
|
||||||
|
};
|
||||||
|
(name, Pattern::VarOrName(qn))
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
rule pattern_literal() -> Pattern =
|
||||||
|
"true" { Pattern::Literal(PatternLiteral::BoolPattern(true)) } /
|
||||||
|
"false" { Pattern::Literal(PatternLiteral::BoolPattern(false)) } /
|
||||||
|
s:bare_string_literal() { Pattern::Literal(PatternLiteral::StringPattern(Rc::new(s))) } /
|
||||||
|
sign:("-"?) num:(float_literal() / nat_literal()) {
|
||||||
|
let neg = sign.is_some();
|
||||||
|
Pattern::Literal(PatternLiteral::NumPattern { neg, num })
|
||||||
|
} /
|
||||||
|
"_" { Pattern::Ignored }
|
||||||
|
|
||||||
|
|
||||||
|
rule list_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"[" exprs:(expression(parser) ** ",") "]" {
|
||||||
|
let mut exprs = exprs;
|
||||||
|
ExpressionKind::ListLiteral(exprs)
|
||||||
|
}
|
||||||
|
|
||||||
|
rule paren_expr(parser: &mut Parser) -> ExpressionKind =
|
||||||
|
"(" exprs:(expression(parser) ** ",") ")" {
|
||||||
|
let mut exprs = exprs;
|
||||||
|
match exprs.len() {
|
||||||
|
1 => exprs.pop().unwrap().kind,
|
||||||
|
_ => ExpressionKind::TupleLiteral(exprs),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rule string_literal() -> ExpressionKind =
|
||||||
|
prefix:identifier()? s:bare_string_literal(){ ExpressionKind::StringLiteral{ s: Rc::new(s),
|
||||||
|
prefix: prefix.map(rc_string)
|
||||||
|
} }
|
||||||
|
|
||||||
|
rule bare_string_literal() -> String =
|
||||||
|
"\"" chars:string_component()* "\"" { chars.into_iter().collect::<String>() }
|
||||||
|
|
||||||
|
rule string_component() -> char =
|
||||||
|
!(r#"""# / r#"\"#) ch:$([_]) { ch.chars().next().unwrap() } /
|
||||||
|
r#"\u{"# value:$(['0'..='9' | 'a'..='f' | 'A'..='F']+) "}" { char::from_u32(u32::from_str_radix(value, 16).unwrap()).unwrap() } /
|
||||||
|
r#"\n"# { '\n' } / r#"\t"# { '\t' } / r#"\""# { '"' } / r#"\\"# { '\\' } /
|
||||||
|
expected!("Valid escape sequence")
|
||||||
|
|
||||||
|
rule bool_literal() -> ExpressionKind =
|
||||||
|
"true" { ExpressionKind::BoolLiteral(true) } / "false" { ExpressionKind::BoolLiteral(false) }
|
||||||
|
|
||||||
|
rule nat_literal() -> ExpressionKind =
|
||||||
|
bin_literal() / hex_literal() / unmarked_literal()
|
||||||
|
|
||||||
|
rule unmarked_literal() -> ExpressionKind =
|
||||||
|
digits:digits() { let n = digits.chars().filter(|ch| *ch != '_').collect::<String>().parse().unwrap(); ExpressionKind::NatLiteral(n) }
|
||||||
|
|
||||||
|
rule bin_literal() -> ExpressionKind =
|
||||||
|
"0b" digits:bin_digits() {? parse_binary(digits).map(ExpressionKind::NatLiteral) }
|
||||||
|
|
||||||
|
rule hex_literal() -> ExpressionKind =
|
||||||
|
"0x" digits:hex_digits() {? parse_hex(digits).map(ExpressionKind::NatLiteral) }
|
||||||
|
|
||||||
|
rule float_literal() -> ExpressionKind =
|
||||||
|
ds:$( digits() "." digits()? / "." digits() ) { ExpressionKind::FloatLiteral(ds.parse().unwrap()) }
|
||||||
|
|
||||||
|
rule digits() -> &'input str = $((digit_group() "_"*)+)
|
||||||
|
rule bin_digits() -> &'input str = $((bin_digit_group() "_"*)+)
|
||||||
|
rule hex_digits() -> &'input str = $((hex_digit_group() "_"*)+)
|
||||||
|
|
||||||
|
rule digit_group() -> &'input str = $(['0'..='9']+)
|
||||||
|
rule bin_digit_group() -> &'input str = $(['0' | '1']+)
|
||||||
|
rule hex_digit_group() -> &'input str = $(['0'..='9' | 'a'..='f' | 'A'..='F']+)
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_binary(digits: &str) -> Result<u64, &'static str> {
|
||||||
|
let mut result: u64 = 0;
|
||||||
|
let mut multiplier = 1;
|
||||||
|
for d in digits.chars().rev() {
|
||||||
|
match d {
|
||||||
|
'1' => result += multiplier,
|
||||||
|
'0' => (),
|
||||||
|
'_' => continue,
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
multiplier = match multiplier.checked_mul(2) {
|
||||||
|
Some(m) => m,
|
||||||
|
None => return Err("Binary expression will overflow"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_hex(digits: &str) -> Result<u64, &'static str> {
|
||||||
|
let mut result: u64 = 0;
|
||||||
|
let mut multiplier: u64 = 1;
|
||||||
|
for d in digits.chars().rev() {
|
||||||
|
if d == '_' {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
match d.to_digit(16) {
|
||||||
|
Some(n) => result += n as u64 * multiplier,
|
||||||
|
None => return Err("Internal parser error: invalid hex digit"),
|
||||||
|
}
|
||||||
|
multiplier = match multiplier.checked_mul(16) {
|
||||||
|
Some(m) => m,
|
||||||
|
None => return Err("Hexadecimal expression will overflow"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
struct BinopSequence {
|
||||||
|
first: ExpressionKind,
|
||||||
|
next: Vec<(BinOp, ExpressionKind)>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BinopSequence {
|
||||||
|
fn do_precedence(self, parser: &mut Parser) -> ExpressionKind {
|
||||||
|
fn helper(
|
||||||
|
precedence: i32,
|
||||||
|
lhs: ExpressionKind,
|
||||||
|
rest: &mut Vec<(BinOp, ExpressionKind)>,
|
||||||
|
parser: &mut Parser,
|
||||||
|
) -> Expression {
|
||||||
|
let mut lhs = Expression::new(parser.fresh(), lhs);
|
||||||
|
while let Some((next_op, next_rhs)) = rest.pop() {
|
||||||
|
let new_precedence = next_op.get_precedence();
|
||||||
|
if precedence >= new_precedence {
|
||||||
|
rest.push((next_op, next_rhs));
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
let rhs = helper(new_precedence, next_rhs, rest, parser);
|
||||||
|
lhs = Expression::new(
|
||||||
|
parser.fresh(),
|
||||||
|
ExpressionKind::BinExp(next_op, Box::new(lhs), Box::new(rhs)),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
lhs
|
||||||
|
}
|
||||||
|
let mut as_stack = self.next.into_iter().rev().collect();
|
||||||
|
helper(BinOp::min_precedence(), self.first, &mut as_stack, parser).kind
|
||||||
|
}
|
||||||
|
}
|
@ -6,8 +6,8 @@ use std::{fmt::Write, rc::Rc};
|
|||||||
|
|
||||||
use pretty_assertions::assert_eq;
|
use pretty_assertions::assert_eq;
|
||||||
|
|
||||||
use super::{new::schala_parser, tokenize, ParseResult, Parser};
|
use super::Parser;
|
||||||
use crate::{ast::*, tokenizing::Location};
|
use crate::{ast::*, parsing::Location};
|
||||||
|
|
||||||
fn rc(s: &str) -> Rc<String> {
|
fn rc(s: &str) -> Rc<String> {
|
||||||
Rc::new(s.to_owned())
|
Rc::new(s.to_owned())
|
||||||
@ -17,23 +17,15 @@ fn bx<T>(item: T) -> Box<T> {
|
|||||||
Box::new(item)
|
Box::new(item)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_parser(input: &str) -> Parser {
|
fn strlit(s: &str) -> ExpressionKind {
|
||||||
let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
|
ExpressionKind::StringLiteral { s: Rc::new(s.to_string()), prefix: None }
|
||||||
let mut parser = super::Parser::new();
|
|
||||||
parser.add_new_tokens(tokens);
|
|
||||||
parser
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse(input: &str) -> ParseResult<AST> {
|
fn stmt<K>(kind: K) -> Statement<K> {
|
||||||
let mut parser = make_parser(input);
|
|
||||||
parser.parse()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn stmt(kind: StatementKind) -> Statement {
|
|
||||||
Statement { location: Location::default(), id: ItemId::default(), kind }
|
Statement { location: Location::default(), id: ItemId::default(), kind }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn exst(kind: ExpressionKind) -> Statement {
|
fn exst(kind: ExpressionKind) -> Statement<StatementKind> {
|
||||||
Statement {
|
Statement {
|
||||||
location: Location::default(),
|
location: Location::default(),
|
||||||
id: ItemId::default(),
|
id: ItemId::default(),
|
||||||
@ -41,7 +33,7 @@ fn exst(kind: ExpressionKind) -> Statement {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn decl(declaration: Declaration) -> Statement {
|
fn decl(declaration: Declaration) -> Statement<StatementKind> {
|
||||||
Statement {
|
Statement {
|
||||||
location: Location::default(),
|
location: Location::default(),
|
||||||
id: ItemId::default(),
|
id: ItemId::default(),
|
||||||
@ -49,7 +41,7 @@ fn decl(declaration: Declaration) -> Statement {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fn_decl(sig: Signature, stmts: Block) -> Statement {
|
fn fn_decl(sig: Signature, stmts: Block) -> Statement<StatementKind> {
|
||||||
Statement {
|
Statement {
|
||||||
kind: StatementKind::Declaration(Declaration::FuncDecl(sig, stmts)),
|
kind: StatementKind::Declaration(Declaration::FuncDecl(sig, stmts)),
|
||||||
location: Default::default(),
|
location: Default::default(),
|
||||||
@ -99,88 +91,116 @@ fn ty_simple(name: &str) -> TypeIdentifier {
|
|||||||
|
|
||||||
macro_rules! assert_ast {
|
macro_rules! assert_ast {
|
||||||
($input:expr, $statements:expr) => {
|
($input:expr, $statements:expr) => {
|
||||||
let ast = parse($input).unwrap();
|
let mut parser = Parser::new();
|
||||||
|
let ast = parser.parse_comb($input);
|
||||||
|
let ast2 = parser.parse_peg($input);
|
||||||
let expected = AST { id: Default::default(), statements: $statements.into() };
|
let expected = AST { id: Default::default(), statements: $statements.into() };
|
||||||
println!("Expected: {}", expected);
|
let ast = match ast {
|
||||||
println!("Actual: {}", ast);
|
Err(err) => {
|
||||||
|
println!("Parse error: {}", err.msg);
|
||||||
|
panic!();
|
||||||
|
}
|
||||||
|
Ok(ast) => ast,
|
||||||
|
};
|
||||||
|
assert_eq!(ast, ast2.unwrap());
|
||||||
assert_eq!(ast, expected);
|
assert_eq!(ast, expected);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! assert_ast2 {
|
|
||||||
($input:expr, $statements:expr) => {
|
|
||||||
let ast = schala_parser::program($input);
|
|
||||||
let expected = AST { id: Default::default(), statements: $statements.into() };
|
|
||||||
if ast.is_err() {
|
|
||||||
println!("Parse error: {}", ast.unwrap_err());
|
|
||||||
panic!();
|
|
||||||
}
|
|
||||||
assert_eq!(ast.unwrap(), expected);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! assert_fail {
|
macro_rules! assert_fail {
|
||||||
($input:expr, $failure:expr) => {
|
($input:expr) => {
|
||||||
let err = parse($input).unwrap_err();
|
let mut parser = Parser::new();
|
||||||
assert_eq!(err.msg, $failure);
|
let _err = parser.parse_comb($input).unwrap_err();
|
||||||
};
|
};
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! assert_fail2 {
|
|
||||||
($input:expr, $failure:expr) => {
|
($input:expr, $failure:expr) => {
|
||||||
let err = schala_parser::program($input).unwrap_err();
|
let mut parser = Parser::new();
|
||||||
assert_eq!(err.to_string(), $failure);
|
let err = parser.parse_comb($input).unwrap_err();
|
||||||
|
println!("assert_fail: {}", err.msg);
|
||||||
|
assert_eq!(err.msg, $failure);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! assert_expr {
|
macro_rules! assert_expr {
|
||||||
($input:expr, $correct:expr) => {
|
($input:expr, $correct:expr) => {
|
||||||
let mut parser = make_parser($input);
|
let mut parser = Parser::new();
|
||||||
assert_eq!(parser.expression().unwrap(), $correct);
|
let expr = parser.expression_comb($input.trim_start());
|
||||||
|
let expr2 = parser.expression($input.trim_start());
|
||||||
|
let expr = match expr {
|
||||||
|
Err(err) => {
|
||||||
|
println!("Expression parse error: {}", err.msg);
|
||||||
|
panic!();
|
||||||
|
}
|
||||||
|
Ok(expr) => expr,
|
||||||
|
};
|
||||||
|
assert_eq!(expr, expr2.unwrap());
|
||||||
|
assert_eq!(expr, $correct);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! assert_expr2 {
|
macro_rules! assert_fail_expr {
|
||||||
($input:expr, $correct:expr) => {
|
|
||||||
let expr = schala_parser::expression($input);
|
|
||||||
if expr.is_err() {
|
|
||||||
println!("Expression parse error: {}", expr.unwrap_err());
|
|
||||||
panic!();
|
|
||||||
}
|
|
||||||
assert_eq!(expr.unwrap(), $correct);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! assert_fail_expr2 {
|
|
||||||
($input:expr, $failure:expr) => {
|
($input:expr, $failure:expr) => {
|
||||||
let _err = schala_parser::expression($input).unwrap_err();
|
let mut parser = Parser::new();
|
||||||
|
let _err = parser.expression_comb($input).unwrap_err();
|
||||||
//TODO make real tests for failures
|
//TODO make real tests for failures
|
||||||
//assert_eq!(err.to_string(), $failure);
|
//assert_eq!(err.to_string(), $failure);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
macro_rules! assert_block {
|
||||||
|
($input:expr, $correct:expr) => {
|
||||||
|
let mut parser = Parser::new();
|
||||||
|
let block = parser.block_comb($input);
|
||||||
|
let block2 = parser.block($input);
|
||||||
|
let block = match block {
|
||||||
|
Err(err) => {
|
||||||
|
println!("Expression parse error: {}", err.msg);
|
||||||
|
panic!();
|
||||||
|
}
|
||||||
|
Ok(item) => item,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(block, block2.unwrap());
|
||||||
|
assert_eq!(block, $correct);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn basic_literals() {
|
fn basic_literals() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!(".2", expr(FloatLiteral(0.2)));
|
assert_expr!(".2", expr(FloatLiteral(0.2)));
|
||||||
assert_expr2!("8.1", expr(FloatLiteral(8.1)));
|
assert_expr!("8.1", expr(FloatLiteral(8.1)));
|
||||||
assert_expr2!("0b010", expr(NatLiteral(2)));
|
assert_expr!("0b010", expr(NatLiteral(2)));
|
||||||
assert_expr2!("0b0_1_0", expr(NatLiteral(2)));
|
assert_expr!("0b0_1_0", expr(NatLiteral(2)));
|
||||||
assert_expr2!("0xff", expr(NatLiteral(255)));
|
assert_expr!("0xff", expr(NatLiteral(255)));
|
||||||
assert_expr2!("0x032f", expr(NatLiteral(815)));
|
assert_expr!("0x032f", expr(NatLiteral(815)));
|
||||||
assert_expr2!("0xf_f_", expr(NatLiteral(255)));
|
assert_expr!("0xf_f", expr(NatLiteral(255)));
|
||||||
assert_expr2!("false", expr(BoolLiteral(false)));
|
assert_expr!("false", expr(BoolLiteral(false)));
|
||||||
assert_expr2!("true", expr(BoolLiteral(true)));
|
assert_expr!("true", expr(BoolLiteral(true)));
|
||||||
assert_expr2!(r#""hello""#, expr(StringLiteral(rc("hello"))));
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn string_literals() {
|
||||||
|
use ExpressionKind::*;
|
||||||
|
|
||||||
|
assert_expr!(r#""""#, expr(strlit("")));
|
||||||
|
assert_expr!(r#""hello""#, expr(strlit("hello")));
|
||||||
|
assert_expr!(
|
||||||
|
r#"b"some bytestring""#,
|
||||||
|
expr(StringLiteral { s: rc("some bytestring"), prefix: Some(rc("b")) })
|
||||||
|
);
|
||||||
|
assert_expr!(r#""Do \n \" escapes work\t""#, expr(strlit("Do \n \" escapes work\t")));
|
||||||
|
assert_expr!(r#""Georgian letter jani \u{10ef}""#, expr(strlit("Georgian letter jani ჯ")));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn list_literals() {
|
fn list_literals() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!("[]", expr(ListLiteral(vec![])));
|
assert_expr!("[]", expr(ListLiteral(vec![])));
|
||||||
assert_expr2!("[1,2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),])));
|
assert_expr!("[1,2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),])));
|
||||||
assert_fail_expr2!("[1,,2]", "some failure");
|
assert_expr!("[1, /*no*/2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),])));
|
||||||
|
assert_fail_expr!("[1,,2]", "some failure");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -188,8 +208,8 @@ fn binexps() {
|
|||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
use StatementKind::Expression;
|
use StatementKind::Expression;
|
||||||
|
|
||||||
assert_expr2!("0xf_f_+1", binop("+", expr(NatLiteral(255)), expr(NatLiteral(1))));
|
assert_expr!("0xf_f+1", binop("+", expr(NatLiteral(255)), expr(NatLiteral(1))));
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"3; 4; 4.3",
|
"3; 4; 4.3",
|
||||||
vec![
|
vec![
|
||||||
stmt(Expression(expr(NatLiteral(3)))),
|
stmt(Expression(expr(NatLiteral(3)))),
|
||||||
@ -198,16 +218,16 @@ fn binexps() {
|
|||||||
]
|
]
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"1 + 2 * 3",
|
"1 + 2 * 3",
|
||||||
binop("+", expr(NatLiteral(1)), binop("*", expr(NatLiteral(2)), expr(NatLiteral(3))))
|
binop("+", expr(NatLiteral(1)), binop("*", expr(NatLiteral(2)), expr(NatLiteral(3))))
|
||||||
);
|
);
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"1 * 2 + 3",
|
"1 * 2 + 3",
|
||||||
binop("+", binop("*", expr(NatLiteral(1)), expr(NatLiteral(2))), expr(NatLiteral(3)))
|
binop("+", binop("*", expr(NatLiteral(1)), expr(NatLiteral(2))), expr(NatLiteral(3)))
|
||||||
);
|
);
|
||||||
assert_expr2!("1 && 2", binop("&&", expr(NatLiteral(1)), expr(NatLiteral(2))));
|
assert_expr!("1 && 2", binop("&&", expr(NatLiteral(1)), expr(NatLiteral(2))));
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"1 + 2 * 3 + 4",
|
"1 + 2 * 3 + 4",
|
||||||
binop(
|
binop(
|
||||||
"+",
|
"+",
|
||||||
@ -215,48 +235,48 @@ fn binexps() {
|
|||||||
expr(NatLiteral(4))
|
expr(NatLiteral(4))
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"(1 + 2) * 3",
|
"(1 + 2) * 3",
|
||||||
binop("*", binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))), expr(NatLiteral(3)))
|
binop("*", binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))), expr(NatLiteral(3)))
|
||||||
);
|
);
|
||||||
assert_expr2!(".1 + .2", binop("+", expr(FloatLiteral(0.1)), expr(FloatLiteral(0.2))));
|
assert_expr!(".1 + .2", binop("+", expr(FloatLiteral(0.1)), expr(FloatLiteral(0.2))));
|
||||||
assert_expr2!("1 / 2.", binop("/", expr(NatLiteral(1)), expr(FloatLiteral(2.))));
|
assert_expr!("1 / 2.", binop("/", expr(NatLiteral(1)), expr(FloatLiteral(2.))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn prefix_exps() {
|
fn prefix_exps() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!("-3", prefixop("-", expr(NatLiteral(3))));
|
assert_expr!("-3", prefixop("-", expr(NatLiteral(3))));
|
||||||
assert_expr2!("-0.2", prefixop("-", expr(FloatLiteral(0.2))));
|
assert_expr!("-0.2", prefixop("-", expr(FloatLiteral(0.2))));
|
||||||
assert_expr2!("!3", prefixop("!", expr(NatLiteral(3))));
|
assert_expr!("!3", prefixop("!", expr(NatLiteral(3))));
|
||||||
assert_expr2!("!t", prefixop("!", expr(Value(qn!(t)))));
|
assert_expr!("!t", prefixop("!", expr(Value(qn!(t)))));
|
||||||
assert_expr2!("a <- -b", binop("<-", expr(Value(qn!(a))), prefixop("-", expr(Value(qn!(b))))));
|
assert_expr!("a <- -b", binop("<-", expr(Value(qn!(a))), prefixop("-", expr(Value(qn!(b))))));
|
||||||
assert_expr2!("a <--b", binop("<--", expr(Value(qn!(a))), expr(Value(qn!(b)))));
|
assert_expr!("a <--b", binop("<--", expr(Value(qn!(a))), expr(Value(qn!(b)))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn operators() {
|
fn operators() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!("a <- 1", binop("<-", expr(Value(qn!(a))), expr(NatLiteral(1))));
|
assert_expr!("a <- 1", binop("<-", expr(Value(qn!(a))), expr(NatLiteral(1))));
|
||||||
assert_expr2!("a || 1", binop("||", expr(Value(qn!(a))), expr(NatLiteral(1))));
|
assert_expr!("a || 1", binop("||", expr(Value(qn!(a))), expr(NatLiteral(1))));
|
||||||
assert_expr2!("a <> 1", binop("<>", expr(Value(qn!(a))), expr(NatLiteral(1))));
|
assert_expr!("a <> 1", binop("<>", expr(Value(qn!(a))), expr(NatLiteral(1))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn accessors() {
|
fn accessors() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!("a.b", expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }));
|
assert_expr!("a.b", expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }));
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"a.b.c",
|
"a.b.c",
|
||||||
expr(Access {
|
expr(Access {
|
||||||
name: rc("c"),
|
name: rc("c"),
|
||||||
expr: bx(expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }))
|
expr: bx(expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }))
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"a.b.c(3)",
|
"a.b.c(3)",
|
||||||
expr(Call {
|
expr(Call {
|
||||||
f: bx(expr(Access {
|
f: bx(expr(Access {
|
||||||
@ -266,7 +286,7 @@ fn accessors() {
|
|||||||
arguments: vec![InvocationArgument::Positional(expr(NatLiteral(3)))],
|
arguments: vec![InvocationArgument::Positional(expr(NatLiteral(3)))],
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"a.b().c",
|
"a.b().c",
|
||||||
expr(Access {
|
expr(Access {
|
||||||
name: rc("c"),
|
name: rc("c"),
|
||||||
@ -282,16 +302,13 @@ fn accessors() {
|
|||||||
fn tuples() {
|
fn tuples() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!("()", expr(TupleLiteral(vec![])));
|
assert_expr!("()", expr(TupleLiteral(vec![])));
|
||||||
assert_expr2!(
|
assert_expr!(r#"("hella", 34)"#, expr(TupleLiteral(vec![expr(strlit("hella")), expr(NatLiteral(34))])));
|
||||||
r#"("hella", 34)"#,
|
assert_expr!(
|
||||||
expr(TupleLiteral(vec![expr(StringLiteral(rc("hella"))), expr(NatLiteral(34))]))
|
|
||||||
);
|
|
||||||
assert_expr2!(
|
|
||||||
r#"(1+2, "slough")"#,
|
r#"(1+2, "slough")"#,
|
||||||
expr(TupleLiteral(vec![
|
expr(TupleLiteral(vec![
|
||||||
binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))),
|
binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))),
|
||||||
expr(StringLiteral(rc("slough"))),
|
expr(strlit("slough")),
|
||||||
]))
|
]))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -300,12 +317,12 @@ fn tuples() {
|
|||||||
fn identifiers() {
|
fn identifiers() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!("a", expr(Value(qn!(a))));
|
assert_expr!("a", expr(Value(qn!(a))));
|
||||||
assert_expr2!("some_value", expr(Value(qn!(some_value))));
|
assert_expr!("some_value", expr(Value(qn!(some_value))));
|
||||||
assert_expr2!("alpha::beta::gamma", expr(Value(qn!(alpha, beta, gamma))));
|
assert_expr!("alpha::beta::gamma", expr(Value(qn!(alpha, beta, gamma))));
|
||||||
assert_expr2!("a + b", binop("+", expr(Value(qn!(a))), expr(Value(qn!(b)))));
|
assert_expr!("a + b", binop("+", expr(Value(qn!(a))), expr(Value(qn!(b)))));
|
||||||
assert_expr2!("None", expr(Value(qn!(None))));
|
assert_expr!("None", expr(Value(qn!(None))));
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"thing::item::call()",
|
"thing::item::call()",
|
||||||
expr(Call { f: bx(expr(Value(qn!(thing, item, call)))), arguments: vec![] })
|
expr(Call { f: bx(expr(Value(qn!(thing, item, call)))), arguments: vec![] })
|
||||||
);
|
);
|
||||||
@ -314,14 +331,14 @@ fn identifiers() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn named_struct() {
|
fn named_struct() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"Pandas { a: x + y }",
|
"Pandas { a: x + y }",
|
||||||
expr(NamedStruct {
|
expr(NamedStruct {
|
||||||
name: qn!(Pandas),
|
name: qn!(Pandas),
|
||||||
fields: vec![(rc("a"), binop("+", expr(Value(qn!(x))), expr(Value(qn!(y)))))]
|
fields: vec![(rc("a"), binop("+", expr(Value(qn!(x))), expr(Value(qn!(y)))))]
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"Trousers { a:1, b:800 }",
|
"Trousers { a:1, b:800 }",
|
||||||
expr(NamedStruct {
|
expr(NamedStruct {
|
||||||
name: qn!(Trousers),
|
name: qn!(Trousers),
|
||||||
@ -333,14 +350,14 @@ fn named_struct() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn index() {
|
fn index() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"armok[b,c]",
|
"armok[b,c]",
|
||||||
expr(Index {
|
expr(Index {
|
||||||
indexee: bx(expr(Value(qn!(armok)))),
|
indexee: bx(expr(Value(qn!(armok)))),
|
||||||
indexers: vec![expr(Value(qn!(b))), expr(Value(qn!(c)))]
|
indexers: vec![expr(Value(qn!(b))), expr(Value(qn!(c)))]
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"a[b,c][1]",
|
"a[b,c][1]",
|
||||||
expr(Index {
|
expr(Index {
|
||||||
indexee: bx(expr(Index {
|
indexee: bx(expr(Index {
|
||||||
@ -350,7 +367,7 @@ fn index() {
|
|||||||
indexers: vec![expr(NatLiteral(1))]
|
indexers: vec![expr(NatLiteral(1))]
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"perspicacity()[a]",
|
"perspicacity()[a]",
|
||||||
expr(Index {
|
expr(Index {
|
||||||
indexee: bx(expr(Call { f: bx(expr(Value(qn!(perspicacity)))), arguments: vec![] })),
|
indexee: bx(expr(Call { f: bx(expr(Value(qn!(perspicacity)))), arguments: vec![] })),
|
||||||
@ -362,17 +379,17 @@ fn index() {
|
|||||||
let b = expr(Index { indexee: bx(a), indexers: vec![expr(Value(qn!(b)))] });
|
let b = expr(Index { indexee: bx(a), indexers: vec![expr(Value(qn!(b)))] });
|
||||||
let c = expr(Call { f: bx(b), arguments: vec![] });
|
let c = expr(Call { f: bx(b), arguments: vec![] });
|
||||||
let d = expr(Index { indexee: bx(c), indexers: vec![expr(Value(qn!(d)))] });
|
let d = expr(Index { indexee: bx(c), indexers: vec![expr(Value(qn!(d)))] });
|
||||||
assert_expr2!("a()[b]()[d]", d);
|
assert_expr!("a()[b]()[d]", d);
|
||||||
|
|
||||||
assert_fail_expr2!("a[]", "Empty index expressions are not allowed");
|
assert_fail_expr!("a[]", "Empty index expressions are not allowed");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn while_expression() {
|
fn while_expression() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!("while { }", expr(WhileExpression { condition: None, body: Block::default() }));
|
// assert_expr_comb!("while { }", expr(WhileExpression { condition: None, body: Block::default() }));
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"while a == b { }",
|
"while a == b { }",
|
||||||
expr(WhileExpression {
|
expr(WhileExpression {
|
||||||
condition: Some(bx(binop("==", expr(Value(qn!(a))), expr(Value(qn!(b)))))),
|
condition: Some(bx(binop("==", expr(Value(qn!(a))), expr(Value(qn!(b)))))),
|
||||||
@ -385,18 +402,26 @@ fn while_expression() {
|
|||||||
fn for_expression() {
|
fn for_expression() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"for { a <- garodzny::maybeValue } return 1",
|
"for { a <- garodzny::maybeValue } return 1",
|
||||||
expr(ForExpression {
|
expr(ForExpression {
|
||||||
enumerators: vec![Enumerator { id: rc("a"), generator: expr(Value(qn!(garodzny, maybeValue))) }],
|
enumerators: vec![Enumerator {
|
||||||
|
identifier: rc("a"),
|
||||||
|
assignment: false,
|
||||||
|
generator: expr(Value(qn!(garodzny, maybeValue)))
|
||||||
|
}],
|
||||||
body: bx(ForBody::MonadicReturn(expr(NatLiteral(1))))
|
body: bx(ForBody::MonadicReturn(expr(NatLiteral(1))))
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"for n <- someRange { f(n) ; }",
|
"for n <- someRange { f(n) ; }",
|
||||||
expr(ForExpression {
|
expr(ForExpression {
|
||||||
enumerators: vec![Enumerator { id: rc("n"), generator: expr(Value(qn!(someRange))) }],
|
enumerators: vec![Enumerator {
|
||||||
|
identifier: rc("n"),
|
||||||
|
assignment: false,
|
||||||
|
generator: expr(Value(qn!(someRange)))
|
||||||
|
}],
|
||||||
body: bx(ForBody::StatementBlock(
|
body: bx(ForBody::StatementBlock(
|
||||||
vec![stmt(StatementKind::Expression(expr(Call {
|
vec![stmt(StatementKind::Expression(expr(Call {
|
||||||
f: bx(expr(Value(qn!(f)))),
|
f: bx(expr(Value(qn!(f)))),
|
||||||
@ -460,7 +485,7 @@ fn lambda_expressions() {
|
|||||||
name: rc("String"),
|
name: rc("String"),
|
||||||
params: vec![]
|
params: vec![]
|
||||||
})),
|
})),
|
||||||
body: vec![stmt(StatementKind::Expression(expr(StringLiteral(rc("q"))))),].into()
|
body: vec![stmt(StatementKind::Expression(expr(strlit("q")))),].into()
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -503,7 +528,7 @@ fn complex_lambdas() {
|
|||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_ast! {
|
assert_ast! {
|
||||||
r#"fn wahoo() { let a = 10; \(x) { x + a } };
|
r#"fn wahoo() { let a = 10; \(x) { x + a } }
|
||||||
wahoo()(3) "#,
|
wahoo()(3) "#,
|
||||||
vec![
|
vec![
|
||||||
fn_decl(Signature { name: rc("wahoo"), operator: false, type_anno: None, params: vec![] },
|
fn_decl(Signature { name: rc("wahoo"), operator: false, type_anno: None, params: vec![] },
|
||||||
@ -538,7 +563,9 @@ fn complex_lambdas() {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn reserved_words() {
|
fn reserved_words() {
|
||||||
assert_fail!("module::item::call()", "Expected an identifier, got Colon");
|
//TODO assert a good error message for this
|
||||||
|
assert_fail!("module::item::call()");
|
||||||
|
assert_expr!("modulek::item", expr(ExpressionKind::Value(qn!(modulek, item))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -604,7 +631,7 @@ fn type_annotations() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn type_declarations() {
|
fn type_declarations() {
|
||||||
use Declaration::TypeDecl;
|
use Declaration::TypeDecl;
|
||||||
assert_ast2! {
|
assert_ast! {
|
||||||
"type Alpha = Alpha", vec![
|
"type Alpha = Alpha", vec![
|
||||||
decl(TypeDecl {
|
decl(TypeDecl {
|
||||||
name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
|
name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
|
||||||
@ -620,7 +647,7 @@ fn type_declarations() {
|
|||||||
]
|
]
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"type mut Kuah = Kuah",
|
"type mut Kuah = Kuah",
|
||||||
decl(TypeDecl {
|
decl(TypeDecl {
|
||||||
name: TypeSingletonName { name: rc("Kuah"), params: vec![] },
|
name: TypeSingletonName { name: rc("Kuah"), params: vec![] },
|
||||||
@ -633,7 +660,7 @@ fn type_declarations() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_ast2! {
|
assert_ast! {
|
||||||
"type Alpha = Alpha { a: Int, b: Int }",
|
"type Alpha = Alpha { a: Int, b: Int }",
|
||||||
vec![decl(TypeDecl {
|
vec![decl(TypeDecl {
|
||||||
name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
|
name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
|
||||||
@ -651,20 +678,20 @@ fn type_declarations() {
|
|||||||
})]
|
})]
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_ast2! {
|
assert_ast! {
|
||||||
"type Alpha = { a: Int, b: Int }",
|
"type Alpha = { a: Int, b: Int }",
|
||||||
vec![decl(TypeDecl {
|
vec![decl(TypeDecl {
|
||||||
name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
|
name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
|
||||||
mutable: false,
|
mutable: false,
|
||||||
body: TypeBody::ImmediateRecord(Default::default(), vec![
|
body: TypeBody::ImmediateRecord { id: Default::default(), fields: vec![
|
||||||
(rc("a"), ty_simple("Int")),
|
(rc("a"), ty_simple("Int")),
|
||||||
(rc("b"), ty_simple("Int"))
|
(rc("b"), ty_simple("Int"))
|
||||||
])
|
]}
|
||||||
|
|
||||||
})]
|
})]
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"type Option<T> = None | Some(T)",
|
"type Option<T> = None | Some(T)",
|
||||||
vec![decl(TypeDecl {
|
vec![decl(TypeDecl {
|
||||||
name: TypeSingletonName {
|
name: TypeSingletonName {
|
||||||
@ -686,12 +713,12 @@ fn type_declarations() {
|
|||||||
})]
|
})]
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"type alias Alpha = Beta",
|
"type alias Alpha = Beta",
|
||||||
decl(Declaration::TypeAlias { alias: rc("Alpha"), original: rc("Beta") })
|
decl(Declaration::TypeAlias { alias: rc("Alpha"), original: rc("Beta") })
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_ast2!("type Complex<T, U> = Unit | Record { field: AnotherType<Bool>, field2: (Nat, Int), field3: T } | Tuple(Int, (String, T))",
|
assert_ast!("type Complex<T, U> = Unit | Record { field: AnotherType<Bool>, field2: (Nat, Int), field3: T } | Tuple(Int, (String, T))",
|
||||||
decl(TypeDecl {
|
decl(TypeDecl {
|
||||||
name: TypeSingletonName { name: rc("Complex"), params: vec![
|
name: TypeSingletonName { name: rc("Complex"), params: vec![
|
||||||
TypeIdentifier::Singleton(TypeSingletonName { name: rc("T"), params: vec![] }),
|
TypeIdentifier::Singleton(TypeSingletonName { name: rc("T"), params: vec![] }),
|
||||||
@ -728,7 +755,7 @@ fn type_declarations() {
|
|||||||
fn declarations() {
|
fn declarations() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"let q_q = Yolo::Swaggins",
|
"let q_q = Yolo::Swaggins",
|
||||||
vec![decl(Declaration::Binding {
|
vec![decl(Declaration::Binding {
|
||||||
name: rc("q_q"),
|
name: rc("q_q"),
|
||||||
@ -743,7 +770,7 @@ fn declarations() {
|
|||||||
fn bindings() {
|
fn bindings() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"let mut a = 10",
|
"let mut a = 10",
|
||||||
vec![decl(Declaration::Binding {
|
vec![decl(Declaration::Binding {
|
||||||
name: rc("a"),
|
name: rc("a"),
|
||||||
@ -753,7 +780,7 @@ fn bindings() {
|
|||||||
})]
|
})]
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"let a = 2 + a",
|
"let a = 2 + a",
|
||||||
vec![stmt(StatementKind::Declaration(Declaration::Binding {
|
vec![stmt(StatementKind::Declaration(Declaration::Binding {
|
||||||
name: rc("a"),
|
name: rc("a"),
|
||||||
@ -763,7 +790,7 @@ fn bindings() {
|
|||||||
}))]
|
}))]
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"let a: Nat = 2",
|
"let a: Nat = 2",
|
||||||
vec![stmt(StatementKind::Declaration(Declaration::Binding {
|
vec![stmt(StatementKind::Declaration(Declaration::Binding {
|
||||||
name: rc("a"),
|
name: rc("a"),
|
||||||
@ -777,7 +804,7 @@ fn bindings() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn functions() {
|
fn functions() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"fn oi()",
|
"fn oi()",
|
||||||
vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature {
|
vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature {
|
||||||
name: rc("oi"),
|
name: rc("oi"),
|
||||||
@ -787,12 +814,12 @@ fn functions() {
|
|||||||
})))]
|
})))]
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"oi()",
|
"oi()",
|
||||||
vec![stmt(StatementKind::Expression(expr(Call { f: bx(expr(Value(qn!(oi)))), arguments: vec![] })))]
|
vec![stmt(StatementKind::Expression(expr(Call { f: bx(expr(Value(qn!(oi)))), arguments: vec![] })))]
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"oi(a, 2+2)",
|
"oi(a, 2+2)",
|
||||||
expr(Call {
|
expr(Call {
|
||||||
f: bx(expr(Value(qn!(oi)))),
|
f: bx(expr(Value(qn!(oi)))),
|
||||||
@ -802,9 +829,10 @@ fn functions() {
|
|||||||
]
|
]
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
assert_fail!("a(b,,c)", "Expected a literal expression, got Comma");
|
let err_msg = "0: at line 1, in Alpha:\na(b,,c)\n ^\n\n1: at line 1, in Alt:\na(b,,c)\n ^\n\n2: at line 1, in token:\na(b,,c)\n ^\n\n3: at line 1, in identifier-expr:\na(b,,c)\n ^\n\n4: at line 1, in Alt:\na(b,,c)\n ^\n\n5: at line 1, in primary-expr:\na(b,,c)\n ^\n\n6: at line 1, in extended-expr:\na(b,,c)\n ^\n\n7: at line 1, in prefix-expr:\na(b,,c)\n ^\n\n8: at line 1, in expression-kind:\na(b,,c)\n ^\n\n9: at line 1, in Alt:\na(b,,c)\n ^\n\n10: at line 1, in invocation-argument:\na(b,,c)\n ^\n\n11: at line 1, in call-part:\na(b,,c)\n ^\n\n12: at line 1, in extended-expr:\na(b,,c)\n^\n\n13: at line 1, in prefix-expr:\na(b,,c)\n^\n\n14: at line 1, in expression-kind:\na(b,,c)\n^\n\n15: at line 1, in Parsing-statement:\na(b,,c)\n^\n\n16: at line 1, in AST:\na(b,,c)\n^\n\n";
|
||||||
|
assert_fail!("a(b,,c)", err_msg);
|
||||||
|
|
||||||
assert_ast2!(
|
assert_ast!(
|
||||||
"fn a(b, c: Int): Int",
|
"fn a(b, c: Int): Int",
|
||||||
vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature {
|
vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature {
|
||||||
name: rc("a"),
|
name: rc("a"),
|
||||||
@ -823,18 +851,57 @@ fn functions() {
|
|||||||
type_anno: Some(TypeIdentifier::Singleton(TypeSingletonName { name: rc("Int"), params: vec![] })),
|
type_anno: Some(TypeIdentifier::Singleton(TypeSingletonName { name: rc("Int"), params: vec![] })),
|
||||||
})))]
|
})))]
|
||||||
);
|
);
|
||||||
|
|
||||||
|
let source = r#"
|
||||||
|
fn some_function() {
|
||||||
|
|
||||||
|
}"#;
|
||||||
|
|
||||||
|
assert_ast!(
|
||||||
|
source,
|
||||||
|
vec![fn_decl(
|
||||||
|
Signature { name: rc("some_function"), operator: false, type_anno: None, params: vec![] },
|
||||||
|
vec![].into()
|
||||||
|
)]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn custom_operator() {
|
||||||
|
let source = "fn (!!)(lhs,rhs)";
|
||||||
|
assert_ast!(
|
||||||
|
source,
|
||||||
|
vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature {
|
||||||
|
name: rc("!!"),
|
||||||
|
operator: true,
|
||||||
|
params: vec![
|
||||||
|
FormalParam { name: rc("lhs"), default: None, anno: None },
|
||||||
|
FormalParam { name: rc("rhs"), default: None, anno: None },
|
||||||
|
],
|
||||||
|
type_anno: None
|
||||||
|
})))]
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn max_function_params() {
|
fn max_function_params() {
|
||||||
let mut buf = "fn longfunc(".to_string();
|
let mut buf = "fn longfunc(".to_string();
|
||||||
for n in 0..256 {
|
for n in 0..255 {
|
||||||
write!(buf, "a{}, ", n).unwrap();
|
write!(buf, "a{}, ", n).unwrap();
|
||||||
}
|
}
|
||||||
|
write!(buf, " a256").unwrap();
|
||||||
write!(buf, ") {{ return 20 }}").unwrap();
|
write!(buf, ") {{ return 20 }}").unwrap();
|
||||||
//assert_fail2!(&buf, "A function cannot have more than 255 arguments");
|
//TODO need to create a good, custom error message for this case
|
||||||
//TODO better errors again
|
//assert_fail!(&buf, "A function cannot have more than 255 arguments");
|
||||||
assert_fail2!(&buf, "error at 1:1439: expected ['a' ..= 'z' | 'A' ..= 'Z' | '_']");
|
assert_fail!(&buf);
|
||||||
|
|
||||||
|
let mut buf = r#"\("#.to_string();
|
||||||
|
for n in 0..255 {
|
||||||
|
write!(buf, "a{}, ", n).unwrap();
|
||||||
|
}
|
||||||
|
write!(buf, " a256").unwrap();
|
||||||
|
write!(buf, ") {{ return 10 }}").unwrap();
|
||||||
|
assert_fail!(&buf);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -922,44 +989,42 @@ fn interface() {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn impls() {
|
fn impls() {
|
||||||
use Declaration::{FuncSig, Impl};
|
use Declaration::{FuncDecl, Impl};
|
||||||
|
|
||||||
|
let block = vec![
|
||||||
|
stmt(FuncDecl(
|
||||||
|
Signature { name: rc("yolo"), operator: false, params: vec![], type_anno: None },
|
||||||
|
vec![].into(),
|
||||||
|
)),
|
||||||
|
stmt(FuncDecl(
|
||||||
|
Signature { name: rc("swagg"), operator: false, params: vec![], type_anno: None },
|
||||||
|
vec![].into(),
|
||||||
|
)),
|
||||||
|
];
|
||||||
|
|
||||||
assert_ast!(
|
assert_ast!(
|
||||||
"impl Heh { fn yolo(); fn swagg(); }",
|
"impl Heh { fn yolo() { }; fn swagg() { } }",
|
||||||
vec![decl(Impl {
|
vec![decl(Impl { type_name: ty_simple("Heh"), interface_name: None, block: block.clone() })]
|
||||||
type_name: ty_simple("Heh"),
|
|
||||||
interface_name: None,
|
|
||||||
block: vec![
|
|
||||||
FuncSig(Signature { name: rc("yolo"), operator: false, params: vec![], type_anno: None }),
|
|
||||||
FuncSig(Signature { name: rc("swagg"), operator: false, params: vec![], type_anno: None })
|
|
||||||
]
|
|
||||||
})]
|
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_ast!(
|
assert_ast!(
|
||||||
"impl Heh<X> { fn yolo(); fn swagg(); }",
|
"impl Heh<X> { fn yolo() { }; fn swagg() { }; }",
|
||||||
vec![decl(Impl {
|
vec![decl(Impl {
|
||||||
type_name: TypeIdentifier::Singleton(TypeSingletonName {
|
type_name: TypeIdentifier::Singleton(TypeSingletonName {
|
||||||
name: rc("Heh"),
|
name: rc("Heh"),
|
||||||
params: vec![ty_simple("X")]
|
params: vec![ty_simple("X")]
|
||||||
}),
|
}),
|
||||||
interface_name: None,
|
interface_name: None,
|
||||||
block: vec![
|
block: block.clone(),
|
||||||
FuncSig(Signature { name: rc("yolo"), operator: false, params: vec![], type_anno: None }),
|
|
||||||
FuncSig(Signature { name: rc("swagg"), operator: false, params: vec![], type_anno: None })
|
|
||||||
]
|
|
||||||
})]
|
})]
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_ast!(
|
assert_ast!(
|
||||||
"impl Heh for Saraz { fn yolo(); fn swagg(); }",
|
"impl Heh for Saraz { fn yolo() {}; fn swagg() {} }",
|
||||||
vec![decl(Impl {
|
vec![decl(Impl {
|
||||||
type_name: ty_simple("Saraz"),
|
type_name: ty_simple("Saraz"),
|
||||||
interface_name: Some(TypeSingletonName { name: rc("Heh"), params: vec![] }),
|
interface_name: Some(TypeSingletonName { name: rc("Heh"), params: vec![] }),
|
||||||
block: vec![
|
block: block.clone(),
|
||||||
FuncSig(Signature { name: rc("yolo"), operator: false, params: vec![], type_anno: None }),
|
|
||||||
FuncSig(Signature { name: rc("swagg"), operator: false, params: vec![], type_anno: None })
|
|
||||||
]
|
|
||||||
})]
|
})]
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -1058,14 +1123,7 @@ fn imports() {
|
|||||||
}))]
|
}))]
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_ast! {
|
assert_fail!("import bespouri::{}");
|
||||||
"import bespouri::{}",
|
|
||||||
vec![stmt(StatementKind::Import(ImportSpecifier {
|
|
||||||
id: Default::default(),
|
|
||||||
path_components: vec![rc("bespouri")],
|
|
||||||
imported_names: ImportedNames::List(vec![]),
|
|
||||||
}))]
|
|
||||||
};
|
|
||||||
|
|
||||||
assert_ast! {
|
assert_ast! {
|
||||||
"import bespouri::*",
|
"import bespouri::*",
|
||||||
@ -1091,7 +1149,8 @@ fn if_exprs() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expr2!(
|
//TODO add tests for named expressions
|
||||||
|
assert_expr!(
|
||||||
"if a then b else c",
|
"if a then b else c",
|
||||||
expr(IfExpression {
|
expr(IfExpression {
|
||||||
discriminator: Some(bx(expr(Value(qn!(a))))),
|
discriminator: Some(bx(expr(Value(qn!(a))))),
|
||||||
@ -1103,8 +1162,7 @@ fn if_exprs() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
assert_expr!(
|
assert_expr!(
|
||||||
r#"
|
r#"if true then {
|
||||||
if true then {
|
|
||||||
let a = 10
|
let a = 10
|
||||||
b
|
b
|
||||||
} else {
|
} else {
|
||||||
@ -1134,7 +1192,7 @@ fn pattern_matching() {
|
|||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
for item in ["if x is Some(a) then { 4 } else { 9 }", "if x is Some(a) then 4 else 9"] {
|
for item in ["if x is Some(a) then { 4 } else { 9 }", "if x is Some(a) then 4 else 9"] {
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
item,
|
item,
|
||||||
expr(IfExpression {
|
expr(IfExpression {
|
||||||
discriminator: Some(bx(expr(Value(qn!(x))))),
|
discriminator: Some(bx(expr(Value(qn!(x))))),
|
||||||
@ -1147,7 +1205,7 @@ fn pattern_matching() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"if x is Something { a, b: x } then { 4 } else { 9 }",
|
"if x is Something { a, b: x } then { 4 } else { 9 }",
|
||||||
expr(IfExpression {
|
expr(IfExpression {
|
||||||
discriminator: Some(bx(expr(Value(qn!(x))))),
|
discriminator: Some(bx(expr(Value(qn!(x))))),
|
||||||
@ -1162,7 +1220,7 @@ fn pattern_matching() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"if x is -1 then 1 else 2",
|
"if x is -1 then 1 else 2",
|
||||||
expr(IfExpression {
|
expr(IfExpression {
|
||||||
discriminator: Some(bx(expr(Value(qn!(x))))),
|
discriminator: Some(bx(expr(Value(qn!(x))))),
|
||||||
@ -1174,7 +1232,7 @@ fn pattern_matching() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"if x is true then 1 else 2",
|
"if x is true then 1 else 2",
|
||||||
expr(IfExpression {
|
expr(IfExpression {
|
||||||
discriminator: Some(bx(expr(Value(qn!(x))))),
|
discriminator: Some(bx(expr(Value(qn!(x))))),
|
||||||
@ -1186,8 +1244,8 @@ fn pattern_matching() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
"if x { is 1 then 5, else 20 }",
|
"if x { is 1 then 5; else 20 }",
|
||||||
expr(IfExpression {
|
expr(IfExpression {
|
||||||
discriminator: Some(bx(expr(Value(qn!(x))))),
|
discriminator: Some(bx(expr(Value(qn!(x))))),
|
||||||
body: bx(IfExpressionBody::CondList(vec![
|
body: bx(IfExpressionBody::CondList(vec![
|
||||||
@ -1208,7 +1266,7 @@ fn pattern_matching() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expr2!(
|
assert_expr!(
|
||||||
r#"if x is "gnosticism" then 1 else 2"#,
|
r#"if x is "gnosticism" then 1 else 2"#,
|
||||||
expr(IfExpression {
|
expr(IfExpression {
|
||||||
discriminator: Some(bx(expr(Value(qn!(x))))),
|
discriminator: Some(bx(expr(Value(qn!(x))))),
|
||||||
@ -1223,14 +1281,14 @@ fn pattern_matching() {
|
|||||||
assert_expr! {
|
assert_expr! {
|
||||||
r#"
|
r#"
|
||||||
if (45, "panda", false, 2.2) {
|
if (45, "panda", false, 2.2) {
|
||||||
is (49, "pablo", _, 28.4) then "no"
|
is (49, "pablo", _, 28.4) then "no"
|
||||||
is (_, "panda", _, -2.2) then "yes"
|
is (_, "panda", _, -2.2) then "yes"
|
||||||
is _ then "maybe"
|
is _ then "maybe"
|
||||||
}"#,
|
}"#,
|
||||||
expr(
|
expr(
|
||||||
IfExpression {
|
IfExpression {
|
||||||
discriminator: Some(bx(expr(TupleLiteral(vec![
|
discriminator: Some(bx(expr(TupleLiteral(vec![
|
||||||
expr(NatLiteral(45)), expr(StringLiteral(rc("panda"))), expr(BoolLiteral(false)), expr(FloatLiteral(2.2))
|
expr(NatLiteral(45)), expr(strlit("panda")), expr(BoolLiteral(false)), expr(FloatLiteral(2.2))
|
||||||
])))),
|
])))),
|
||||||
body: bx(IfExpressionBody::CondList(vec![
|
body: bx(IfExpressionBody::CondList(vec![
|
||||||
ConditionArm {
|
ConditionArm {
|
||||||
@ -1243,7 +1301,7 @@ if (45, "panda", false, 2.2) {
|
|||||||
]
|
]
|
||||||
)),
|
)),
|
||||||
guard: None,
|
guard: None,
|
||||||
body: vec![stmt(StatementKind::Expression(expr(StringLiteral(rc("no")))))].into(),
|
body: vec![stmt(StatementKind::Expression(expr(strlit("no"))))].into(),
|
||||||
},
|
},
|
||||||
ConditionArm {
|
ConditionArm {
|
||||||
condition: Condition::Pattern(Pattern::TuplePattern(
|
condition: Condition::Pattern(Pattern::TuplePattern(
|
||||||
@ -1255,12 +1313,12 @@ if (45, "panda", false, 2.2) {
|
|||||||
]
|
]
|
||||||
)),
|
)),
|
||||||
guard: None,
|
guard: None,
|
||||||
body: vec![stmt(StatementKind::Expression(expr(StringLiteral(rc("yes")))))].into(),
|
body: vec![stmt(StatementKind::Expression(expr(strlit("yes"))))].into(),
|
||||||
},
|
},
|
||||||
ConditionArm {
|
ConditionArm {
|
||||||
condition: Condition::Pattern(Pattern::Ignored),
|
condition: Condition::Pattern(Pattern::Ignored),
|
||||||
guard: None,
|
guard: None,
|
||||||
body: vec![exst(StringLiteral(rc("maybe")))].into(),
|
body: vec![exst(strlit("maybe"))].into(),
|
||||||
},
|
},
|
||||||
]))
|
]))
|
||||||
}
|
}
|
||||||
@ -1272,7 +1330,7 @@ if (45, "panda", false, 2.2) {
|
|||||||
fn flow_control() {
|
fn flow_control() {
|
||||||
use ExpressionKind::*;
|
use ExpressionKind::*;
|
||||||
|
|
||||||
// This is an incorrect program, but shoudl parse correctly.
|
// This is an incorrect program, but should parse correctly.
|
||||||
let source = r#"
|
let source = r#"
|
||||||
fn test() {
|
fn test() {
|
||||||
let a = 10;
|
let a = 10;
|
||||||
@ -1310,7 +1368,94 @@ fn blocks() {
|
|||||||
let cases = ["{ a }", "{ a; }", "{a}", "{ a\n }", "{ a\n\n }", "{ a;\n\n; }"];
|
let cases = ["{ a }", "{ a; }", "{a}", "{ a\n }", "{ a\n\n }", "{ a;\n\n; }"];
|
||||||
|
|
||||||
for case in cases.iter() {
|
for case in cases.iter() {
|
||||||
let block = schala_parser::block(case);
|
assert_block!(case, vec![exst(Value(qn!(a)))].into());
|
||||||
assert_eq!(block.unwrap(), vec![exst(Value(qn!(a)))].into());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let source = r#"{
|
||||||
|
fn quah() {
|
||||||
|
fn foo() { }
|
||||||
|
}
|
||||||
|
}"#;
|
||||||
|
assert_block!(
|
||||||
|
source,
|
||||||
|
vec![decl(Declaration::FuncDecl(
|
||||||
|
Signature { name: rc("quah"), operator: false, params: vec![], type_anno: None },
|
||||||
|
vec![decl(Declaration::FuncDecl(
|
||||||
|
Signature { name: rc("foo"), operator: false, params: vec![], type_anno: None },
|
||||||
|
vec![].into(),
|
||||||
|
))]
|
||||||
|
.into()
|
||||||
|
))]
|
||||||
|
.into()
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_block!("{}", vec![].into());
|
||||||
|
|
||||||
|
let source = r#"{
|
||||||
|
|
||||||
|
//hella
|
||||||
|
4_5 //bog
|
||||||
|
11; /*chutney*/0xf
|
||||||
|
}"#;
|
||||||
|
|
||||||
|
assert_block!(
|
||||||
|
source,
|
||||||
|
vec![
|
||||||
|
Statement {
|
||||||
|
id: Default::default(),
|
||||||
|
location: Default::default(),
|
||||||
|
kind: StatementKind::Expression(Expression::new(
|
||||||
|
Default::default(),
|
||||||
|
ExpressionKind::NatLiteral(45)
|
||||||
|
))
|
||||||
|
},
|
||||||
|
Statement {
|
||||||
|
id: Default::default(),
|
||||||
|
location: Default::default(),
|
||||||
|
kind: StatementKind::Expression(Expression::new(
|
||||||
|
Default::default(),
|
||||||
|
ExpressionKind::NatLiteral(11)
|
||||||
|
))
|
||||||
|
},
|
||||||
|
Statement {
|
||||||
|
id: Default::default(),
|
||||||
|
location: Default::default(),
|
||||||
|
kind: StatementKind::Expression(Expression::new(
|
||||||
|
Default::default(),
|
||||||
|
ExpressionKind::NatLiteral(15)
|
||||||
|
))
|
||||||
|
},
|
||||||
|
]
|
||||||
|
.into()
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn comments() {
|
||||||
|
use ExpressionKind::*;
|
||||||
|
|
||||||
|
let source = "1 + /* hella /* bro */ */ 2";
|
||||||
|
assert_expr!(source, binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))));
|
||||||
|
|
||||||
|
//TODO make sure this error message makes sense
|
||||||
|
let source = "1 + /* hella /* bro */ 2";
|
||||||
|
assert_fail_expr!(source, "foo");
|
||||||
|
|
||||||
|
let source = "1 + /* hella */ bro */ 2";
|
||||||
|
assert_fail_expr!(source, binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))));
|
||||||
|
|
||||||
|
let source = "5//no man\n";
|
||||||
|
assert_ast!(source, vec![exst(NatLiteral(5))]);
|
||||||
|
|
||||||
|
let source = " /*yolo*/ barnaby";
|
||||||
|
assert_ast!(source, exst(ExpressionKind::Value(qn!(barnaby))));
|
||||||
|
}
|
||||||
|
|
||||||
|
//TODO support backtick operators like this
|
||||||
|
/*
|
||||||
|
#[test]
|
||||||
|
fn backtick_operators() {
|
||||||
|
let output = token_kinds("1 `plus` 2");
|
||||||
|
assert_eq!(output, vec![digit!("1"), op!("plus"), digit!("2")]);
|
||||||
|
}
|
||||||
|
*/
|
@ -32,7 +32,7 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
// First reduce all functions
|
// First reduce all functions
|
||||||
// TODO once this works, maybe rewrite it using the Visitor
|
// TODO once this works, maybe rewrite it using the Visitor
|
||||||
for statement in ast.statements.statements.iter() {
|
for statement in ast.statements.statements.iter() {
|
||||||
self.top_level_statement(statement);
|
self.top_level_definition(statement);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Then compute the entrypoint statements (which may reference previously-computed
|
// Then compute the entrypoint statements (which may reference previously-computed
|
||||||
@ -51,9 +51,8 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
..
|
..
|
||||||
}) => {
|
}) => {
|
||||||
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
||||||
let def_id = symbol.def_id().unwrap();
|
|
||||||
entrypoint.push(Statement::Binding {
|
entrypoint.push(Statement::Binding {
|
||||||
id: def_id,
|
id: symbol.def_id(),
|
||||||
constant: *constant,
|
constant: *constant,
|
||||||
expr: self.expression(expr),
|
expr: self.expression(expr),
|
||||||
});
|
});
|
||||||
@ -65,17 +64,30 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
ReducedIR { functions: self.functions, entrypoint }
|
ReducedIR { functions: self.functions, entrypoint }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn top_level_statement(&mut self, statement: &ast::Statement) {
|
fn top_level_definition(&mut self, statement: &ast::Statement<ast::StatementKind>) {
|
||||||
let ast::Statement { id: item_id, kind, .. } = statement;
|
let ast::Statement { id: item_id, kind, .. } = statement;
|
||||||
match kind {
|
match kind {
|
||||||
ast::StatementKind::Expression(_expr) => {
|
ast::StatementKind::Expression(_expr) => {
|
||||||
//TODO expressions can in principle contain definitions, but I won't worry
|
//TODO expressions can in principle contain definitions, but I won't worry
|
||||||
//about it now
|
//about it now
|
||||||
}
|
}
|
||||||
ast::StatementKind::Declaration(decl) =>
|
ast::StatementKind::Declaration(decl) => match decl {
|
||||||
if let ast::Declaration::FuncDecl(_, statements) = decl {
|
ast::Declaration::FuncDecl(_, statements) => {
|
||||||
self.insert_function_definition(item_id, statements);
|
self.insert_function_definition(item_id, statements);
|
||||||
},
|
}
|
||||||
|
ast::Declaration::Impl { type_name: _, interface_name: _, block } =>
|
||||||
|
for item in block {
|
||||||
|
if let ast::Statement {
|
||||||
|
id: item_id,
|
||||||
|
kind: ast::Declaration::FuncDecl(_, statements),
|
||||||
|
..
|
||||||
|
} = item
|
||||||
|
{
|
||||||
|
self.insert_function_definition(item_id, statements);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_ => (),
|
||||||
|
},
|
||||||
// Imports should have already been processed by the symbol table and are irrelevant
|
// Imports should have already been processed by the symbol table and are irrelevant
|
||||||
// for this representation.
|
// for this representation.
|
||||||
ast::StatementKind::Import(..) => (),
|
ast::StatementKind::Import(..) => (),
|
||||||
@ -85,7 +97,10 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn function_internal_statement(&mut self, statement: &ast::Statement) -> Option<Statement> {
|
fn function_internal_statement(
|
||||||
|
&mut self,
|
||||||
|
statement: &ast::Statement<ast::StatementKind>,
|
||||||
|
) -> Option<Statement> {
|
||||||
let ast::Statement { id: item_id, kind, .. } = statement;
|
let ast::Statement { id: item_id, kind, .. } = statement;
|
||||||
match kind {
|
match kind {
|
||||||
ast::StatementKind::Expression(expr) => Some(Statement::Expression(self.expression(expr))),
|
ast::StatementKind::Expression(expr) => Some(Statement::Expression(self.expression(expr))),
|
||||||
@ -96,8 +111,11 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
}
|
}
|
||||||
ast::Declaration::Binding { constant, expr, .. } => {
|
ast::Declaration::Binding { constant, expr, .. } => {
|
||||||
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
||||||
let def_id = symbol.def_id().unwrap();
|
Some(Statement::Binding {
|
||||||
Some(Statement::Binding { id: def_id, constant: *constant, expr: self.expression(expr) })
|
id: symbol.def_id(),
|
||||||
|
constant: *constant,
|
||||||
|
expr: self.expression(expr),
|
||||||
|
})
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
},
|
},
|
||||||
@ -115,26 +133,48 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
|
|
||||||
fn insert_function_definition(&mut self, item_id: &ast::ItemId, statements: &ast::Block) {
|
fn insert_function_definition(&mut self, item_id: &ast::ItemId, statements: &ast::Block) {
|
||||||
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
|
||||||
let def_id = symbol.def_id().unwrap();
|
|
||||||
let function_def = FunctionDefinition { body: self.function_internal_block(statements) };
|
let function_def = FunctionDefinition { body: self.function_internal_block(statements) };
|
||||||
self.functions.insert(def_id, function_def);
|
self.functions.insert(symbol.def_id(), function_def);
|
||||||
|
}
|
||||||
|
|
||||||
|
//TODO this needs to be type-aware to work correctly
|
||||||
|
fn lookup_method(&mut self, name: &str) -> Option<DefId> {
|
||||||
|
for (def_id, function) in self.functions.iter() {
|
||||||
|
let symbol = self.symbol_table.lookup_symbol_by_def(def_id)?;
|
||||||
|
println!("Def Id: {} symbol: {:?}", def_id, symbol);
|
||||||
|
if symbol.local_name() == name {
|
||||||
|
return Some(*def_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expression(&mut self, expr: &ast::Expression) -> Expression {
|
fn expression(&mut self, expr: &ast::Expression) -> Expression {
|
||||||
use crate::ast::ExpressionKind::*;
|
use crate::ast::ExpressionKind::*;
|
||||||
|
|
||||||
match &expr.kind {
|
match &expr.kind {
|
||||||
|
SelfValue => Expression::Lookup(Lookup::SelfParam),
|
||||||
NatLiteral(n) => Expression::Literal(Literal::Nat(*n)),
|
NatLiteral(n) => Expression::Literal(Literal::Nat(*n)),
|
||||||
FloatLiteral(f) => Expression::Literal(Literal::Float(*f)),
|
FloatLiteral(f) => Expression::Literal(Literal::Float(*f)),
|
||||||
StringLiteral(s) => Expression::Literal(Literal::StringLit(s.clone())),
|
//TODO implement handling string literal prefixes
|
||||||
|
StringLiteral { s, prefix: _ } => Expression::Literal(Literal::StringLit(s.clone())),
|
||||||
BoolLiteral(b) => Expression::Literal(Literal::Bool(*b)),
|
BoolLiteral(b) => Expression::Literal(Literal::Bool(*b)),
|
||||||
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
|
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
|
||||||
PrefixExp(op, arg) => self.prefix(op, arg),
|
PrefixExp(op, arg) => self.prefix(op, arg),
|
||||||
Value(qualified_name) => self.value(qualified_name),
|
Value(qualified_name) => self.value(qualified_name),
|
||||||
Call { f, arguments } => Expression::Call {
|
Call { f, arguments } => {
|
||||||
f: Box::new(self.expression(f)),
|
let f = self.expression(f);
|
||||||
args: arguments.iter().map(|arg| self.invocation_argument(arg)).collect(),
|
let args = arguments.iter().map(|arg| self.invocation_argument(arg)).collect();
|
||||||
},
|
//TODO need to have full type availability at this point to do this method lookup
|
||||||
|
//correctly
|
||||||
|
if let Expression::Access { name, expr } = f {
|
||||||
|
let def_id = self.lookup_method(&name).unwrap();
|
||||||
|
let method = Expression::Lookup(Lookup::Function(def_id));
|
||||||
|
Expression::CallMethod { f: Box::new(method), args, self_expr: expr }
|
||||||
|
} else {
|
||||||
|
Expression::Call { f: Box::new(f), args }
|
||||||
|
}
|
||||||
|
}
|
||||||
TupleLiteral(exprs) => Expression::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
|
TupleLiteral(exprs) => Expression::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
|
||||||
IfExpression { discriminator, body } =>
|
IfExpression { discriminator, body } =>
|
||||||
self.reduce_if_expression(discriminator.as_ref().map(|x| x.as_ref()), body),
|
self.reduce_if_expression(discriminator.as_ref().map(|x| x.as_ref()), body),
|
||||||
@ -145,7 +185,7 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
NamedStruct { name, fields } => {
|
NamedStruct { name, fields } => {
|
||||||
let symbol = match self.symbol_table.lookup_symbol(&name.id) {
|
let symbol = match self.symbol_table.lookup_symbol(&name.id) {
|
||||||
Some(symbol) => symbol,
|
Some(symbol) => symbol,
|
||||||
None => return Expression::ReductionError(format!("No symbol found for {:?}", name)),
|
None => return Expression::ReductionError(format!("No symbol found for {}", name)),
|
||||||
};
|
};
|
||||||
let (tag, type_id) = match symbol.spec() {
|
let (tag, type_id) = match symbol.spec() {
|
||||||
SymbolSpec::RecordConstructor { tag, type_id } => (tag, type_id),
|
SymbolSpec::RecordConstructor { tag, type_id } => (tag, type_id),
|
||||||
@ -305,7 +345,7 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
let lval = match &lhs.kind {
|
let lval = match &lhs.kind {
|
||||||
ast::ExpressionKind::Value(qualified_name) => {
|
ast::ExpressionKind::Value(qualified_name) => {
|
||||||
if let Some(symbol) = self.symbol_table.lookup_symbol(&qualified_name.id) {
|
if let Some(symbol) = self.symbol_table.lookup_symbol(&qualified_name.id) {
|
||||||
symbol.def_id().unwrap()
|
symbol.def_id()
|
||||||
} else {
|
} else {
|
||||||
return ReductionError(format!("Couldn't look up name: {:?}", qualified_name));
|
return ReductionError(format!("Couldn't look up name: {:?}", qualified_name));
|
||||||
}
|
}
|
||||||
@ -330,16 +370,16 @@ impl<'a, 'b> Reducer<'a, 'b> {
|
|||||||
let symbol = match self.symbol_table.lookup_symbol(&qualified_name.id) {
|
let symbol = match self.symbol_table.lookup_symbol(&qualified_name.id) {
|
||||||
Some(s) => s,
|
Some(s) => s,
|
||||||
None =>
|
None =>
|
||||||
return Expression::ReductionError(format!("No symbol found for name: {:?}", qualified_name)),
|
return Expression::ReductionError(format!("No symbol found for name: `{}`", qualified_name)),
|
||||||
};
|
};
|
||||||
|
|
||||||
let def_id = symbol.def_id();
|
let def_id = symbol.def_id();
|
||||||
|
|
||||||
match symbol.spec() {
|
match symbol.spec() {
|
||||||
Builtin(b) => Expression::Callable(Callable::Builtin(b)),
|
Builtin(b) => Expression::Callable(Callable::Builtin(b)),
|
||||||
Func => Expression::Lookup(Lookup::Function(def_id.unwrap())),
|
Func { .. } => Expression::Lookup(Lookup::Function(def_id)),
|
||||||
GlobalBinding => Expression::Lookup(Lookup::GlobalVar(def_id.unwrap())),
|
GlobalBinding => Expression::Lookup(Lookup::GlobalVar(def_id)),
|
||||||
LocalVariable => Expression::Lookup(Lookup::LocalVar(def_id.unwrap())),
|
LocalVariable => Expression::Lookup(Lookup::LocalVar(def_id)),
|
||||||
FunctionParam(n) => Expression::Lookup(Lookup::Param(n)),
|
FunctionParam(n) => Expression::Lookup(Lookup::Param(n)),
|
||||||
DataConstructor { tag, type_id } =>
|
DataConstructor { tag, type_id } =>
|
||||||
Expression::Callable(Callable::DataConstructor { type_id, tag }),
|
Expression::Callable(Callable::DataConstructor { type_id, tag }),
|
||||||
@ -392,7 +432,7 @@ impl ast::Pattern {
|
|||||||
SymbolSpec::DataConstructor { tag, type_id: _ } =>
|
SymbolSpec::DataConstructor { tag, type_id: _ } =>
|
||||||
Pattern::Tuple { tag: Some(tag), subpatterns: vec![] },
|
Pattern::Tuple { tag: Some(tag), subpatterns: vec![] },
|
||||||
SymbolSpec::LocalVariable => {
|
SymbolSpec::LocalVariable => {
|
||||||
let def_id = symbol.def_id().unwrap();
|
let def_id = symbol.def_id();
|
||||||
Pattern::Binding(def_id)
|
Pattern::Binding(def_id)
|
||||||
}
|
}
|
||||||
spec => return Err(format!("Unexpected VarOrName symbol: {:?}", spec).into()),
|
spec => return Err(format!("Unexpected VarOrName symbol: {:?}", spec).into()),
|
@ -40,5 +40,22 @@ fn test_ir() {
|
|||||||
|
|
||||||
let reduced = build_ir(src);
|
let reduced = build_ir(src);
|
||||||
assert_eq!(reduced.functions.len(), 3);
|
assert_eq!(reduced.functions.len(), 3);
|
||||||
//assert!(1 == 2);
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_methods() {
|
||||||
|
let src = r#"
|
||||||
|
type Thing = Thing
|
||||||
|
impl Thing {
|
||||||
|
fn a_method() {
|
||||||
|
20
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
let a = Thing
|
||||||
|
4 + a.a_method()
|
||||||
|
"#;
|
||||||
|
let reduced = build_ir(src);
|
||||||
|
assert_eq!(reduced.functions.len(), 1);
|
||||||
}
|
}
|
@ -57,6 +57,7 @@ pub enum Expression {
|
|||||||
Access { name: String, expr: Box<Expression> },
|
Access { name: String, expr: Box<Expression> },
|
||||||
Callable(Callable),
|
Callable(Callable),
|
||||||
Call { f: Box<Expression>, args: Vec<Expression> },
|
Call { f: Box<Expression>, args: Vec<Expression> },
|
||||||
|
CallMethod { f: Box<Expression>, args: Vec<Expression>, self_expr: Box<Expression> },
|
||||||
Conditional { cond: Box<Expression>, then_clause: Vec<Statement>, else_clause: Vec<Statement> },
|
Conditional { cond: Box<Expression>, then_clause: Vec<Statement>, else_clause: Vec<Statement> },
|
||||||
CaseMatch { cond: Box<Expression>, alternatives: Vec<Alternative> },
|
CaseMatch { cond: Box<Expression>, alternatives: Vec<Alternative> },
|
||||||
Loop { cond: Box<Expression>, statements: Vec<Statement> },
|
Loop { cond: Box<Expression>, statements: Vec<Statement> },
|
||||||
@ -90,6 +91,7 @@ pub enum Lookup {
|
|||||||
GlobalVar(DefId),
|
GlobalVar(DefId),
|
||||||
Function(DefId),
|
Function(DefId),
|
||||||
Param(u8),
|
Param(u8),
|
||||||
|
SelfParam,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
#[derive(Debug, Clone, PartialEq)]
|
@ -4,9 +4,7 @@ use schala_repl::{
|
|||||||
};
|
};
|
||||||
use stopwatch::Stopwatch;
|
use stopwatch::Stopwatch;
|
||||||
|
|
||||||
use crate::{
|
use crate::{error::SchalaError, parsing, reduced_ir, symbol_table, tree_walk_eval, type_inference};
|
||||||
error::SchalaError, parsing, reduced_ir, symbol_table, tokenizing, tree_walk_eval, type_inference,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// All the state necessary to parse and execute a Schala program are stored in this struct.
|
/// All the state necessary to parse and execute a Schala program are stored in this struct.
|
||||||
pub struct Schala<'a> {
|
pub struct Schala<'a> {
|
||||||
@ -74,18 +72,10 @@ impl<'a> Schala<'a> {
|
|||||||
self.timings = vec![];
|
self.timings = vec![];
|
||||||
let sw = Stopwatch::start_new();
|
let sw = Stopwatch::start_new();
|
||||||
|
|
||||||
// 1st stage - tokenization
|
self.source_reference.load_new_source(source);
|
||||||
// TODO tokenize should return its own error type
|
|
||||||
let tokens = tokenizing::tokenize(source);
|
|
||||||
if let Some(err) = SchalaError::from_tokens(&tokens) {
|
|
||||||
return Err(err);
|
|
||||||
}
|
|
||||||
|
|
||||||
//2nd stage - parsing
|
|
||||||
self.active_parser.add_new_tokens(tokens);
|
|
||||||
let ast = self
|
let ast = self
|
||||||
.active_parser
|
.active_parser
|
||||||
.parse()
|
.parse(source)
|
||||||
.map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;
|
.map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;
|
||||||
self.timings.push(("parsing", sw.elapsed()));
|
self.timings.push(("parsing", sw.elapsed()));
|
||||||
|
|
||||||
@ -98,8 +88,7 @@ impl<'a> Schala<'a> {
|
|||||||
self.timings.push(("symbol_table", sw.elapsed()));
|
self.timings.push(("symbol_table", sw.elapsed()));
|
||||||
|
|
||||||
// Typechecking
|
// Typechecking
|
||||||
// TODO typechecking not working
|
let _overall_type = self.type_context.typecheck(&ast).map_err(SchalaError::from_type_error);
|
||||||
//let _overall_type = self.type_context.typecheck(&ast).map_err(SchalaError::from_type_error);
|
|
||||||
|
|
||||||
let sw = Stopwatch::start_new();
|
let sw = Stopwatch::start_new();
|
||||||
let reduced_ir = reduced_ir::reduce(&ast, &self.symbol_table, &self.type_context);
|
let reduced_ir = reduced_ir::reduce(&ast, &self.symbol_table, &self.type_context);
|
||||||
@ -122,31 +111,50 @@ impl<'a> Schala<'a> {
|
|||||||
|
|
||||||
/// Represents lines of source code
|
/// Represents lines of source code
|
||||||
pub(crate) struct SourceReference {
|
pub(crate) struct SourceReference {
|
||||||
lines: Option<Vec<String>>,
|
last_source: Option<String>,
|
||||||
|
/// Offsets in *bytes* (not chars) representing a newline character
|
||||||
|
newline_offsets: Vec<usize>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SourceReference {
|
impl SourceReference {
|
||||||
fn new() -> SourceReference {
|
pub(crate) fn new() -> SourceReference {
|
||||||
SourceReference { lines: None }
|
SourceReference { last_source: None, newline_offsets: vec![] }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn load_new_source(&mut self, source: &str) {
|
pub(crate) fn load_new_source(&mut self, source: &str) {
|
||||||
//TODO this is a lot of heap allocations - maybe there's a way to make it more efficient?
|
self.newline_offsets = vec![];
|
||||||
self.lines = Some(source.lines().map(|s| s.to_string()).collect());
|
for (offset, ch) in source.as_bytes().iter().enumerate() {
|
||||||
|
if *ch == b'\n' {
|
||||||
|
self.newline_offsets.push(offset);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.last_source = Some(source.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_line(&self, line: usize) -> String {
|
// (line_start, line_num, the string itself)
|
||||||
self.lines
|
pub fn get_line(&self, line: usize) -> (usize, usize, String) {
|
||||||
.as_ref()
|
if self.newline_offsets.is_empty() {
|
||||||
.and_then(|x| x.get(line).map(|s| s.to_string()))
|
return (0, 0, self.last_source.as_ref().cloned().unwrap());
|
||||||
.unwrap_or_else(|| "NO LINE FOUND".to_string())
|
}
|
||||||
|
|
||||||
|
//TODO make sure this is utf8-safe
|
||||||
|
let start_idx = match self.newline_offsets.binary_search(&line) {
|
||||||
|
Ok(index) | Err(index) => index,
|
||||||
|
};
|
||||||
|
|
||||||
|
let last_source = self.last_source.as_ref().unwrap();
|
||||||
|
|
||||||
|
let start = self.newline_offsets[start_idx];
|
||||||
|
let end = self.newline_offsets.get(start_idx + 1).cloned().unwrap_or_else(|| last_source.len());
|
||||||
|
|
||||||
|
let slice = &last_source.as_bytes()[start..end];
|
||||||
|
(start, start_idx, std::str::from_utf8(slice).unwrap().to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
#[derive(Clone, Copy, Debug)]
|
#[derive(Clone, Copy, Debug)]
|
||||||
pub(crate) enum Stage {
|
pub(crate) enum Stage {
|
||||||
Tokenizing,
|
|
||||||
Parsing,
|
Parsing,
|
||||||
Symbols,
|
Symbols,
|
||||||
ScopeResolution,
|
ScopeResolution,
|
||||||
@ -156,7 +164,7 @@ pub(crate) enum Stage {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn stage_names() -> Vec<&'static str> {
|
fn stage_names() -> Vec<&'static str> {
|
||||||
vec!["tokenizing", "parsing", "symbol-table", "typechecking", "ast-reduction", "ast-walking-evaluation"]
|
vec!["parsing", "symbol-table", "typechecking", "ast-reduction", "ast-walking-evaluation"]
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Default, Clone)]
|
#[derive(Default, Clone)]
|
||||||
@ -177,14 +185,13 @@ impl<'a> ProgrammingLanguageInterface for Schala<'a> {
|
|||||||
|
|
||||||
fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse {
|
fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse {
|
||||||
let ComputationRequest { source, debug_requests: _, config: _ } = request;
|
let ComputationRequest { source, debug_requests: _, config: _ } = request;
|
||||||
self.source_reference.load_new_source(source);
|
|
||||||
let sw = Stopwatch::start_new();
|
let sw = Stopwatch::start_new();
|
||||||
|
|
||||||
let main_output =
|
let main_output =
|
||||||
self.run_pipeline(source, request.config).map_err(|schala_err| schala_err.display());
|
self.run_pipeline(source, request.config).map_err(|schala_err| schala_err.display());
|
||||||
let total_duration = sw.elapsed();
|
let total_duration = sw.elapsed();
|
||||||
|
|
||||||
let stage_durations: Vec<_> = std::mem::replace(&mut self.timings, vec![])
|
let stage_durations: Vec<_> = std::mem::take(&mut self.timings)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(label, duration)| (label.to_string(), duration))
|
.map(|(label, duration)| (label.to_string(), duration))
|
||||||
.collect();
|
.collect();
|
@ -17,6 +17,12 @@ impl Fqsn {
|
|||||||
Fqsn { scopes: v }
|
Fqsn { scopes: v }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn extend(&self, new_item: &str) -> Self {
|
||||||
|
let mut new = self.clone();
|
||||||
|
new.scopes.push(ScopeSegment::Name(Rc::new(new_item.to_string())));
|
||||||
|
new
|
||||||
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
pub fn from_strs(strs: &[&str]) -> Fqsn {
|
pub fn from_strs(strs: &[&str]) -> Fqsn {
|
||||||
let mut scopes = vec![];
|
let mut scopes = vec![];
|
@ -10,7 +10,7 @@ use crate::{
|
|||||||
ast,
|
ast,
|
||||||
ast::ItemId,
|
ast::ItemId,
|
||||||
builtin::Builtin,
|
builtin::Builtin,
|
||||||
tokenizing::Location,
|
parsing::Location,
|
||||||
type_inference::{TypeContext, TypeId},
|
type_inference::{TypeContext, TypeId},
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -32,9 +32,10 @@ pub type DefId = Id<DefItem>;
|
|||||||
pub enum SymbolError {
|
pub enum SymbolError {
|
||||||
DuplicateName { prev_name: Fqsn, location: Location },
|
DuplicateName { prev_name: Fqsn, location: Location },
|
||||||
DuplicateVariant { type_fqsn: Fqsn, name: String },
|
DuplicateVariant { type_fqsn: Fqsn, name: String },
|
||||||
DuplicateRecord { type_name: Fqsn, location: Location, member: String },
|
DuplicateRecord { type_fqsn: Fqsn, location: Location, record: String, member: String },
|
||||||
UnknownAnnotation { name: String },
|
UnknownAnnotation { name: String },
|
||||||
BadAnnotation { name: String, msg: String },
|
BadAnnotation { name: String, msg: String },
|
||||||
|
BadImplBlockEntry,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
@ -52,7 +53,10 @@ enum NameKind {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
struct TypeKind;
|
enum TypeKind {
|
||||||
|
Function,
|
||||||
|
Constructor,
|
||||||
|
}
|
||||||
|
|
||||||
/// Keeps track of what names were used in a given namespace.
|
/// Keeps track of what names were used in a given namespace.
|
||||||
struct NameTable<K> {
|
struct NameTable<K> {
|
||||||
@ -114,7 +118,7 @@ impl SymbolTable {
|
|||||||
) -> Result<(), Vec<SymbolError>> {
|
) -> Result<(), Vec<SymbolError>> {
|
||||||
let mut populator = SymbolTablePopulator { type_context, table: self };
|
let mut populator = SymbolTablePopulator { type_context, table: self };
|
||||||
|
|
||||||
let errs = populator.populate_name_tables(ast);
|
let errs = populator.populate_definition_tables(ast);
|
||||||
if !errs.is_empty() {
|
if !errs.is_empty() {
|
||||||
return Err(errs);
|
return Err(errs);
|
||||||
}
|
}
|
||||||
@ -153,7 +157,8 @@ impl SymbolTable {
|
|||||||
/// to a Symbol, a descriptor of what that name refers to.
|
/// to a Symbol, a descriptor of what that name refers to.
|
||||||
fn add_symbol(&mut self, id: &ItemId, fqsn: Fqsn, spec: SymbolSpec) {
|
fn add_symbol(&mut self, id: &ItemId, fqsn: Fqsn, spec: SymbolSpec) {
|
||||||
let def_id = self.def_id_store.fresh();
|
let def_id = self.def_id_store.fresh();
|
||||||
let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), spec, def_id });
|
let local_name = fqsn.last_elem();
|
||||||
|
let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), local_name, spec, def_id });
|
||||||
self.symbol_trie.insert(&fqsn, def_id);
|
self.symbol_trie.insert(&fqsn, def_id);
|
||||||
self.id_to_def.insert(*id, def_id);
|
self.id_to_def.insert(*id, def_id);
|
||||||
self.def_to_symbol.insert(def_id, symbol);
|
self.def_to_symbol.insert(def_id, symbol);
|
||||||
@ -162,7 +167,8 @@ impl SymbolTable {
|
|||||||
fn populate_single_builtin(&mut self, fqsn: Fqsn, builtin: Builtin) {
|
fn populate_single_builtin(&mut self, fqsn: Fqsn, builtin: Builtin) {
|
||||||
let def_id = self.def_id_store.fresh();
|
let def_id = self.def_id_store.fresh();
|
||||||
let spec = SymbolSpec::Builtin(builtin);
|
let spec = SymbolSpec::Builtin(builtin);
|
||||||
let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), spec, def_id });
|
let local_name = fqsn.last_elem();
|
||||||
|
let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), local_name, spec, def_id });
|
||||||
|
|
||||||
self.symbol_trie.insert(&fqsn, def_id);
|
self.symbol_trie.insert(&fqsn, def_id);
|
||||||
self.def_to_symbol.insert(def_id, symbol);
|
self.def_to_symbol.insert(def_id, symbol);
|
||||||
@ -173,17 +179,18 @@ impl SymbolTable {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Symbol {
|
pub struct Symbol {
|
||||||
fully_qualified_name: Fqsn,
|
fully_qualified_name: Fqsn,
|
||||||
|
local_name: Rc<String>,
|
||||||
spec: SymbolSpec,
|
spec: SymbolSpec,
|
||||||
def_id: DefId,
|
def_id: DefId,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Symbol {
|
impl Symbol {
|
||||||
pub fn local_name(&self) -> Rc<String> {
|
pub fn local_name(&self) -> &str {
|
||||||
self.fully_qualified_name.last_elem()
|
self.local_name.as_ref()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn def_id(&self) -> Option<DefId> {
|
pub fn def_id(&self) -> DefId {
|
||||||
Some(self.def_id)
|
self.def_id
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spec(&self) -> SymbolSpec {
|
pub fn spec(&self) -> SymbolSpec {
|
||||||
@ -212,7 +219,7 @@ impl fmt::Display for Symbol {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum SymbolSpec {
|
pub enum SymbolSpec {
|
||||||
Builtin(Builtin),
|
Builtin(Builtin),
|
||||||
Func,
|
Func { method: Option<crate::ast::TypeSingletonName> },
|
||||||
DataConstructor { tag: u32, type_id: TypeId },
|
DataConstructor { tag: u32, type_id: TypeId },
|
||||||
RecordConstructor { tag: u32, type_id: TypeId },
|
RecordConstructor { tag: u32, type_id: TypeId },
|
||||||
GlobalBinding, //Only for global variables, not for function-local ones or ones within a `let` scope context
|
GlobalBinding, //Only for global variables, not for function-local ones or ones within a `let` scope context
|
||||||
@ -225,7 +232,7 @@ impl fmt::Display for SymbolSpec {
|
|||||||
use self::SymbolSpec::*;
|
use self::SymbolSpec::*;
|
||||||
match self {
|
match self {
|
||||||
Builtin(b) => write!(f, "Builtin: {:?}", b),
|
Builtin(b) => write!(f, "Builtin: {:?}", b),
|
||||||
Func => write!(f, "Func"),
|
Func { .. } => write!(f, "Func"),
|
||||||
DataConstructor { tag, type_id } => write!(f, "DataConstructor(tag: {}, type: {})", tag, type_id),
|
DataConstructor { tag, type_id } => write!(f, "DataConstructor(tag: {}, type: {})", tag, type_id),
|
||||||
RecordConstructor { type_id, tag, .. } =>
|
RecordConstructor { type_id, tag, .. } =>
|
||||||
write!(f, "RecordConstructor(tag: {})(<members> -> {})", tag, type_id),
|
write!(f, "RecordConstructor(tag: {})(<members> -> {})", tag, type_id),
|
@ -11,7 +11,7 @@ use crate::{
|
|||||||
TypeSingletonName, Variant, VariantKind, AST,
|
TypeSingletonName, Variant, VariantKind, AST,
|
||||||
},
|
},
|
||||||
builtin::Builtin,
|
builtin::Builtin,
|
||||||
tokenizing::Location,
|
parsing::Location,
|
||||||
type_inference::{self, PendingType, TypeBuilder, TypeContext, VariantBuilder},
|
type_inference::{self, PendingType, TypeBuilder, TypeContext, VariantBuilder},
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -32,14 +32,14 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
/// constants, functions, types, and modules defined within. This simultaneously
|
/// constants, functions, types, and modules defined within. This simultaneously
|
||||||
/// checks for dupicate definitions (and returns errors if discovered), and sets
|
/// checks for dupicate definitions (and returns errors if discovered), and sets
|
||||||
/// up name tables that will be used by further parts of the compiler
|
/// up name tables that will be used by further parts of the compiler
|
||||||
pub fn populate_name_tables(&mut self, ast: &AST) -> Vec<SymbolError> {
|
pub fn populate_definition_tables(&mut self, ast: &AST) -> Vec<SymbolError> {
|
||||||
let mut scope_stack = vec![];
|
let mut scope_stack = vec![];
|
||||||
self.add_from_scope(ast.statements.as_ref(), &mut scope_stack, false)
|
self.add_from_scope(ast.statements.as_ref(), &mut scope_stack, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_from_scope(
|
fn add_from_scope(
|
||||||
&mut self,
|
&mut self,
|
||||||
statements: &[Statement],
|
statements: &[Statement<StatementKind>],
|
||||||
scope_stack: &mut Vec<ScopeSegment>,
|
scope_stack: &mut Vec<ScopeSegment>,
|
||||||
function_scope: bool,
|
function_scope: bool,
|
||||||
) -> Vec<SymbolError> {
|
) -> Vec<SymbolError> {
|
||||||
@ -51,24 +51,58 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
if let Err(err) = self.add_single_statement(id, kind, location, scope_stack, function_scope) {
|
if let Err(err) = self.add_single_statement(id, kind, location, scope_stack, function_scope) {
|
||||||
errors.push(err);
|
errors.push(err);
|
||||||
} else {
|
} else {
|
||||||
|
let decl = match kind {
|
||||||
|
StatementKind::Declaration(decl) => decl,
|
||||||
|
_ => continue,
|
||||||
|
};
|
||||||
// If there's an error with a name, don't recurse into subscopes of that name
|
// If there's an error with a name, don't recurse into subscopes of that name
|
||||||
let recursive_errs = match kind {
|
let recursive_errs = match decl {
|
||||||
StatementKind::Declaration(Declaration::FuncDecl(signature, body)) => {
|
Declaration::FuncDecl(signature, body) => {
|
||||||
let new_scope = ScopeSegment::Name(signature.name.clone());
|
let new_scope = ScopeSegment::Name(signature.name.clone());
|
||||||
scope_stack.push(new_scope);
|
scope_stack.push(new_scope);
|
||||||
let output = self.add_from_scope(body.as_ref(), scope_stack, true);
|
let output = self.add_from_scope(body.as_ref(), scope_stack, true);
|
||||||
scope_stack.pop();
|
scope_stack.pop();
|
||||||
output
|
output
|
||||||
}
|
}
|
||||||
StatementKind::Declaration(Declaration::Module { name, items }) => {
|
Declaration::Module { name, items } => {
|
||||||
let new_scope = ScopeSegment::Name(name.clone());
|
let new_scope = ScopeSegment::Name(name.clone());
|
||||||
scope_stack.push(new_scope);
|
scope_stack.push(new_scope);
|
||||||
let output = self.add_from_scope(items.as_ref(), scope_stack, false);
|
let output = self.add_from_scope(items.as_ref(), scope_stack, false);
|
||||||
scope_stack.pop();
|
scope_stack.pop();
|
||||||
output
|
output
|
||||||
}
|
}
|
||||||
StatementKind::Declaration(Declaration::TypeDecl { name, body, mutable }) =>
|
Declaration::TypeDecl { name, body, mutable } => {
|
||||||
self.add_type_members(name, body, mutable, location, scope_stack),
|
let type_fqsn = Fqsn::from_scope_stack(scope_stack, name.name.clone());
|
||||||
|
self.add_type_members(name, body, mutable, location, type_fqsn)
|
||||||
|
}
|
||||||
|
|
||||||
|
Declaration::Impl { type_name, interface_name: _, block } => {
|
||||||
|
let mut errors = vec![];
|
||||||
|
let new_scope = ScopeSegment::Name(Rc::new(format!("<impl-block>{}", type_name)));
|
||||||
|
scope_stack.push(new_scope);
|
||||||
|
|
||||||
|
for decl_stmt in block.iter() {
|
||||||
|
let Statement { id, kind, location } = decl_stmt;
|
||||||
|
let location = *location;
|
||||||
|
match kind {
|
||||||
|
decl @ Declaration::FuncDecl(signature, body) => {
|
||||||
|
let output =
|
||||||
|
self.add_single_declaration(id, decl, location, scope_stack, true);
|
||||||
|
if let Err(e) = output {
|
||||||
|
errors.push(e);
|
||||||
|
};
|
||||||
|
let new_scope = ScopeSegment::Name(signature.name.clone());
|
||||||
|
scope_stack.push(new_scope);
|
||||||
|
let output = self.add_from_scope(body.as_ref(), scope_stack, true);
|
||||||
|
scope_stack.pop();
|
||||||
|
errors.extend(output.into_iter());
|
||||||
|
}
|
||||||
|
_other => errors.push(SymbolError::BadImplBlockEntry),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
scope_stack.pop();
|
||||||
|
errors
|
||||||
|
}
|
||||||
_ => vec![],
|
_ => vec![],
|
||||||
};
|
};
|
||||||
errors.extend(recursive_errs.into_iter());
|
errors.extend(recursive_errs.into_iter());
|
||||||
@ -87,30 +121,51 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
function_scope: bool,
|
function_scope: bool,
|
||||||
) -> Result<(), SymbolError> {
|
) -> Result<(), SymbolError> {
|
||||||
match kind {
|
match kind {
|
||||||
StatementKind::Declaration(Declaration::FuncSig(signature)) => {
|
StatementKind::Declaration(decl) =>
|
||||||
|
self.add_single_declaration(id, decl, location, scope_stack, function_scope),
|
||||||
|
_ => return Ok(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_single_declaration(
|
||||||
|
&mut self,
|
||||||
|
id: &ItemId,
|
||||||
|
decl: &Declaration,
|
||||||
|
location: Location,
|
||||||
|
scope_stack: &[ScopeSegment],
|
||||||
|
function_scope: bool,
|
||||||
|
) -> Result<(), SymbolError> {
|
||||||
|
match decl {
|
||||||
|
Declaration::FuncSig(signature) => {
|
||||||
let fq_function = Fqsn::from_scope_stack(scope_stack, signature.name.clone());
|
let fq_function = Fqsn::from_scope_stack(scope_stack, signature.name.clone());
|
||||||
self.table
|
self.table
|
||||||
.fq_names
|
.fq_names
|
||||||
.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
|
.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
|
||||||
self.table.types.register(fq_function.clone(), NameSpec { location, kind: TypeKind })?;
|
self.table
|
||||||
|
.types
|
||||||
|
.register(fq_function.clone(), NameSpec { location, kind: TypeKind::Function })?;
|
||||||
|
|
||||||
self.add_symbol(id, fq_function, SymbolSpec::Func);
|
self.add_symbol(id, fq_function, SymbolSpec::Func { method: None });
|
||||||
}
|
}
|
||||||
StatementKind::Declaration(Declaration::FuncDecl(signature, ..)) => {
|
Declaration::FuncDecl(signature, ..) => {
|
||||||
let fn_name = &signature.name;
|
let fn_name = &signature.name;
|
||||||
let fq_function = Fqsn::from_scope_stack(scope_stack, fn_name.clone());
|
let fq_function = Fqsn::from_scope_stack(scope_stack, fn_name.clone());
|
||||||
self.table
|
self.table
|
||||||
.fq_names
|
.fq_names
|
||||||
.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
|
.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
|
||||||
self.table.types.register(fq_function.clone(), NameSpec { location, kind: TypeKind })?;
|
self.table
|
||||||
|
.types
|
||||||
|
.register(fq_function.clone(), NameSpec { location, kind: TypeKind::Function })?;
|
||||||
|
|
||||||
self.add_symbol(id, fq_function, SymbolSpec::Func);
|
self.add_symbol(id, fq_function, SymbolSpec::Func { method: None });
|
||||||
}
|
}
|
||||||
StatementKind::Declaration(Declaration::TypeDecl { name, .. }) => {
|
Declaration::TypeDecl { name, .. } => {
|
||||||
let fq_type = Fqsn::from_scope_stack(scope_stack, name.name.clone());
|
let fq_type = Fqsn::from_scope_stack(scope_stack, name.name.clone());
|
||||||
self.table.types.register(fq_type, NameSpec { location, kind: TypeKind })?;
|
self.table.types.register(fq_type, NameSpec { location, kind: TypeKind::Constructor })?;
|
||||||
}
|
}
|
||||||
StatementKind::Declaration(Declaration::Binding { name, .. }) => {
|
//TODO handle type aliases
|
||||||
|
Declaration::TypeAlias { .. } => (),
|
||||||
|
Declaration::Binding { name, .. } => {
|
||||||
let fq_binding = Fqsn::from_scope_stack(scope_stack, name.clone());
|
let fq_binding = Fqsn::from_scope_stack(scope_stack, name.clone());
|
||||||
self.table
|
self.table
|
||||||
.fq_names
|
.fq_names
|
||||||
@ -119,11 +174,14 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
self.add_symbol(id, fq_binding, SymbolSpec::GlobalBinding);
|
self.add_symbol(id, fq_binding, SymbolSpec::GlobalBinding);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
StatementKind::Declaration(Declaration::Module { name, .. }) => {
|
//TODO implement interfaces
|
||||||
|
Declaration::Interface { .. } => (),
|
||||||
|
Declaration::Impl { .. } => (),
|
||||||
|
Declaration::Module { name, .. } => {
|
||||||
let fq_module = Fqsn::from_scope_stack(scope_stack, name.clone());
|
let fq_module = Fqsn::from_scope_stack(scope_stack, name.clone());
|
||||||
self.table.fq_names.register(fq_module, NameSpec { location, kind: NameKind::Module })?;
|
self.table.fq_names.register(fq_module, NameSpec { location, kind: NameKind::Module })?;
|
||||||
}
|
}
|
||||||
StatementKind::Declaration(Declaration::Annotation { name, arguments, inner }) => {
|
Declaration::Annotation { name, arguments, inner } => {
|
||||||
let inner = inner.as_ref();
|
let inner = inner.as_ref();
|
||||||
self.add_single_statement(
|
self.add_single_statement(
|
||||||
&inner.id,
|
&inner.id,
|
||||||
@ -134,7 +192,6 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
)?;
|
)?;
|
||||||
self.process_annotation(name.as_ref(), arguments.as_slice(), scope_stack, inner)?;
|
self.process_annotation(name.as_ref(), arguments.as_slice(), scope_stack, inner)?;
|
||||||
}
|
}
|
||||||
_ => (),
|
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@ -144,7 +201,7 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
name: &str,
|
name: &str,
|
||||||
arguments: &[Expression],
|
arguments: &[Expression],
|
||||||
scope_stack: &[ScopeSegment],
|
scope_stack: &[ScopeSegment],
|
||||||
inner: &Statement,
|
inner: &Statement<StatementKind>,
|
||||||
) -> Result<(), SymbolError> {
|
) -> Result<(), SymbolError> {
|
||||||
if name == "register_builtin" {
|
if name == "register_builtin" {
|
||||||
if let Statement {
|
if let Statement {
|
||||||
@ -190,11 +247,11 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
type_body: &TypeBody,
|
type_body: &TypeBody,
|
||||||
_mutable: &bool,
|
_mutable: &bool,
|
||||||
location: Location,
|
location: Location,
|
||||||
scope_stack: &mut Vec<ScopeSegment>,
|
type_fqsn: Fqsn,
|
||||||
) -> Vec<SymbolError> {
|
) -> Vec<SymbolError> {
|
||||||
let (variants, immediate_variant) = match type_body {
|
let (variants, immediate_variant) = match type_body {
|
||||||
TypeBody::Variants(variants) => (variants.clone(), false),
|
TypeBody::Variants(variants) => (variants.clone(), false),
|
||||||
TypeBody::ImmediateRecord(id, fields) => (
|
TypeBody::ImmediateRecord { id, fields } => (
|
||||||
vec![Variant {
|
vec![Variant {
|
||||||
id: *id,
|
id: *id,
|
||||||
name: type_name.name.clone(),
|
name: type_name.name.clone(),
|
||||||
@ -203,10 +260,6 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
let type_fqsn = Fqsn::from_scope_stack(scope_stack, type_name.name.clone());
|
|
||||||
|
|
||||||
let new_scope = ScopeSegment::Name(type_name.name.clone());
|
|
||||||
scope_stack.push(new_scope);
|
|
||||||
|
|
||||||
// Check for duplicates before registering any types with the TypeContext
|
// Check for duplicates before registering any types with the TypeContext
|
||||||
let mut seen_variants = HashSet::new();
|
let mut seen_variants = HashSet::new();
|
||||||
@ -222,15 +275,15 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
seen_variants.insert(variant.name.clone());
|
seen_variants.insert(variant.name.clone());
|
||||||
|
|
||||||
if let VariantKind::Record(ref members) = variant.kind {
|
if let VariantKind::Record(ref members) = variant.kind {
|
||||||
let variant_name = Fqsn::from_scope_stack(scope_stack.as_ref(), variant.name.clone());
|
|
||||||
let mut seen_members = HashMap::new();
|
let mut seen_members = HashMap::new();
|
||||||
for (member_name, _) in members.iter() {
|
for (member_name, _) in members.iter() {
|
||||||
match seen_members.entry(member_name.as_ref()) {
|
match seen_members.entry(member_name.as_ref()) {
|
||||||
Entry::Occupied(o) => {
|
Entry::Occupied(o) => {
|
||||||
let location = *o.get();
|
let location = *o.get();
|
||||||
errors.push(SymbolError::DuplicateRecord {
|
errors.push(SymbolError::DuplicateRecord {
|
||||||
type_name: variant_name.clone(),
|
type_fqsn: type_fqsn.clone(),
|
||||||
location,
|
location,
|
||||||
|
record: variant.name.as_ref().to_string(),
|
||||||
member: member_name.as_ref().to_string(),
|
member: member_name.as_ref().to_string(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -249,11 +302,11 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
|
|
||||||
let mut type_builder = TypeBuilder::new(type_name.name.as_ref());
|
let mut type_builder = TypeBuilder::new(type_name.name.as_ref());
|
||||||
|
|
||||||
let mut fqsn_id_map = HashMap::new();
|
let mut variant_name_map = HashMap::new();
|
||||||
for variant in variants.iter() {
|
for variant in variants.iter() {
|
||||||
let Variant { name, kind, id } = variant;
|
let Variant { name, kind, id } = variant;
|
||||||
|
|
||||||
fqsn_id_map.insert(Fqsn::from_scope_stack(scope_stack.as_ref(), name.clone()), id);
|
variant_name_map.insert(name.clone(), id);
|
||||||
|
|
||||||
let mut variant_builder = VariantBuilder::new(name.as_ref());
|
let mut variant_builder = VariantBuilder::new(name.as_ref());
|
||||||
match kind {
|
match kind {
|
||||||
@ -277,30 +330,23 @@ impl<'a> SymbolTablePopulator<'a> {
|
|||||||
|
|
||||||
// This index is guaranteed to be the correct tag
|
// This index is guaranteed to be the correct tag
|
||||||
for (index, variant) in type_definition.variants.iter().enumerate() {
|
for (index, variant) in type_definition.variants.iter().enumerate() {
|
||||||
let fqsn = Fqsn::from_scope_stack(scope_stack.as_ref(), Rc::new(variant.name.to_string()));
|
let id = variant_name_map.get(&variant.name).unwrap();
|
||||||
let id = fqsn_id_map.get(&fqsn).unwrap();
|
|
||||||
let tag = index as u32;
|
let tag = index as u32;
|
||||||
let spec = match &variant.members {
|
let spec = match &variant.members {
|
||||||
type_inference::VariantMembers::Unit => SymbolSpec::DataConstructor { tag, type_id },
|
type_inference::VariantMembers::Unit => SymbolSpec::DataConstructor { tag, type_id },
|
||||||
type_inference::VariantMembers::Tuple(..) => SymbolSpec::DataConstructor { tag, type_id },
|
type_inference::VariantMembers::Tuple(..) => SymbolSpec::DataConstructor { tag, type_id },
|
||||||
type_inference::VariantMembers::Record(..) => SymbolSpec::RecordConstructor { tag, type_id },
|
type_inference::VariantMembers::Record(..) => SymbolSpec::RecordConstructor { tag, type_id },
|
||||||
};
|
};
|
||||||
self.table.add_symbol(id, fqsn, spec);
|
self.table.add_symbol(id, type_fqsn.extend(&variant.name), spec);
|
||||||
}
|
}
|
||||||
|
|
||||||
if immediate_variant {
|
if immediate_variant {
|
||||||
let variant = &type_definition.variants[0];
|
let variant = &type_definition.variants[0];
|
||||||
let fqsn = Fqsn::from_scope_stack(scope_stack.as_ref(), Rc::new(variant.name.to_string()));
|
let id = variant_name_map.get(&variant.name).unwrap();
|
||||||
let id = fqsn_id_map.get(&fqsn).unwrap();
|
|
||||||
let abbrev_fqsn = Fqsn::from_scope_stack(
|
|
||||||
scope_stack[0..scope_stack.len() - 1].as_ref(),
|
|
||||||
Rc::new(variant.name.to_string()),
|
|
||||||
);
|
|
||||||
let spec = SymbolSpec::RecordConstructor { tag: 0, type_id };
|
let spec = SymbolSpec::RecordConstructor { tag: 0, type_id };
|
||||||
self.table.add_symbol(id, abbrev_fqsn, spec);
|
self.table.add_symbol(id, type_fqsn, spec);
|
||||||
}
|
}
|
||||||
|
|
||||||
scope_stack.pop();
|
|
||||||
vec![]
|
vec![]
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -15,19 +15,20 @@ enum NameType {
|
|||||||
Import(Fqsn),
|
Import(Fqsn),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type LexScope<'a> = ScopeStack<'a, Rc<String>, NameType, ScopeType>;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
enum ScopeType {
|
enum ScopeType {
|
||||||
Function { name: Rc<String> },
|
Function { name: Rc<String> },
|
||||||
Lambda,
|
Lambda,
|
||||||
PatternMatch,
|
PatternMatch,
|
||||||
|
ImplBlock,
|
||||||
//TODO add some notion of a let-like scope?
|
//TODO add some notion of a let-like scope?
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ScopeResolver<'a> {
|
pub struct ScopeResolver<'a> {
|
||||||
symbol_table: &'a mut super::SymbolTable,
|
symbol_table: &'a mut super::SymbolTable,
|
||||||
//TODO maybe this shouldn't be a scope stack, b/c the recursion behavior comes from multiple
|
lexical_scopes: LexScope<'a>,
|
||||||
//instances of ScopeResolver
|
|
||||||
lexical_scopes: ScopeStack<'a, Rc<String>, NameType, ScopeType>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> ScopeResolver<'a> {
|
impl<'a> ScopeResolver<'a> {
|
||||||
@ -43,6 +44,7 @@ impl<'a> ScopeResolver<'a> {
|
|||||||
/// This method correctly modifies the id_to_def table (ItemId) to have the appropriate
|
/// This method correctly modifies the id_to_def table (ItemId) to have the appropriate
|
||||||
/// mappings.
|
/// mappings.
|
||||||
fn lookup_name_in_scope(&mut self, name: &QualifiedName) {
|
fn lookup_name_in_scope(&mut self, name: &QualifiedName) {
|
||||||
|
//TODO this method badly needs attention
|
||||||
let QualifiedName { id, components } = name;
|
let QualifiedName { id, components } = name;
|
||||||
|
|
||||||
let local_name = components.first().unwrap().clone();
|
let local_name = components.first().unwrap().clone();
|
||||||
@ -55,7 +57,7 @@ impl<'a> ScopeResolver<'a> {
|
|||||||
if components.len() == 1 {
|
if components.len() == 1 {
|
||||||
match name_type {
|
match name_type {
|
||||||
Some(NameType::Import(fqsn)) => {
|
Some(NameType::Import(fqsn)) => {
|
||||||
let def_id = self.symbol_table.symbol_trie.lookup(&fqsn);
|
let def_id = self.symbol_table.symbol_trie.lookup(fqsn);
|
||||||
|
|
||||||
if let Some(def_id) = def_id {
|
if let Some(def_id) = def_id {
|
||||||
self.symbol_table.id_to_def.insert(*id, def_id);
|
self.symbol_table.id_to_def.insert(*id, def_id);
|
||||||
@ -71,14 +73,14 @@ impl<'a> ScopeResolver<'a> {
|
|||||||
Some(NameType::LocalFunction(item_id)) => {
|
Some(NameType::LocalFunction(item_id)) => {
|
||||||
let def_id = self.symbol_table.id_to_def.get(item_id);
|
let def_id = self.symbol_table.id_to_def.get(item_id);
|
||||||
if let Some(def_id) = def_id {
|
if let Some(def_id) = def_id {
|
||||||
let def_id = def_id.clone();
|
let def_id = *def_id;
|
||||||
self.symbol_table.id_to_def.insert(*id, def_id);
|
self.symbol_table.id_to_def.insert(*id, def_id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Some(NameType::LocalVariable(item_id)) => {
|
Some(NameType::LocalVariable(item_id)) => {
|
||||||
let def_id = self.symbol_table.id_to_def.get(item_id);
|
let def_id = self.symbol_table.id_to_def.get(item_id);
|
||||||
if let Some(def_id) = def_id {
|
if let Some(def_id) = def_id {
|
||||||
let def_id = def_id.clone();
|
let def_id = *def_id;
|
||||||
self.symbol_table.id_to_def.insert(*id, def_id);
|
self.symbol_table.id_to_def.insert(*id, def_id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -87,10 +89,8 @@ impl<'a> ScopeResolver<'a> {
|
|||||||
self.symbol_table.id_to_def.insert(*id, def_id);
|
self.symbol_table.id_to_def.insert(*id, def_id);
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
} else {
|
} else if let Some(def_id) = def_id {
|
||||||
if let Some(def_id) = def_id {
|
self.symbol_table.id_to_def.insert(*id, def_id);
|
||||||
self.symbol_table.id_to_def.insert(*id, def_id);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -140,6 +140,8 @@ impl<'a> ASTVisitor for ScopeResolver<'a> {
|
|||||||
let param_names = signature.params.iter().map(|param| param.name.clone());
|
let param_names = signature.params.iter().map(|param| param.name.clone());
|
||||||
//TODO I'm 90% sure this is right, until I get to closures
|
//TODO I'm 90% sure this is right, until I get to closures
|
||||||
//let mut new_scope = self.lexical_scopes.new_scope(Some(ScopeType::Function { name: signature.name.clone() }));
|
//let mut new_scope = self.lexical_scopes.new_scope(Some(ScopeType::Function { name: signature.name.clone() }));
|
||||||
|
//TODO this will recurse unwantedly into scopes; need to pop an outer function
|
||||||
|
//scope off first before going into a non-closure scope
|
||||||
let mut new_scope =
|
let mut new_scope =
|
||||||
ScopeStack::new(Some(ScopeType::Function { name: signature.name.clone() }));
|
ScopeStack::new(Some(ScopeType::Function { name: signature.name.clone() }));
|
||||||
|
|
||||||
@ -164,6 +166,15 @@ impl<'a> ASTVisitor for ScopeResolver<'a> {
|
|||||||
}
|
}
|
||||||
Recursion::Continue
|
Recursion::Continue
|
||||||
}
|
}
|
||||||
|
Declaration::Impl { block, .. } => {
|
||||||
|
let new_scope = ScopeStack::new(Some(ScopeType::ImplBlock));
|
||||||
|
let mut new_resolver =
|
||||||
|
ScopeResolver { symbol_table: self.symbol_table, lexical_scopes: new_scope };
|
||||||
|
for stmt in block.iter() {
|
||||||
|
walk_declaration(&mut new_resolver, &stmt.kind, &stmt.id);
|
||||||
|
}
|
||||||
|
Recursion::Stop
|
||||||
|
}
|
||||||
_ => Recursion::Continue,
|
_ => Recursion::Continue,
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -2,7 +2,7 @@
|
|||||||
use assert_matches::assert_matches;
|
use assert_matches::assert_matches;
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::{tokenizing::Location, util::quick_ast};
|
use crate::util::quick_ast;
|
||||||
|
|
||||||
fn add_symbols(src: &str) -> (SymbolTable, Result<(), Vec<SymbolError>>) {
|
fn add_symbols(src: &str) -> (SymbolTable, Result<(), Vec<SymbolError>>) {
|
||||||
let ast = quick_ast(src);
|
let ast = quick_ast(src);
|
||||||
@ -79,9 +79,11 @@ fn no_type_definition_duplicates() {
|
|||||||
let err = &errs[0];
|
let err = &errs[0];
|
||||||
|
|
||||||
match err {
|
match err {
|
||||||
SymbolError::DuplicateName { location, prev_name } => {
|
SymbolError::DuplicateName { location: _, prev_name } => {
|
||||||
assert_eq!(prev_name, &Fqsn::from_strs(&["Food"]));
|
assert_eq!(prev_name, &Fqsn::from_strs(&["Food"]));
|
||||||
assert_eq!(location, &Location { line_num: 2, char_num: 2 });
|
|
||||||
|
//TODO restore this Location test
|
||||||
|
//assert_eq!(location, &Location { line_num: 2, char_num: 2 });
|
||||||
}
|
}
|
||||||
_ => panic!(),
|
_ => panic!(),
|
||||||
}
|
}
|
||||||
@ -135,7 +137,7 @@ fn dont_falsely_detect_duplicates() {
|
|||||||
let a = 40;
|
let a = 40;
|
||||||
77
|
77
|
||||||
}
|
}
|
||||||
let q = 39;
|
let q = 39
|
||||||
"#;
|
"#;
|
||||||
let (symbols, _) = add_symbols(source);
|
let (symbols, _) = add_symbols(source);
|
||||||
|
|
||||||
@ -171,7 +173,8 @@ fn second_inner_func() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
inner_func(x)
|
inner_func(x)
|
||||||
}"#;
|
}
|
||||||
|
"#;
|
||||||
let (symbols, _) = add_symbols(source);
|
let (symbols, _) = add_symbols(source);
|
||||||
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func"])).is_some());
|
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func"])).is_some());
|
||||||
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "inner_func"])).is_some());
|
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "inner_func"])).is_some());
|
||||||
@ -187,7 +190,8 @@ inner_func(x)
|
|||||||
fn enclosing_scopes_3() {
|
fn enclosing_scopes_3() {
|
||||||
let source = r#"
|
let source = r#"
|
||||||
fn outer_func(x) {
|
fn outer_func(x) {
|
||||||
fn inner_func(arg) {
|
|
||||||
|
fn inner_func(arg) {
|
||||||
arg
|
arg
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -250,17 +254,61 @@ fn duplicate_modules() {
|
|||||||
fn duplicate_struct_members() {
|
fn duplicate_struct_members() {
|
||||||
let source = r#"
|
let source = r#"
|
||||||
type Tarak = Tarak {
|
type Tarak = Tarak {
|
||||||
loujet: i32,
|
loujet: i32
|
||||||
mets: i32,
|
,
|
||||||
mets: i32,
|
mets: i32,
|
||||||
|
mets: i32
|
||||||
|
,
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
|
|
||||||
let (_, output) = add_symbols(source);
|
let (_, output) = add_symbols(source);
|
||||||
let errs = output.unwrap_err();
|
let errs = dbg!(output.unwrap_err());
|
||||||
assert_matches!(&errs[..], [
|
assert_matches!(&errs[..], [
|
||||||
SymbolError::DuplicateRecord {
|
SymbolError::DuplicateRecord {
|
||||||
type_name, member, ..},
|
type_fqsn, member, record, ..},
|
||||||
] if type_name == &Fqsn::from_strs(&["Tarak", "Tarak"]) && member == "mets"
|
] if type_fqsn == &Fqsn::from_strs(&["Tarak"]) && member == "mets" && record == "Tarak"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn method_definition_added_to_symbol_table() {
|
||||||
|
let source = r#"
|
||||||
|
|
||||||
|
type Foo = { x: Int, y: Int }
|
||||||
|
|
||||||
|
impl Foo {
|
||||||
|
fn hella() {
|
||||||
|
let a = 50
|
||||||
|
self.x + a
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"#;
|
||||||
|
let (symbols, _) = add_symbols(source);
|
||||||
|
symbols.debug();
|
||||||
|
assert!(symbols.fq_names.table.get(&make_fqsn(&["<impl-block>Foo", "hella"])).is_some());
|
||||||
|
assert!(symbols.fq_names.table.get(&make_fqsn(&["<impl-block>Foo", "hella", "a"])).is_some());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn duplicate_method_definitions_detected() {
|
||||||
|
let source = r#"
|
||||||
|
|
||||||
|
type Foo = { x: Int, y: Int }
|
||||||
|
|
||||||
|
impl Foo {
|
||||||
|
fn hella() {
|
||||||
|
self.x + 50
|
||||||
|
}
|
||||||
|
|
||||||
|
fn hella() {
|
||||||
|
self.x + 40
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"#;
|
||||||
|
let (_symbols, output) = add_symbols(source);
|
||||||
|
let errs = output.unwrap_err();
|
||||||
|
assert_matches!(&errs[..], [
|
||||||
|
SymbolError::DuplicateName { prev_name: pn1, ..},
|
||||||
|
] if pn1 == &Fqsn::from_strs(&["<impl-block>Foo", "hella"]));
|
||||||
|
}
|
@ -38,7 +38,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
let mut acc = vec![];
|
let mut acc = vec![];
|
||||||
for (def_id, function) in reduced.functions.into_iter() {
|
for (def_id, function) in reduced.functions.into_iter() {
|
||||||
let mem = (&def_id).into();
|
let mem = (&def_id).into();
|
||||||
self.state.environments.insert(mem, MemoryValue::Function(function));
|
self.state.memory.insert(mem, MemoryValue::Function(function));
|
||||||
}
|
}
|
||||||
|
|
||||||
for statement in reduced.entrypoint.into_iter() {
|
for statement in reduced.entrypoint.into_iter() {
|
||||||
@ -67,8 +67,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
if self.early_returning {
|
if self.early_returning {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
if let Some(_) = self.loop_control {
|
if self.loop_control.is_some() {
|
||||||
println!("We here?");
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -79,7 +78,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
match stmt {
|
match stmt {
|
||||||
Statement::Binding { ref id, expr, constant: _ } => {
|
Statement::Binding { ref id, expr, constant: _ } => {
|
||||||
let evaluated = self.expression(expr)?;
|
let evaluated = self.expression(expr)?;
|
||||||
self.state.environments.insert(id.into(), evaluated.into());
|
self.state.memory.insert(id.into(), evaluated.into());
|
||||||
Ok(StatementOutput::Nothing)
|
Ok(StatementOutput::Nothing)
|
||||||
}
|
}
|
||||||
Statement::Expression(expr) => {
|
Statement::Expression(expr) => {
|
||||||
@ -120,7 +119,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
Expression::Lookup(kind) => match kind {
|
Expression::Lookup(kind) => match kind {
|
||||||
Lookup::Function(ref id) => {
|
Lookup::Function(ref id) => {
|
||||||
let mem = id.into();
|
let mem = id.into();
|
||||||
match self.state.environments.lookup(&mem) {
|
match self.state.memory.lookup(&mem) {
|
||||||
// This just checks that the function exists in "memory" by ID, we don't
|
// This just checks that the function exists in "memory" by ID, we don't
|
||||||
// actually retrieve it until `apply_function()`
|
// actually retrieve it until `apply_function()`
|
||||||
Some(MemoryValue::Function(_)) => Primitive::Callable(Callable::UserDefined(*id)),
|
Some(MemoryValue::Function(_)) => Primitive::Callable(Callable::UserDefined(*id)),
|
||||||
@ -129,14 +128,21 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
}
|
}
|
||||||
Lookup::Param(n) => {
|
Lookup::Param(n) => {
|
||||||
let mem = n.into();
|
let mem = n.into();
|
||||||
match self.state.environments.lookup(&mem) {
|
match self.state.memory.lookup(&mem) {
|
||||||
Some(MemoryValue::Primitive(prim)) => prim.clone(),
|
Some(MemoryValue::Primitive(prim)) => prim.clone(),
|
||||||
e => return Err(format!("Param lookup error, got {:?}", e).into()),
|
e => return Err(format!("Param lookup error, got {:?}", e).into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Lookup::SelfParam => {
|
||||||
|
let mem = Memory::self_param();
|
||||||
|
match self.state.memory.lookup(&mem) {
|
||||||
|
Some(MemoryValue::Primitive(prim)) => prim.clone(),
|
||||||
|
e => return Err(format!("SelfParam lookup error, got {:?}", e).into()),
|
||||||
|
}
|
||||||
|
}
|
||||||
Lookup::LocalVar(ref id) | Lookup::GlobalVar(ref id) => {
|
Lookup::LocalVar(ref id) | Lookup::GlobalVar(ref id) => {
|
||||||
let mem = id.into();
|
let mem = id.into();
|
||||||
match self.state.environments.lookup(&mem) {
|
match self.state.memory.lookup(&mem) {
|
||||||
Some(MemoryValue::Primitive(expr)) => expr.clone(),
|
Some(MemoryValue::Primitive(expr)) => expr.clone(),
|
||||||
_ =>
|
_ =>
|
||||||
return Err(
|
return Err(
|
||||||
@ -149,10 +155,12 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
let mem = lval.into();
|
let mem = lval.into();
|
||||||
let evaluated = self.expression(rval)?;
|
let evaluated = self.expression(rval)?;
|
||||||
println!("Inserting {:?} into {:?}", evaluated, mem);
|
println!("Inserting {:?} into {:?}", evaluated, mem);
|
||||||
self.state.environments.insert(mem, MemoryValue::Primitive(evaluated));
|
self.state.memory.insert(mem, MemoryValue::Primitive(evaluated));
|
||||||
Primitive::unit()
|
Primitive::unit()
|
||||||
}
|
}
|
||||||
Expression::Call { box f, args } => self.call_expression(f, args)?,
|
Expression::Call { box f, args } => self.call_expression(f, args, None)?,
|
||||||
|
Expression::CallMethod { box f, args, box self_expr } =>
|
||||||
|
self.call_expression(f, args, Some(self_expr))?,
|
||||||
Expression::Callable(Callable::DataConstructor { type_id, tag }) => {
|
Expression::Callable(Callable::DataConstructor { type_id, tag }) => {
|
||||||
let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
|
let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
|
||||||
if arity == 0 {
|
if arity == 0 {
|
||||||
@ -303,9 +311,9 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
let cond = self.expression(cond)?;
|
let cond = self.expression(cond)?;
|
||||||
|
|
||||||
for alt in alternatives.into_iter() {
|
for alt in alternatives.into_iter() {
|
||||||
let mut new_scope = self.state.environments.new_scope(None);
|
let mut new_scope = self.state.memory.new_scope(None);
|
||||||
if matches(&cond, &alt.pattern, &mut new_scope) {
|
if matches(&cond, &alt.pattern, &mut new_scope) {
|
||||||
let mut new_state = State { environments: new_scope };
|
let mut new_state = State { memory: new_scope };
|
||||||
let mut evaluator = Evaluator::new(&mut new_state, self.type_context);
|
let mut evaluator = Evaluator::new(&mut new_state, self.type_context);
|
||||||
let output = evaluator.block(alt.item);
|
let output = evaluator.block(alt.item);
|
||||||
self.early_returning = evaluator.early_returning;
|
self.early_returning = evaluator.early_returning;
|
||||||
@ -315,7 +323,13 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
Err("No valid match in match expression".into())
|
Err("No valid match in match expression".into())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn call_expression(&mut self, f: Expression, args: Vec<Expression>) -> EvalResult<Primitive> {
|
//TODO need to do something with self_expr to make method invocations actually work
|
||||||
|
fn call_expression(
|
||||||
|
&mut self,
|
||||||
|
f: Expression,
|
||||||
|
args: Vec<Expression>,
|
||||||
|
self_expr: Option<Expression>,
|
||||||
|
) -> EvalResult<Primitive> {
|
||||||
let func = match self.expression(f)? {
|
let func = match self.expression(f)? {
|
||||||
Primitive::Callable(func) => func,
|
Primitive::Callable(func) => func,
|
||||||
other => return Err(format!("Trying to call non-function value: {:?}", other).into()),
|
other => return Err(format!("Trying to call non-function value: {:?}", other).into()),
|
||||||
@ -324,10 +338,10 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
Callable::Builtin(builtin) => self.apply_builtin(builtin, args),
|
Callable::Builtin(builtin) => self.apply_builtin(builtin, args),
|
||||||
Callable::UserDefined(def_id) => {
|
Callable::UserDefined(def_id) => {
|
||||||
let mem = (&def_id).into();
|
let mem = (&def_id).into();
|
||||||
match self.state.environments.lookup(&mem) {
|
match self.state.memory.lookup(&mem) {
|
||||||
Some(MemoryValue::Function(FunctionDefinition { body })) => {
|
Some(MemoryValue::Function(FunctionDefinition { body })) => {
|
||||||
let body = body.clone(); //TODO ideally this clone would not happen
|
let body = body.clone(); //TODO ideally this clone would not happen
|
||||||
self.apply_function(body, args)
|
self.apply_function(body, args, self_expr)
|
||||||
}
|
}
|
||||||
e => Err(format!("Error looking up function with id {}: {:?}", def_id, e).into()),
|
e => Err(format!("Error looking up function with id {}: {:?}", def_id, e).into()),
|
||||||
}
|
}
|
||||||
@ -341,7 +355,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
)
|
)
|
||||||
.into());
|
.into());
|
||||||
}
|
}
|
||||||
self.apply_function(body, args)
|
self.apply_function(body, args, None)
|
||||||
}
|
}
|
||||||
Callable::DataConstructor { type_id, tag } => {
|
Callable::DataConstructor { type_id, tag } => {
|
||||||
let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
|
let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
|
||||||
@ -470,21 +484,30 @@ impl<'a, 'b> Evaluator<'a, 'b> {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn apply_function(&mut self, body: Vec<Statement>, args: Vec<Expression>) -> EvalResult<Primitive> {
|
fn apply_function(
|
||||||
|
&mut self,
|
||||||
|
body: Vec<Statement>,
|
||||||
|
args: Vec<Expression>,
|
||||||
|
self_expr: Option<Expression>,
|
||||||
|
) -> EvalResult<Primitive> {
|
||||||
|
let self_expr = if let Some(expr) = self_expr { Some(self.expression(expr)?) } else { None };
|
||||||
let mut evaluated_args: Vec<Primitive> = vec![];
|
let mut evaluated_args: Vec<Primitive> = vec![];
|
||||||
for arg in args.into_iter() {
|
for arg in args.into_iter() {
|
||||||
evaluated_args.push(self.expression(arg)?);
|
evaluated_args.push(self.expression(arg)?);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut frame_state = State { environments: self.state.environments.new_scope(None) };
|
let mut frame_state = State { memory: self.state.memory.new_scope(None) };
|
||||||
let mut evaluator = Evaluator::new(&mut frame_state, self.type_context);
|
let mut evaluator = Evaluator::new(&mut frame_state, self.type_context);
|
||||||
|
|
||||||
|
if let Some(evaled) = self_expr {
|
||||||
|
let mem = Memory::self_param();
|
||||||
|
evaluator.state.memory.insert(mem, MemoryValue::Primitive(evaled));
|
||||||
|
}
|
||||||
for (n, evaled) in evaluated_args.into_iter().enumerate() {
|
for (n, evaled) in evaluated_args.into_iter().enumerate() {
|
||||||
let n = n as u8;
|
let n = n as u8;
|
||||||
let mem = n.into();
|
let mem = n.into();
|
||||||
evaluator.state.environments.insert(mem, MemoryValue::Primitive(evaled));
|
evaluator.state.memory.insert(mem, MemoryValue::Primitive(evaled));
|
||||||
}
|
}
|
||||||
|
|
||||||
evaluator.block(body)
|
evaluator.block(body)
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -4,7 +4,7 @@ use crate::{
|
|||||||
reduced_ir::{Callable, Expression, FunctionDefinition, Literal, ReducedIR},
|
reduced_ir::{Callable, Expression, FunctionDefinition, Literal, ReducedIR},
|
||||||
symbol_table::DefId,
|
symbol_table::DefId,
|
||||||
type_inference::{TypeContext, TypeId},
|
type_inference::{TypeContext, TypeId},
|
||||||
util::ScopeStack,
|
util::{delim_wrapped, ScopeStack},
|
||||||
};
|
};
|
||||||
|
|
||||||
mod evaluator;
|
mod evaluator;
|
||||||
@ -14,7 +14,7 @@ type EvalResult<T> = Result<T, RuntimeError>;
|
|||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct State<'a> {
|
pub struct State<'a> {
|
||||||
environments: ScopeStack<'a, Memory, MemoryValue>,
|
memory: ScopeStack<'a, Memory, MemoryValue>,
|
||||||
}
|
}
|
||||||
|
|
||||||
//TODO - eh, I dunno, maybe it doesn't matter exactly how memory works in the tree-walking
|
//TODO - eh, I dunno, maybe it doesn't matter exactly how memory works in the tree-walking
|
||||||
@ -24,6 +24,12 @@ enum Memory {
|
|||||||
Index(u32),
|
Index(u32),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Memory {
|
||||||
|
fn self_param() -> Self {
|
||||||
|
Memory::Index(3_999_999)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// This is for function param lookups, and is a hack
|
// This is for function param lookups, and is a hack
|
||||||
impl From<u8> for Memory {
|
impl From<u8> for Memory {
|
||||||
fn from(n: u8) -> Self {
|
fn from(n: u8) -> Self {
|
||||||
@ -61,19 +67,6 @@ impl RuntimeError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn delim_wrapped(lhs: char, rhs: char, terms: impl Iterator<Item = String>) -> String {
|
|
||||||
let mut buf = String::new();
|
|
||||||
write!(buf, "{}", lhs).unwrap();
|
|
||||||
for term in terms.map(Some).intersperse(None) {
|
|
||||||
match term {
|
|
||||||
Some(e) => write!(buf, "{}", e).unwrap(),
|
|
||||||
None => write!(buf, ", ").unwrap(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
write!(buf, "{}", rhs).unwrap();
|
|
||||||
buf
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Anything that can be stored in memory; that is, a function definition, or a fully-evaluated
|
/// Anything that can be stored in memory; that is, a function definition, or a fully-evaluated
|
||||||
/// program value.
|
/// program value.
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
@ -165,7 +158,7 @@ impl From<Literal> for Primitive {
|
|||||||
|
|
||||||
impl<'a> State<'a> {
|
impl<'a> State<'a> {
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
Self { environments: ScopeStack::new(Some("global".to_string())) }
|
Self { memory: ScopeStack::new(Some("global".to_string())) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn evaluate(
|
pub fn evaluate(
|
@ -43,7 +43,7 @@ fn test_basic_eval() {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn op_eval() {
|
fn op_eval() {
|
||||||
eval_assert("- 13", "-13");
|
eval_assert("-13", "-13");
|
||||||
eval_assert("10 - 2", "8");
|
eval_assert("10 - 2", "8");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -95,8 +95,7 @@ trad()"#,
|
|||||||
"30",
|
"30",
|
||||||
);
|
);
|
||||||
|
|
||||||
let err =
|
let err = "No symbol found for name: `a`";
|
||||||
"No symbol found for name: QualifiedName { id: Id { idx: 4, t: PhantomData }, components: [\"a\"] }";
|
|
||||||
|
|
||||||
eval_assert_failure(
|
eval_assert_failure(
|
||||||
r#"
|
r#"
|
||||||
@ -271,26 +270,26 @@ fn full_if_matching() {
|
|||||||
let source = r#"
|
let source = r#"
|
||||||
type Option<T> = Some(T) | None
|
type Option<T> = Some(T) | None
|
||||||
let a = Option::None
|
let a = Option::None
|
||||||
if a { is Option::None then 4, is Option::Some(x) then x }
|
if a { is Option::None then 4; is Option::Some(x) then x }
|
||||||
"#;
|
"#;
|
||||||
eval_assert(source, "4");
|
eval_assert(source, "4");
|
||||||
|
|
||||||
let source = r#"
|
let source = r#"
|
||||||
type Option<T> = Some(T) | None
|
type Option<T> = Some(T) | None
|
||||||
let sara = Option::Some(99)
|
let sara = Option::Some(99)
|
||||||
if sara { is Option::None then 1 + 3, is Option::Some(x) then x }
|
if sara { is Option::None then 1 + 3; is Option::Some(x) then x }
|
||||||
"#;
|
"#;
|
||||||
eval_assert(source, "99");
|
eval_assert(source, "99");
|
||||||
|
|
||||||
let source = r#"
|
let source = r#"
|
||||||
let a = 10
|
let a = 10
|
||||||
if a { is 10 then "x", is 4 then "y" }
|
if a { is 10 then "x"; is 4 then "y" }
|
||||||
"#;
|
"#;
|
||||||
eval_assert(source, "\"x\"");
|
eval_assert(source, "\"x\"");
|
||||||
|
|
||||||
let source = r#"
|
let source = r#"
|
||||||
let a = 10
|
let a = 10
|
||||||
if a { is 15 then "x", is 10 then "y" }
|
if a { is 15 then "x"; is 10 then "y" }
|
||||||
"#;
|
"#;
|
||||||
eval_assert(source, "\"y\"");
|
eval_assert(source, "\"y\"");
|
||||||
}
|
}
|
||||||
@ -300,7 +299,7 @@ if a { is 15 then "x", is 10 then "y" }
|
|||||||
fn string_pattern() {
|
fn string_pattern() {
|
||||||
let source = r#"
|
let source = r#"
|
||||||
let a = "foo"
|
let a = "foo"
|
||||||
if a { is "foo" then "x", is _ then "y" }
|
if a { is "foo" then "x"; is _ then "y" }
|
||||||
"#;
|
"#;
|
||||||
eval_assert(source, "\"x\"");
|
eval_assert(source, "\"x\"");
|
||||||
}
|
}
|
||||||
@ -310,7 +309,7 @@ fn boolean_pattern() {
|
|||||||
let source = r#"
|
let source = r#"
|
||||||
let a = true
|
let a = true
|
||||||
if a {
|
if a {
|
||||||
is true then "x",
|
is true then "x"
|
||||||
is false then "y"
|
is false then "y"
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
@ -321,7 +320,7 @@ if a {
|
|||||||
fn boolean_pattern_2() {
|
fn boolean_pattern_2() {
|
||||||
let source = r#"
|
let source = r#"
|
||||||
let a = false
|
let a = false
|
||||||
if a { is true then "x", is false then "y" }
|
if a { is true then "x"; is false then "y" }
|
||||||
"#;
|
"#;
|
||||||
eval_assert(source, "\"y\"");
|
eval_assert(source, "\"y\"");
|
||||||
}
|
}
|
||||||
@ -341,7 +340,7 @@ if Option::Some(10) {
|
|||||||
fn tuple_pattern() {
|
fn tuple_pattern() {
|
||||||
let source = r#"
|
let source = r#"
|
||||||
if (1, 2) {
|
if (1, 2) {
|
||||||
is (1, x) then x,
|
is (1, x) then x;
|
||||||
is _ then 99
|
is _ then 99
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
@ -352,7 +351,7 @@ if (1, 2) {
|
|||||||
fn tuple_pattern_2() {
|
fn tuple_pattern_2() {
|
||||||
let source = r#"
|
let source = r#"
|
||||||
if (1, 2) {
|
if (1, 2) {
|
||||||
is (10, x) then x,
|
is (10, x) then x
|
||||||
is (y, x) then x + y
|
is (y, x) then x + y
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
@ -363,7 +362,7 @@ if (1, 2) {
|
|||||||
fn tuple_pattern_3() {
|
fn tuple_pattern_3() {
|
||||||
let source = r#"
|
let source = r#"
|
||||||
if (1, 5) {
|
if (1, 5) {
|
||||||
is (10, x) then x,
|
is (10, x) then x
|
||||||
is (1, x) then x
|
is (1, x) then x
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
@ -374,8 +373,8 @@ if (1, 5) {
|
|||||||
fn tuple_pattern_4() {
|
fn tuple_pattern_4() {
|
||||||
let source = r#"
|
let source = r#"
|
||||||
if (1, 5) {
|
if (1, 5) {
|
||||||
is (10, x) then x,
|
is (10, x) then x
|
||||||
is (1, x) then x,
|
is (1, x) then x
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
eval_assert(source, "5");
|
eval_assert(source, "5");
|
||||||
@ -390,21 +389,21 @@ let b = Stuff::Jugs(1, "haha")
|
|||||||
let c = Stuff::Mardok
|
let c = Stuff::Mardok
|
||||||
|
|
||||||
let x = if a {
|
let x = if a {
|
||||||
is Stuff::Mulch(20) then "x",
|
is Stuff::Mulch(20) then "x"
|
||||||
is _ then "ERR"
|
is _ then "ERR"
|
||||||
}
|
}
|
||||||
|
|
||||||
let y = if b {
|
let y = if b {
|
||||||
is Stuff::Mulch(n) then "ERR",
|
is Stuff::Mulch(n) then "ERR"
|
||||||
is Stuff::Jugs(2, _) then "ERR",
|
is Stuff::Jugs(2, _) then "ERR"
|
||||||
is Stuff::Jugs(1, s) then s,
|
is Stuff::Jugs(1, s) then s
|
||||||
is _ then "ERR",
|
is _ then "ERR"
|
||||||
}
|
}
|
||||||
|
|
||||||
let z = if c {
|
let z = if c {
|
||||||
is Stuff::Jugs(_, _) then "ERR",
|
is Stuff::Jugs(_, _) then "ERR"
|
||||||
is Stuff::Mardok then "NIGH",
|
is Stuff::Mardok then "NIGH"
|
||||||
is _ then "ERR",
|
is _ then "ERR"
|
||||||
}
|
}
|
||||||
|
|
||||||
(x, y, z)
|
(x, y, z)
|
||||||
@ -546,3 +545,20 @@ fn foo() { return 2 }
|
|||||||
"(7, 9)",
|
"(7, 9)",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn eval_method() {
|
||||||
|
let src = r#"
|
||||||
|
type Thing = Thing
|
||||||
|
impl Thing {
|
||||||
|
fn a_method() {
|
||||||
|
20
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
let a = Thing::Thing
|
||||||
|
4 + a.a_method()
|
||||||
|
"#;
|
||||||
|
eval_assert(src, "24");
|
||||||
|
}
|
@ -1,7 +1,7 @@
|
|||||||
use std::{collections::HashMap, convert::From};
|
use std::{collections::HashMap, convert::From};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
ast::TypeIdentifier,
|
ast::{TypeIdentifier, AST},
|
||||||
identifier::{define_id_kind, Id, IdStore},
|
identifier::{define_id_kind, Id, IdStore},
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -96,6 +96,11 @@ impl TypeContext {
|
|||||||
pub fn lookup_type(&self, type_id: &TypeId) -> Option<&DefinedType> {
|
pub fn lookup_type(&self, type_id: &TypeId) -> Option<&DefinedType> {
|
||||||
self.defined_types.get(type_id)
|
self.defined_types.get(type_id)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//TODO return some kind of overall type later?
|
||||||
|
pub fn typecheck(&mut self, ast: &AST) -> Result<(), TypeError> {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A type defined in program source code, as opposed to a builtin.
|
/// A type defined in program source code, as opposed to a builtin.
|
||||||
@ -211,12 +216,12 @@ macro_rules! ty {
|
|||||||
Type::Const(crate::type_inference::TypeConst::$type_name)
|
Type::Const(crate::type_inference::TypeConst::$type_name)
|
||||||
};
|
};
|
||||||
($t1:ident -> $t2:ident) => {
|
($t1:ident -> $t2:ident) => {
|
||||||
Type::Arrow { params: vec![ty!($t1)], ret: box ty!($t2) }
|
Type::Arrow { params: vec![ty!($t1)], ret: Box::new(ty!($t2)) }
|
||||||
};
|
};
|
||||||
($t1:ident -> $t2:ident -> $t3:ident) => {
|
($t1:ident -> $t2:ident -> $t3:ident) => {
|
||||||
Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: box ty!($t3) }
|
Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: Box::new(ty!($t3)) }
|
||||||
};
|
};
|
||||||
($type_list:ident, $ret_type:ident) => {
|
($type_list:ident, $ret_type:ident) => {
|
||||||
Type::Arrow { params: $type_list, ret: box $ret_type }
|
Type::Arrow { params: $type_list, ret: Box::new($ret_type) }
|
||||||
};
|
};
|
||||||
}
|
}
|
@ -1,4 +1,18 @@
|
|||||||
use std::{cmp::Eq, collections::HashMap, hash::Hash};
|
use std::{cmp::Eq, collections::HashMap, fmt::Write, hash::Hash};
|
||||||
|
|
||||||
|
/// Utility function for printing a comma-delimited list of things
|
||||||
|
pub(crate) fn delim_wrapped(lhs: char, rhs: char, terms: impl Iterator<Item = String>) -> String {
|
||||||
|
let mut buf = String::new();
|
||||||
|
write!(buf, "{}", lhs).unwrap();
|
||||||
|
for term in terms.map(Some).intersperse(None) {
|
||||||
|
match term {
|
||||||
|
Some(e) => write!(buf, "{}", e).unwrap(),
|
||||||
|
None => write!(buf, ", ").unwrap(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
write!(buf, "{}", rhs).unwrap();
|
||||||
|
buf
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Default, Debug)]
|
#[derive(Default, Debug)]
|
||||||
pub struct ScopeStack<'a, T: 'a, V: 'a, N = String>
|
pub struct ScopeStack<'a, T: 'a, V: 'a, N = String>
|
||||||
@ -52,11 +66,15 @@ where T: Hash + Eq
|
|||||||
/// Quickly create an AST from a string, with no error checking. For test use only
|
/// Quickly create an AST from a string, with no error checking. For test use only
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
pub fn quick_ast(input: &str) -> crate::ast::AST {
|
pub fn quick_ast(input: &str) -> crate::ast::AST {
|
||||||
let tokens = crate::tokenizing::tokenize(input);
|
|
||||||
let mut parser = crate::parsing::Parser::new();
|
let mut parser = crate::parsing::Parser::new();
|
||||||
parser.add_new_tokens(tokens);
|
let output = parser.parse(input);
|
||||||
let output = parser.parse();
|
match output {
|
||||||
output.unwrap()
|
Ok(output) => output,
|
||||||
|
Err(err) => {
|
||||||
|
println!("Parse error: {}", err.msg);
|
||||||
|
panic!();
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(unused_macros)]
|
#[allow(unused_macros)]
|
@ -3,8 +3,5 @@ extern crate includedir_codegen;
|
|||||||
use includedir_codegen::Compression;
|
use includedir_codegen::Compression;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
includedir_codegen::start("WEBFILES")
|
includedir_codegen::start("WEBFILES").dir("../static", Compression::Gzip).build("static.rs").unwrap();
|
||||||
.dir("../static", Compression::Gzip)
|
|
||||||
.build("static.rs")
|
|
||||||
.unwrap();
|
|
||||||
}
|
}
|
||||||
|
@ -1,8 +1,10 @@
|
|||||||
use crate::directive_actions::DirectiveAction;
|
|
||||||
use crate::language::ProgrammingLanguageInterface;
|
|
||||||
use crate::{InterpreterDirectiveOutput, Repl};
|
|
||||||
use colored::*;
|
use colored::*;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
directive_actions::DirectiveAction, language::ProgrammingLanguageInterface, InterpreterDirectiveOutput,
|
||||||
|
Repl,
|
||||||
|
};
|
||||||
|
|
||||||
/// A CommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first
|
/// A CommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first
|
||||||
/// Terminal, it will use the `DirectiveAction` found there to find an appropriate function to execute,
|
/// Terminal, it will use the `DirectiveAction` found there to find an appropriate function to execute,
|
||||||
/// and then execute it with any remaining arguments
|
/// and then execute it with any remaining arguments
|
||||||
@ -39,12 +41,7 @@ impl CommandTree {
|
|||||||
children: Vec<CommandTree>,
|
children: Vec<CommandTree>,
|
||||||
action: DirectiveAction,
|
action: DirectiveAction,
|
||||||
) -> CommandTree {
|
) -> CommandTree {
|
||||||
CommandTree::Terminal {
|
CommandTree::Terminal { name: s.to_string(), help_msg: help.map(|x| x.to_string()), children, action }
|
||||||
name: s.to_string(),
|
|
||||||
help_msg: help.map(|x| x.to_string()),
|
|
||||||
children,
|
|
||||||
action,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree {
|
pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree {
|
||||||
@ -65,14 +62,10 @@ impl CommandTree {
|
|||||||
}
|
}
|
||||||
pub fn get_help(&self) -> &str {
|
pub fn get_help(&self) -> &str {
|
||||||
match self {
|
match self {
|
||||||
CommandTree::Terminal { help_msg, .. } => help_msg
|
CommandTree::Terminal { help_msg, .. } =>
|
||||||
.as_ref()
|
help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
|
||||||
.map(|s| s.as_str())
|
CommandTree::NonTerminal { help_msg, .. } =>
|
||||||
.unwrap_or("<no help text provided>"),
|
help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
|
||||||
CommandTree::NonTerminal { help_msg, .. } => help_msg
|
|
||||||
.as_ref()
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.unwrap_or("<no help text provided>"),
|
|
||||||
CommandTree::Top(_) => "",
|
CommandTree::Top(_) => "",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -96,11 +89,7 @@ impl CommandTree {
|
|||||||
|
|
||||||
let res: Result<(DirectiveAction, usize), String> = loop {
|
let res: Result<(DirectiveAction, usize), String> = loop {
|
||||||
match dir_pointer {
|
match dir_pointer {
|
||||||
CommandTree::Top(subcommands)
|
CommandTree::Top(subcommands) | CommandTree::NonTerminal { children: subcommands, .. } => {
|
||||||
| CommandTree::NonTerminal {
|
|
||||||
children: subcommands,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
let next_command = match arguments.get(idx) {
|
let next_command = match arguments.get(idx) {
|
||||||
Some(cmd) => cmd,
|
Some(cmd) => cmd,
|
||||||
None => break Err("Command requires arguments".to_owned()),
|
None => break Err("Command requires arguments".to_owned()),
|
||||||
|
@ -1,10 +1,11 @@
|
|||||||
use crate::help::help;
|
|
||||||
use crate::language::{
|
|
||||||
LangMetaRequest, LangMetaResponse, ProgrammingLanguageInterface,
|
|
||||||
};
|
|
||||||
use crate::{InterpreterDirectiveOutput, Repl};
|
|
||||||
use std::fmt::Write as FmtWrite;
|
use std::fmt::Write as FmtWrite;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
help::help,
|
||||||
|
language::{LangMetaRequest, LangMetaResponse, ProgrammingLanguageInterface},
|
||||||
|
InterpreterDirectiveOutput, Repl,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum DirectiveAction {
|
pub enum DirectiveAction {
|
||||||
Null,
|
Null,
|
||||||
@ -31,10 +32,7 @@ impl DirectiveAction {
|
|||||||
::std::process::exit(0)
|
::std::process::exit(0)
|
||||||
}
|
}
|
||||||
ListPasses => {
|
ListPasses => {
|
||||||
let pass_names = match repl
|
let pass_names = match repl.language_state.request_meta(LangMetaRequest::StageNames) {
|
||||||
.language_state
|
|
||||||
.request_meta(LangMetaRequest::StageNames)
|
|
||||||
{
|
|
||||||
LangMetaResponse::StageNames(names) => names,
|
LangMetaResponse::StageNames(names) => names,
|
||||||
_ => vec![],
|
_ => vec![],
|
||||||
};
|
};
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
use crate::command_tree::CommandTree;
|
use crate::{command_tree::CommandTree, directive_actions::DirectiveAction};
|
||||||
use crate::directive_actions::DirectiveAction;
|
|
||||||
|
|
||||||
pub fn directives_from_pass_names(pass_names: &[String]) -> CommandTree {
|
pub fn directives_from_pass_names(pass_names: &[String]) -> CommandTree {
|
||||||
let passes_directives: Vec<CommandTree> = pass_names
|
let passes_directives: Vec<CommandTree> = pass_names
|
||||||
@ -33,11 +32,7 @@ fn get_list(passes_directives: &[CommandTree], include_help: bool) -> Vec<Comman
|
|||||||
CommandTree::terminal(
|
CommandTree::terminal(
|
||||||
"help",
|
"help",
|
||||||
Some("Print this help message"),
|
Some("Print this help message"),
|
||||||
if include_help {
|
if include_help { get_list(passes_directives, false) } else { vec![] },
|
||||||
get_list(passes_directives, false)
|
|
||||||
} else {
|
|
||||||
vec![]
|
|
||||||
},
|
|
||||||
Help,
|
Help,
|
||||||
),
|
),
|
||||||
CommandTree::nonterm(
|
CommandTree::nonterm(
|
||||||
@ -68,11 +63,6 @@ fn get_list(passes_directives: &[CommandTree], include_help: bool) -> Vec<Comman
|
|||||||
),
|
),
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
CommandTree::terminal(
|
CommandTree::terminal("doc", Some("Get language-specific help for an item"), vec![], Doc),
|
||||||
"doc",
|
|
||||||
Some("Get language-specific help for an item"),
|
|
||||||
vec![],
|
|
||||||
Doc,
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
@ -1,10 +1,11 @@
|
|||||||
use std::fmt::Write as FmtWrite;
|
use std::fmt::Write as FmtWrite;
|
||||||
|
|
||||||
use crate::command_tree::CommandTree;
|
|
||||||
use crate::language::ProgrammingLanguageInterface;
|
|
||||||
use crate::{InterpreterDirectiveOutput, Repl};
|
|
||||||
use colored::*;
|
use colored::*;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
command_tree::CommandTree, language::ProgrammingLanguageInterface, InterpreterDirectiveOutput, Repl,
|
||||||
|
};
|
||||||
|
|
||||||
pub fn help<L: ProgrammingLanguageInterface>(
|
pub fn help<L: ProgrammingLanguageInterface>(
|
||||||
repl: &mut Repl<L>,
|
repl: &mut Repl<L>,
|
||||||
arguments: &[&str],
|
arguments: &[&str],
|
||||||
@ -21,8 +22,7 @@ pub fn help<L: ProgrammingLanguageInterface>(
|
|||||||
let children = dir.get_children();
|
let children = dir.get_children();
|
||||||
writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
|
writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
|
||||||
for sub in children.iter() {
|
for sub in children.iter() {
|
||||||
writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help())
|
writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help()).unwrap();
|
||||||
.unwrap();
|
|
||||||
}
|
}
|
||||||
buf
|
buf
|
||||||
}
|
}
|
||||||
@ -31,16 +31,11 @@ pub fn help<L: ProgrammingLanguageInterface>(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_directive_from_commands<'a>(
|
fn get_directive_from_commands<'a>(commands: &[&str], dirs: &'a CommandTree) -> Option<&'a CommandTree> {
|
||||||
commands: &[&str],
|
|
||||||
dirs: &'a CommandTree,
|
|
||||||
) -> Option<&'a CommandTree> {
|
|
||||||
let mut directive_list = dirs.get_children();
|
let mut directive_list = dirs.get_children();
|
||||||
let mut matched_directive = None;
|
let mut matched_directive = None;
|
||||||
for cmd in commands {
|
for cmd in commands {
|
||||||
let found = directive_list
|
let found = directive_list.iter().find(|directive| directive.get_cmd() == *cmd);
|
||||||
.iter()
|
|
||||||
.find(|directive| directive.get_cmd() == *cmd);
|
|
||||||
if let Some(dir) = found {
|
if let Some(dir) = found {
|
||||||
directive_list = dir.get_children();
|
directive_list = dir.get_children();
|
||||||
}
|
}
|
||||||
@ -53,33 +48,16 @@ fn get_directive_from_commands<'a>(
|
|||||||
fn global_help<L: ProgrammingLanguageInterface>(repl: &mut Repl<L>) -> InterpreterDirectiveOutput {
|
fn global_help<L: ProgrammingLanguageInterface>(repl: &mut Repl<L>) -> InterpreterDirectiveOutput {
|
||||||
let mut buf = String::new();
|
let mut buf = String::new();
|
||||||
|
|
||||||
writeln!(
|
writeln!(buf, "{} version {}", "Schala REPL".bright_red().bold(), crate::VERSION_STRING).unwrap();
|
||||||
buf,
|
|
||||||
"{} version {}",
|
|
||||||
"Schala REPL".bright_red().bold(),
|
|
||||||
crate::VERSION_STRING
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
writeln!(buf, "-----------------------").unwrap();
|
writeln!(buf, "-----------------------").unwrap();
|
||||||
|
|
||||||
for directive in repl.get_directives().get_children() {
|
for directive in repl.get_directives().get_children() {
|
||||||
writeln!(
|
writeln!(buf, "{}{} - {}", repl.sigil, directive.get_cmd(), directive.get_help()).unwrap();
|
||||||
buf,
|
|
||||||
"{}{} - {}",
|
|
||||||
repl.sigil,
|
|
||||||
directive.get_cmd(),
|
|
||||||
directive.get_help()
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
writeln!(buf).unwrap();
|
writeln!(buf).unwrap();
|
||||||
writeln!(
|
writeln!(buf, "Language-specific help for {}", <L as ProgrammingLanguageInterface>::language_name())
|
||||||
buf,
|
.unwrap();
|
||||||
"Language-specific help for {}",
|
|
||||||
<L as ProgrammingLanguageInterface>::language_name()
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
writeln!(buf, "-----------------------").unwrap();
|
writeln!(buf, "-----------------------").unwrap();
|
||||||
Some(buf)
|
Some(buf)
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
use std::collections::HashSet;
|
use std::{collections::HashSet, time};
|
||||||
use std::time;
|
|
||||||
|
|
||||||
pub trait ProgrammingLanguageInterface {
|
pub trait ProgrammingLanguageInterface {
|
||||||
type Config: Default + Clone;
|
type Config: Default + Clone;
|
||||||
@ -9,10 +8,7 @@ pub trait ProgrammingLanguageInterface {
|
|||||||
fn run_computation(&mut self, _request: ComputationRequest<Self::Config>) -> ComputationResponse;
|
fn run_computation(&mut self, _request: ComputationRequest<Self::Config>) -> ComputationResponse;
|
||||||
|
|
||||||
fn request_meta(&mut self, _request: LangMetaRequest) -> LangMetaResponse {
|
fn request_meta(&mut self, _request: LangMetaRequest) -> LangMetaResponse {
|
||||||
LangMetaResponse::Custom {
|
LangMetaResponse::Custom { kind: "not-implemented".to_owned(), value: format!("") }
|
||||||
kind: "not-implemented".to_owned(),
|
|
||||||
value: format!(""),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -37,10 +33,7 @@ pub struct GlobalOutputStats {
|
|||||||
#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize, Serialize)]
|
||||||
pub enum DebugAsk {
|
pub enum DebugAsk {
|
||||||
Timing,
|
Timing,
|
||||||
ByStage {
|
ByStage { stage_name: String, token: Option<String> },
|
||||||
stage_name: String,
|
|
||||||
token: Option<String>,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct DebugResponse {
|
pub struct DebugResponse {
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
#![feature(box_patterns, box_syntax, proc_macro_hygiene, decl_macro, iter_intersperse)]
|
#![feature(box_patterns, proc_macro_hygiene, decl_macro, iter_intersperse)]
|
||||||
#![feature(plugin)]
|
|
||||||
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate serde_derive;
|
extern crate serde_derive;
|
||||||
@ -17,16 +16,14 @@ mod directives;
|
|||||||
use directives::directives_from_pass_names;
|
use directives::directives_from_pass_names;
|
||||||
mod help;
|
mod help;
|
||||||
mod response;
|
mod response;
|
||||||
use response::ReplResponse;
|
use std::{collections::HashSet, sync::Arc};
|
||||||
|
|
||||||
use colored::*;
|
use colored::*;
|
||||||
use std::collections::HashSet;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
pub use language::{
|
pub use language::{
|
||||||
ComputationRequest, ComputationResponse, DebugAsk, DebugResponse, GlobalOutputStats,
|
ComputationRequest, ComputationResponse, DebugAsk, DebugResponse, GlobalOutputStats, LangMetaRequest,
|
||||||
LangMetaRequest, LangMetaResponse, ProgrammingLanguageInterface,
|
LangMetaResponse, ProgrammingLanguageInterface,
|
||||||
};
|
};
|
||||||
|
use response::ReplResponse;
|
||||||
|
|
||||||
include!(concat!(env!("OUT_DIR"), "/static.rs"));
|
include!(concat!(env!("OUT_DIR"), "/static.rs"));
|
||||||
const VERSION_STRING: &str = "0.1.0";
|
const VERSION_STRING: &str = "0.1.0";
|
||||||
@ -58,20 +55,12 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
|
|||||||
let line_reader = Interface::new("schala-repl").unwrap();
|
let line_reader = Interface::new("schala-repl").unwrap();
|
||||||
let sigil = ':';
|
let sigil = ':';
|
||||||
|
|
||||||
Repl {
|
Repl { sigil, line_reader, language_state: initial_state, options: ReplOptions::new() }
|
||||||
sigil,
|
|
||||||
line_reader,
|
|
||||||
language_state: initial_state,
|
|
||||||
options: ReplOptions::new(),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_repl(&mut self, config: L::Config) {
|
pub fn run_repl(&mut self, config: L::Config) {
|
||||||
println!("Schala meta-interpeter version {}", VERSION_STRING);
|
println!("Schala meta-interpeter version {}", VERSION_STRING);
|
||||||
println!(
|
println!("Type {} for help with the REPL", format!("{}help", self.sigil).bright_green().bold());
|
||||||
"Type {} for help with the REPL",
|
|
||||||
format!("{}help", self.sigil).bright_green().bold()
|
|
||||||
);
|
|
||||||
self.load_options();
|
self.load_options();
|
||||||
self.handle_repl_loop(config);
|
self.handle_repl_loop(config);
|
||||||
self.save_before_exit();
|
self.save_before_exit();
|
||||||
@ -79,9 +68,7 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn load_options(&mut self) {
|
fn load_options(&mut self) {
|
||||||
self.line_reader
|
self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(());
|
||||||
.load_history(HISTORY_SAVE_FILE)
|
|
||||||
.unwrap_or(());
|
|
||||||
match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) {
|
match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) {
|
||||||
Ok(options) => {
|
Ok(options) => {
|
||||||
self.options = options;
|
self.options = options;
|
||||||
@ -113,7 +100,7 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
|
|||||||
self.line_reader.add_history_unique(input.to_string());
|
self.line_reader.add_history_unique(input.to_string());
|
||||||
let mut chars = input.chars().peekable();
|
let mut chars = input.chars().peekable();
|
||||||
let repl_responses = match chars.next() {
|
let repl_responses = match chars.next() {
|
||||||
Some(ch) if ch == self.sigil => {
|
Some(ch) if ch == self.sigil =>
|
||||||
if chars.peek() == Some(&'{') {
|
if chars.peek() == Some(&'{') {
|
||||||
let mut buf = String::new();
|
let mut buf = String::new();
|
||||||
buf.push_str(input.get(2..).unwrap());
|
buf.push_str(input.get(2..).unwrap());
|
||||||
@ -130,12 +117,11 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
|
|||||||
}
|
}
|
||||||
self.handle_input(&buf, &config)
|
self.handle_input(&buf, &config)
|
||||||
} else {
|
} else {
|
||||||
if let Some(output) = self.handle_interpreter_directive(input.get(1..).unwrap()) {
|
if let Some(output) = self.handle_interpreter_directive(input.get(1..).unwrap()) {
|
||||||
println!("{}", output);
|
println!("{}", output);
|
||||||
}
|
}
|
||||||
continue;
|
continue;
|
||||||
}
|
},
|
||||||
}
|
|
||||||
_ => self.handle_input(input, &config),
|
_ => self.handle_input(input, &config),
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -147,8 +133,7 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
|
|||||||
|
|
||||||
fn update_line_reader(&mut self) {
|
fn update_line_reader(&mut self) {
|
||||||
let tab_complete_handler = TabCompleteHandler::new(self.sigil, self.get_directives());
|
let tab_complete_handler = TabCompleteHandler::new(self.sigil, self.get_directives());
|
||||||
self.line_reader
|
self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
|
||||||
.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
|
|
||||||
self.set_prompt(PromptStyle::Normal);
|
self.set_prompt(PromptStyle::Normal);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -162,9 +147,7 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn save_before_exit(&self) {
|
fn save_before_exit(&self) {
|
||||||
self.line_reader
|
self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(());
|
||||||
.save_history(HISTORY_SAVE_FILE)
|
|
||||||
.unwrap_or(());
|
|
||||||
self.options.save_to_file(OPTIONS_SAVE_FILE);
|
self.options.save_to_file(OPTIONS_SAVE_FILE);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -185,20 +168,13 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
|
|||||||
debug_requests.insert(ask.clone());
|
debug_requests.insert(ask.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
let request = ComputationRequest {
|
let request = ComputationRequest { source: input, config: config.clone(), debug_requests };
|
||||||
source: input,
|
|
||||||
config: config.clone(),
|
|
||||||
debug_requests,
|
|
||||||
};
|
|
||||||
let response = self.language_state.run_computation(request);
|
let response = self.language_state.run_computation(request);
|
||||||
response::handle_computation_response(response, &self.options)
|
response::handle_computation_response(response, &self.options)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_directives(&mut self) -> CommandTree {
|
fn get_directives(&mut self) -> CommandTree {
|
||||||
let pass_names = match self
|
let pass_names = match self.language_state.request_meta(LangMetaRequest::StageNames) {
|
||||||
.language_state
|
|
||||||
.request_meta(LangMetaRequest::StageNames)
|
|
||||||
{
|
|
||||||
LangMetaResponse::StageNames(names) => names,
|
LangMetaResponse::StageNames(names) => names,
|
||||||
_ => vec![],
|
_ => vec![],
|
||||||
};
|
};
|
||||||
@ -212,15 +188,14 @@ struct TabCompleteHandler {
|
|||||||
top_level_commands: CommandTree,
|
top_level_commands: CommandTree,
|
||||||
}
|
}
|
||||||
|
|
||||||
use linefeed::complete::{Completer, Completion};
|
use linefeed::{
|
||||||
use linefeed::terminal::Terminal;
|
complete::{Completer, Completion},
|
||||||
|
terminal::Terminal,
|
||||||
|
};
|
||||||
|
|
||||||
impl TabCompleteHandler {
|
impl TabCompleteHandler {
|
||||||
fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler {
|
fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler {
|
||||||
TabCompleteHandler {
|
TabCompleteHandler { top_level_commands, sigil }
|
||||||
top_level_commands,
|
|
||||||
sigil,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -247,11 +222,7 @@ impl<T: Terminal> Completer<T> for TabCompleteHandler {
|
|||||||
None => {
|
None => {
|
||||||
let top = matches!(command_tree, Some(CommandTree::Top(_)));
|
let top = matches!(command_tree, Some(CommandTree::Top(_)));
|
||||||
let word = if top { word.get(1..).unwrap() } else { word };
|
let word = if top { word.get(1..).unwrap() } else { word };
|
||||||
for cmd in command_tree
|
for cmd in command_tree.map(|x| x.get_subcommands()).unwrap_or_default().into_iter() {
|
||||||
.map(|x| x.get_subcommands())
|
|
||||||
.unwrap_or_default()
|
|
||||||
.into_iter()
|
|
||||||
{
|
|
||||||
if cmd.starts_with(word) {
|
if cmd.starts_with(word) {
|
||||||
completions.push(Completion {
|
completions.push(Completion {
|
||||||
completion: format!("{}{}", if top { ":" } else { "" }, cmd),
|
completion: format!("{}{}", if top { ":" } else { "" }, cmd),
|
||||||
@ -265,12 +236,9 @@ impl<T: Terminal> Completer<T> for TabCompleteHandler {
|
|||||||
Some(s) => {
|
Some(s) => {
|
||||||
let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
|
let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
|
||||||
CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
|
CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
|
||||||
CommandTree::NonTerminal { children, .. } => {
|
CommandTree::NonTerminal { children, .. } =>
|
||||||
children.iter().find(|c| c.get_cmd() == s)
|
children.iter().find(|c| c.get_cmd() == s),
|
||||||
}
|
CommandTree::Terminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
|
||||||
CommandTree::Terminal { children, .. } => {
|
|
||||||
children.iter().find(|c| c.get_cmd() == s)
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
command_tree = new_ptr;
|
command_tree = new_ptr;
|
||||||
}
|
}
|
||||||
|
@ -1,8 +1,10 @@
|
|||||||
use crate::language::DebugAsk;
|
use std::{
|
||||||
|
collections::HashSet,
|
||||||
|
fs::File,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
|
||||||
use std::collections::HashSet;
|
use crate::language::DebugAsk;
|
||||||
use std::fs::File;
|
|
||||||
use std::io::{self, Read, Write};
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
pub struct ReplOptions {
|
pub struct ReplOptions {
|
||||||
@ -13,11 +15,7 @@ pub struct ReplOptions {
|
|||||||
|
|
||||||
impl ReplOptions {
|
impl ReplOptions {
|
||||||
pub fn new() -> ReplOptions {
|
pub fn new() -> ReplOptions {
|
||||||
ReplOptions {
|
ReplOptions { debug_asks: HashSet::new(), show_total_time: true, show_stage_times: false }
|
||||||
debug_asks: HashSet::new(),
|
|
||||||
show_total_time: true,
|
|
||||||
show_stage_times: false,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn save_to_file(&self, filename: &str) {
|
pub fn save_to_file(&self, filename: &str) {
|
||||||
|
@ -1,9 +1,11 @@
|
|||||||
use colored::*;
|
use std::{fmt, fmt::Write};
|
||||||
use std::fmt;
|
|
||||||
use std::fmt::Write;
|
|
||||||
|
|
||||||
use crate::language::{ComputationResponse, DebugAsk};
|
use colored::*;
|
||||||
use crate::ReplOptions;
|
|
||||||
|
use crate::{
|
||||||
|
language::{ComputationResponse, DebugAsk},
|
||||||
|
ReplOptions,
|
||||||
|
};
|
||||||
|
|
||||||
pub struct ReplResponse {
|
pub struct ReplResponse {
|
||||||
label: Option<String>,
|
label: Option<String>,
|
||||||
@ -64,16 +66,8 @@ pub fn handle_computation_response(
|
|||||||
}
|
}
|
||||||
|
|
||||||
responses.push(match response.main_output {
|
responses.push(match response.main_output {
|
||||||
Ok(s) => ReplResponse {
|
Ok(s) => ReplResponse { label: None, text: s, color: None },
|
||||||
label: None,
|
Err(e) => ReplResponse { label: Some("Error".to_string()), text: e, color: Some(Color::Red) },
|
||||||
text: s,
|
|
||||||
color: None,
|
|
||||||
},
|
|
||||||
Err(e) => ReplResponse {
|
|
||||||
label: Some("Error".to_string()),
|
|
||||||
text: e,
|
|
||||||
color: Some(Color::Red),
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
|
|
||||||
responses
|
responses
|
||||||
|
@ -77,7 +77,7 @@ x is Some(t) // type bool
|
|||||||
|
|
||||||
if x {
|
if x {
|
||||||
is Some(t) => {
|
is Some(t) => {
|
||||||
},
|
}
|
||||||
is None => {
|
is None => {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
41
src/main.rs
41
src/main.rs
@ -1,17 +1,15 @@
|
|||||||
use schala_repl::{Repl, ProgrammingLanguageInterface, ComputationRequest};
|
use std::{collections::HashSet, fs::File, io::Read, path::PathBuf, process::exit};
|
||||||
|
|
||||||
use std::{fs::File, io::Read, path::PathBuf, process::exit, collections::HashSet};
|
|
||||||
use schala_lang::{Schala, SchalaConfig};
|
use schala_lang::{Schala, SchalaConfig};
|
||||||
|
use schala_repl::{ComputationRequest, ProgrammingLanguageInterface, Repl};
|
||||||
|
|
||||||
//TODO specify multiple langs, and have a way to switch between them
|
//TODO specify multiple langs, and have a way to switch between them
|
||||||
fn main() {
|
fn main() {
|
||||||
let args: Vec<String> = std::env::args().collect();
|
let args: Vec<String> = std::env::args().collect();
|
||||||
let matches = command_line_options()
|
let matches = command_line_options().parse(&args[1..]).unwrap_or_else(|e| {
|
||||||
.parse(&args[1..])
|
eprintln!("Error parsing options: {}", e);
|
||||||
.unwrap_or_else(|e| {
|
exit(1);
|
||||||
eprintln!("Error parsing options: {}", e);
|
});
|
||||||
exit(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
if matches.opt_present("help") {
|
if matches.opt_present("help") {
|
||||||
println!("{}", command_line_options().usage("Schala metainterpreter"));
|
println!("{}", command_line_options().usage("Schala metainterpreter"));
|
||||||
@ -27,27 +25,28 @@ fn main() {
|
|||||||
let paths: Vec<PathBuf> = matches.free.iter().map(PathBuf::from).collect();
|
let paths: Vec<PathBuf> = matches.free.iter().map(PathBuf::from).collect();
|
||||||
//TODO handle more than one file
|
//TODO handle more than one file
|
||||||
let filename = &paths[0];
|
let filename = &paths[0];
|
||||||
let extension = filename.extension().and_then(|e| e.to_str())
|
let extension = filename.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
|
||||||
.unwrap_or_else(|| {
|
|
||||||
eprintln!("Source file `{}` has no extension.", filename.display());
|
eprintln!("Source file `{}` has no extension.", filename.display());
|
||||||
exit(1);
|
exit(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
//TODO this proably should be a macro for every supported language
|
//TODO this proably should be a macro for every supported language
|
||||||
if extension == Schala::source_file_suffix() {
|
if extension == Schala::source_file_suffix() {
|
||||||
let config = SchalaConfig {
|
let config = SchalaConfig { repl: false };
|
||||||
repl: false,
|
|
||||||
};
|
|
||||||
|
|
||||||
run_noninteractive(paths, Schala::new(), config);
|
run_noninteractive(paths, Schala::new(), config);
|
||||||
} else {
|
} else {
|
||||||
eprintln!("Extension .{} not recognized", extension);
|
eprintln!("Extension .{} not recognized", extension);
|
||||||
exit(1);
|
exit(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_noninteractive<L: ProgrammingLanguageInterface>(filenames: Vec<PathBuf>, mut language: L, config: L::Config) {
|
pub fn run_noninteractive<L: ProgrammingLanguageInterface>(
|
||||||
|
filenames: Vec<PathBuf>,
|
||||||
|
mut language: L,
|
||||||
|
config: L::Config,
|
||||||
|
) {
|
||||||
// for now, ony do something with the first filename
|
// for now, ony do something with the first filename
|
||||||
|
|
||||||
let filename = &filenames[0];
|
let filename = &filenames[0];
|
||||||
@ -55,11 +54,7 @@ pub fn run_noninteractive<L: ProgrammingLanguageInterface>(filenames: Vec<PathBu
|
|||||||
let mut buffer = String::new();
|
let mut buffer = String::new();
|
||||||
source_file.read_to_string(&mut buffer).unwrap();
|
source_file.read_to_string(&mut buffer).unwrap();
|
||||||
|
|
||||||
let request = ComputationRequest {
|
let request = ComputationRequest { source: &buffer, config, debug_requests: HashSet::new() };
|
||||||
source: &buffer,
|
|
||||||
config,
|
|
||||||
debug_requests: HashSet::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let response = language.run_computation(request);
|
let response = language.run_computation(request);
|
||||||
match response.main_output {
|
match response.main_output {
|
||||||
|
Loading…
Reference in New Issue
Block a user