252 Commits

Author SHA1 Message Date
greg
38eeec5718 Use lib flag to get debug 2018-04-28 20:44:42 -07:00
greg
431c0fdf8c Starting to try and annotate real compiler passes 2018-04-28 15:46:11 -07:00
greg
9015cbee21 More experimentation 2018-04-28 15:35:04 -07:00
greg
bf7533cdbe Some experimentation 2018-04-28 03:31:53 -07:00
greg
2c79984678 Starting codegen work 2018-04-28 00:08:16 -07:00
greg
82cfd3f03d Adding proc macro for codegen
This should hopefully make the compiler pass thing I want to do possible
2018-04-27 02:19:09 -07:00
greg
9547275355 Backtick operators supported in tokenizing 2018-04-25 03:01:41 -07:00
greg
2dae8d6629 Show artifacts on failure 2018-04-24 23:11:04 -07:00
greg
056ca1c162 TODO note 2018-04-24 20:31:17 -07:00
greg
00ace0b682 Put this stuff back
More complicated to separate out repl than I thought
2018-04-24 20:31:17 -07:00
greg
5fcf0815c0 Start refactoring how interpreter options are split up 2018-04-24 20:31:17 -07:00
greg
748250d383 Swap sigil from . to : 2018-04-24 20:31:17 -07:00
greg
87cc14356d Want to change 'trait' to 'interface' 2018-04-24 20:31:17 -07:00
greg
7004960434 Fix history adding 2018-04-24 20:31:17 -07:00
greg
f5aff0b276 Trait -> Interface 2018-04-24 16:30:17 -07:00
greg
1d6f104b12 Kill old advanced_slice_patterns 2018-04-03 23:24:21 -07:00
greg
c11ae3b50d Debug stages from command line 2018-03-27 00:50:31 -07:00
greg
7f21b70e3f Make REPL interpreter return a value 2018-03-25 00:11:18 -07:00
greg
0b2e2cf68b Kill unused items in schala-repl 2018-03-24 23:28:00 -07:00
greg
1129d97603 Start killing old code in language 2018-03-24 19:02:16 -07:00
greg
e9c538da49 Fix interspersing of newlines in tokenizer infra 2018-03-24 18:38:28 -07:00
greg
3295242115 Show err output when evaluating non-interactively 2018-03-24 17:27:54 -07:00
greg
4bea717e72 Colored repl command output 2018-03-24 15:14:24 -07:00
greg
20f879d68d Hook schala function up to debug booleans
Not sure if I like this API, but eh, it's what I've got
2018-03-24 13:41:54 -07:00
greg
20cf877d21 Add back color 2018-03-24 13:20:10 -07:00
greg
df51a1e04a rename schala_main -> repl_main 2018-03-23 19:04:32 -07:00
greg
1c7574150e Add version string 2018-03-23 18:56:09 -07:00
greg
36b3f58f77 Get rid of all top-level dependencies 2018-03-23 18:48:15 -07:00
greg
f181e2f284 move schala into separate crate 2018-03-23 18:43:43 -07:00
greg
34086b3b2b Color in terminal error output 2018-03-22 03:39:42 -07:00
greg
fd4f5e17df Move robo to separate crate 2018-03-21 01:46:11 -07:00
greg
8ca5a77174 Move rukka to crate 2018-03-21 01:43:43 -07:00
greg
78a250bcba Move maaru into separate crate 2018-03-20 23:29:56 -07:00
greg
31fc751799 Rename schala-lib -> schala-repl 2018-03-20 21:17:46 -07:00
greg
e99479ffcc Removed (for now) LLVMCodeString 2018-03-20 21:13:34 -07:00
greg
6fcc7ded59 new thing compiles 2018-03-20 20:29:57 -07:00
greg
de073a6d9b Switch over schala to new system 2018-03-19 22:57:54 -07:00
greg
638afd47cc Update schala example code 2018-03-17 23:38:37 -07:00
greg
5ab7c2d254 Nested comments 2018-03-17 22:25:43 -07:00
greg
7a606980cb Changing comments to use //, /* 2018-03-17 19:12:58 -07:00
greg
ad8d2b22cd Starting to improve infrastrucutre for lang output
To make repl vs non-repl output better
2018-03-11 14:53:08 -07:00
greg
57f56168c5 Hacky fix for displaying error output non-interactively 2018-03-11 12:56:51 -07:00
greg
64a3705e35 Some changes necessary to handle non-interactive code 2018-03-09 00:50:24 -08:00
greg
7e23e40a2f Eval list literals 2018-03-08 12:42:05 -08:00
greg
4c88a7ada6 Parse list literals 2018-03-08 12:01:24 -08:00
greg
367719d408 Tighten some code 2018-03-08 00:32:19 -08:00
greg
2e80045750 Rename ReplOutput -> LanguageOutput 2018-03-07 22:07:13 -08:00
greg
35c67f73c3 Make directory for schala source files 2018-03-07 21:53:55 -08:00
greg
da9aa1e29d Index evaluation 2018-03-07 21:46:21 -08:00
greg
ca67f9b4fe Proper index exprs 2018-03-06 02:51:45 -08:00
greg
60cce3fe9c Some macro simplifications 2018-03-06 01:31:31 -08:00
greg
4eb22f94d0 Trying to make tests less verbose 2018-03-06 01:06:36 -08:00
greg
355c8170a4 Added test for lambda call 2018-03-06 00:56:19 -08:00
greg
e3671a579d Changed BNF grammar of call statements
To allow calling lambdas
2018-03-06 00:38:33 -08:00
greg
08ca48b2ba lambdas 2018-03-04 02:11:22 -08:00
greg
fea9b9575b Print output of tuples 2018-03-03 13:26:22 -08:00
greg
276dad56d7 Handle tuple literals in type system 2018-03-03 11:55:20 -08:00
greg
695e733584 Sum types in type schema 2018-03-03 11:52:07 -08:00
greg
9bfd751db6 Kill unused import 2018-03-03 11:32:38 -08:00
greg
b058e47d79 Kill some compiler warnings 2018-03-03 00:28:52 -08:00
greg
5be53dc847 Evaluator now only prints when a builtin print is called 2018-03-03 00:25:08 -08:00
greg
a0bea0d55a Kill comments 2018-03-02 23:33:01 -08:00
greg
9747374e8a Fix bug in delimited macro
Had to do with bad strictness testing.
2018-03-02 22:44:17 -08:00
greg
9ab1ca28f8 Improve tokenizer debug output 2018-03-02 22:11:25 -08:00
greg
66cd51a355 Cleanup 2018-03-02 21:59:14 -08:00
greg
1056be12e7 Include line count in token debug 2018-03-02 15:21:48 -08:00
greg
48e7c0be03 Munged types to make tokenizer compile 2018-03-02 15:15:12 -08:00
greg
6e82d1207e SOme work
WIP
2018-03-02 02:57:04 -08:00
greg
f0e7c9906e Fixed bug w/ lines in functions
Also improved debugging
2018-03-02 00:42:52 -08:00
greg
57c7858c87 Frame-aware lookups 2018-03-01 23:13:32 -08:00
greg
a105c84943 Kill debug 2018-03-01 22:54:03 -08:00
greg
2b8d63d9cc Better debugging for types 2018-03-01 22:32:38 -08:00
greg
c807c20292 Use UVars in type signatures of functions 2018-03-01 03:35:09 -08:00
greg
a643c8a792 Add history saving 2018-03-01 02:49:14 -08:00
greg
69200048fa Switch to rustyline library 2018-03-01 02:43:11 -08:00
greg
55e372a670 Introduced fresh type variable method 2018-02-28 05:45:20 -08:00
greg
c50626241e Continuing work 2018-02-27 03:01:05 -08:00
greg
232bec97a7 Re-added symbol table infra 2018-02-26 21:43:53 -08:00
greg
ce1d967f08 Some logic for function call inferring 2018-02-26 21:28:11 -08:00
greg
daa0062108 Starting on function application typechecking 2018-02-26 21:00:36 -08:00
greg
3e7c7a50b4 Move some code around 2018-02-26 19:57:46 -08:00
greg
2574a1b9c0 Function calls work 2018-02-26 19:55:27 -08:00
greg
c285ee182e Temporarily disable type-erroring
and tighten some code
2018-02-26 19:16:49 -08:00
greg
f7659a5598 Handle variable lookups 2018-02-26 18:23:10 -08:00
greg
1064d9993a Evaluate binding declarations 2018-02-26 18:18:42 -08:00
greg
0e3320e183 Separate Value and NamedStruct syntactic categories 2018-02-26 18:12:37 -08:00
greg
89a2be19f4 Fixed | 2018-02-26 02:27:36 -08:00
greg
d9e96398a4 More operator stuff 2018-02-26 02:21:21 -08:00
greg
a564ffa1ce Operator changes 2018-02-26 02:11:56 -08:00
greg
b3fff100d2 Fixed tests w/ respect to binop
There's a few unnecessary conversions of &str 's to Rc<String> and back
2018-02-24 17:50:57 -08:00
greg
cfd6df7ba5 Centralize data for prefix ops too 2018-02-24 17:43:26 -08:00
greg
bb2e1ae27a Added type information to binop definitions
Also started centralizing precedence there too
2018-02-24 17:37:23 -08:00
greg
4333563d03 Make sigil field private 2018-02-24 14:39:45 -08:00
greg
e7cabb2a79 Function evaluation work 2018-02-24 14:31:04 -08:00
greg
5da7c809b2 Give State a pointer to its parent
For function call lookups
2018-02-24 13:56:04 -08:00
greg
d229a57837 Finished initial BinOp/PrefixOp 2018-02-23 19:06:37 -08:00
greg
0dd8861f83 Starting to munge BinOp types
Incomplete, doesn't yet compile
2018-02-23 04:10:00 -08:00
greg
4ab900d601 ReplState -> State
Not everythign is a repl
2018-02-23 03:07:58 -08:00
greg
501b975fb6 Move bx! macro up to mod.rs
And make use of it in parser
2018-02-23 03:04:19 -08:00
greg
83315e97ac Move anno-to-type to a method on TypeName 2018-02-23 02:30:34 -08:00
greg
6259a0808c Fix tests too 2018-02-23 01:59:53 -08:00
greg
0c69476fd0 Separate tokenizing module
Parsing was getting too long
2018-02-23 01:58:06 -08:00
greg
1caccc6ae2 Some work on binops 2018-02-23 01:49:37 -08:00
greg
23af2b1455 Some more type-checking work 2018-02-22 19:59:53 -08:00
greg
61795b0331 More work on evaluating applications
for later testing + to kill a compiler warning
2018-02-22 03:34:36 -08:00
greg
a1b874c891 Fix traits, silence warnings 2018-02-22 03:26:32 -08:00
greg
e7103b925b type of a declaration should be Void, not Unit
I think this makes sense

Also kill some compiler warnings
2018-02-22 03:25:05 -08:00
greg
c35401da65 Types in bindings 2018-02-22 03:21:58 -08:00
greg
d51a9a73d7 Simplified match 2018-02-22 00:31:13 -08:00
greg
fddd43b86e Added trait declaration 2018-02-21 22:06:56 -08:00
greg
12f55fa844 More static type work 2018-02-21 18:12:46 -08:00
greg
bb0fb716e4 Finished basic constant type inference 2018-02-21 14:14:24 -08:00
greg
687d482853 More type implementing - WIP
This has a borrowing bug currently
2018-02-21 04:32:30 -08:00
greg
628eb28deb Fix some integer overflows with binary and hex 2018-02-21 03:52:16 -08:00
greg
4c8b4c8c71 Starting basic type stuff 2018-02-21 03:39:40 -08:00
greg
c674148772 Starting over with types 2018-02-21 02:35:09 -08:00
greg
7b4f69dce5 Additional TODO 2018-02-21 02:35:09 -08:00
greg
98caf1cac3 Add todo note 2018-02-20 17:56:13 -08:00
greg
457799e0f7 More type things 2018-02-12 01:45:36 -08:00
greg
681d767855 Type singletons test work 2018-02-12 00:51:53 -08:00
greg
ef4620e90a TypeSingletonName broken out 2018-02-12 00:25:48 -08:00
greg
1f2a4c706f Fix struct literals in if expressions
With special case-ing, sigh :( Also will need to do this for match
expressions but I'll cross that bridge when I come to it
2018-02-11 22:10:21 -08:00
greg
a452bccd1c Don't need clone() here 2018-02-11 16:45:26 -08:00
greg
eca2218f6a Kill separate is_digit method
I care about 10 vs 16 distinction
2018-02-11 16:43:51 -08:00
greg
83aedb0efb Hex parsing done 2018-02-11 16:35:38 -08:00
greg
76841de784 Save settings on ctrl-D 2018-02-11 16:28:17 -08:00
greg
c0574ff1ef Added a bunch of notes 2018-02-11 02:37:52 -08:00
greg
faa5c6ab42 Fix parse level calculation 2018-02-10 17:45:00 -08:00
greg
9c2d2190b0 Proper indentation of parser debug 2018-02-10 15:10:06 -08:00
greg
21511f5120 Move some code around 2018-02-08 01:15:27 -08:00
greg
8bd399f97a Better hex literals 2018-01-08 06:12:45 -08:00
greg
30a6d0929a Starting hex parsing 2018-01-08 05:57:36 -08:00
greg
bec8aedc22 Simplify some code 2018-01-08 05:21:04 -08:00
greg
1b642c6321 Assign a specific rocket version 2017-12-31 15:46:08 -08:00
greg
559306ffc8 unified BoolAtom 2017-12-30 23:19:42 -08:00
greg
540ffde4bc Rukka source file 2017-12-30 01:18:48 -08:00
greg
fa8d46e3d7 Print operation 2017-12-29 05:10:03 -08:00
greg
4598802999 Refactoring 2017-12-29 05:03:30 -08:00
greg
5ea83e2da6 Delete some unneeded code 2017-12-29 04:55:03 -08:00
greg
23c0f54042 Forgot to change name here 2017-12-29 04:52:47 -08:00
greg
8618de313b Name change
builtin -> primitive
2017-12-29 04:51:14 -08:00
greg
cd23b23a91 Get rid of some printlns 2017-12-29 04:46:19 -08:00
greg
e6475a1262 Implement lambda application 2017-12-29 03:57:27 -08:00
greg
e6f81b28f9 Plus and multiply 2017-12-25 23:10:16 -08:00
greg
c20d75faf1 Builtins - + 2017-12-24 23:48:13 -08:00
greg
85aabed344 Framework for multiple environments 2017-12-21 03:11:56 -08:00
greg
aa821e720a Apply wokr 2017-12-21 01:14:05 -08:00
greg
0e25720927 Fixing quote 2017-12-21 01:12:05 -08:00
greg
c101610cde Starting builtins 2017-12-20 22:56:24 -08:00
greg
1217f6e143 Lambda abstraction 2017-12-20 18:23:44 -08:00
greg
6f41167402 Kill this linker thing 2017-12-18 01:30:33 -08:00
greg
cf0af7e0c9 Flesh out TODO, README 2017-12-13 00:52:54 -08:00
greg
a7fd515e7b Add Rukka to README 2017-12-13 00:25:59 -08:00
greg
c6509338d8 Kill unused code 2017-12-12 02:57:19 -08:00
greg
192a6bf6e1 Some lambda work 2017-12-12 02:56:10 -08:00
greg
7e8c4267c2 Remove a unimplemented 2017-12-11 01:55:08 -08:00
greg
25527bbdf0 Add fn literal variant 2017-12-11 01:53:27 -08:00
greg
5e7aef1040 Even more concise 2017-12-10 19:01:44 -08:00
greg
3386fcc505 Refactoring 2017-12-10 18:44:21 -08:00
greg
18a839bb91 Starting lambdas 2017-12-10 03:58:44 -08:00
greg
92d641fca0 Make var methods better 2017-12-10 03:35:51 -08:00
greg
9b4499c5ac If expressions 2017-12-10 02:58:07 -08:00
greg
07e19cbfa2 Rukka - Variables 2017-12-09 19:08:08 -08:00
greg
2016fcab41 Add schala idea 2017-12-09 18:56:34 -08:00
greg
0e7b6f25b3 Can specify language name with -l in any case 2017-12-09 18:19:07 -08:00
greg
7e7aa55d6e Go directly to langauge by name 2017-12-09 13:38:55 -08:00
greg
cc79565fb3 Define half-working 2017-12-07 20:28:09 -08:00
greg
5659bab684 Language name in prompt 2017-12-07 20:08:31 -08:00
greg
405f91a770 Get rid of old import 2017-12-07 19:55:18 -08:00
greg
faed1d6f25 eq? 2017-12-07 19:54:53 -08:00
greg
f6d047e3b8 True and False primitives 2017-12-07 19:51:34 -08:00
greg
fcd980f148 Some primitive implementations 2017-12-07 19:48:48 -08:00
greg
c3919daa66 Fix pointer alias problem 2017-12-07 11:22:59 -08:00
greg
f8152f68ad Still tryign to make the pointer-munging work 2017-12-07 08:15:28 -08:00
greg
9273773bf4 This has broken sexp parsing 2017-12-04 04:56:29 -08:00
greg
844cef36c7 Fix print bug 2017-12-04 03:26:38 -08:00
greg
0e17e45f3e Convert to more lispish Cons 2017-12-04 03:23:55 -08:00
greg
18d8ca7bd5 Special forms list 2017-12-04 03:06:54 -08:00
greg
2ee14bf740 Unwraps 2017-12-04 03:02:38 -08:00
greg
502497687a Handle top-level empty list 2017-12-04 03:01:47 -08:00
greg
107897ec97 print list 2017-12-04 02:44:09 -08:00
greg
8534fb4118 Tighten code 2017-12-04 02:00:00 -08:00
greg
a1e38aba8e Some more code 2017-12-04 01:57:24 -08:00
greg
2f8ef99b08 Type simplification 2017-12-03 22:20:43 -08:00
greg
2bb55b6cca State for eval 2017-12-03 22:10:19 -08:00
greg
bcd70ff538 Numbers 2017-12-03 19:21:56 -08:00
greg
728393671f Fixed string parsing 2017-12-03 17:47:17 -08:00
greg
9c3e223e51 Strings partway working 2017-12-03 17:11:17 -08:00
greg
2738119f17 Quotes 2017-12-03 06:04:53 -08:00
greg
630ead289c Change Symbol -> Word for token 2017-12-01 03:00:42 -08:00
greg
485e869c90 Fix bug 2017-12-01 02:58:09 -08:00
greg
9e8a3d1f08 Tighten code 2017-12-01 02:39:17 -08:00
greg
b1da524a8f Intersperse 2017-12-01 02:36:52 -08:00
greg
787b6d51a4 Parsing correctly yay 2017-12-01 02:16:28 -08:00
greg
1dae4443cd Tokens 2017-11-30 22:37:49 -08:00
greg
210a45c92e Sexp parsing 2017-11-29 02:08:30 -08:00
greg
815e0401f2 Parses ( 2017-11-29 01:45:29 -08:00
greg
753247ee83 Some halfwritten stuff 2017-11-28 03:37:16 -08:00
greg
6223fc20f3 List datatype 2017-11-27 00:57:26 -08:00
greg
da928db351 Add a new language - Rukka
This is a (simple) lisp, partially for fun, partially for testing the
generic interfaces
2017-11-26 21:17:17 -08:00
greg
93d0cfe5b8 Make schala-lib::language private and reexport 2017-11-02 02:45:26 -07:00
greg
687b28d1d1 Take TokenError type out of schala-lib 2017-11-01 22:41:34 -07:00
greg
b62f618256 I don't need this syntax 2017-11-01 01:25:26 -07:00
greg
f25b76ea11 Kill some packages from schala bin 2017-11-01 01:23:54 -07:00
greg
6b2736348d Get rid of unused imports 2017-10-31 00:45:15 -07:00
greg
69d5f38ea1 Refactor into libs part II
woo it compiles
2017-10-30 22:18:02 -07:00
greg
a6f8616839 Halfway done to library-ifying schala 2017-10-30 20:06:20 -07:00
greg
cdcb55e3b8 PLIGenerators can be authoritative, not the instances themselves 2017-10-29 13:45:55 -07:00
greg
74ac26841f Some simplification 2017-10-29 12:27:24 -07:00
greg
8fd29b5090 Passing things along as generators 2017-10-29 04:09:10 -07:00
greg
5ebc96daa7 Don't need mutex, kill it 2017-10-29 04:04:54 -07:00
greg
277e039251 Finally removed schala dependency
Now need to clena up everything
2017-10-29 03:41:40 -07:00
greg
6e8f57e54f Okay this compiles
The secret (from #rust) appeared to be that Fn() needed to have + Send
explicitly annotated on it
2017-10-29 03:16:08 -07:00
greg
ae02391270 Working on solution to Rocket state problem 2017-10-27 00:30:28 -07:00
greg
9379485713 Some linker bullshit
I don't know why I needed to do this
2017-10-26 02:03:47 -07:00
greg
910522537c Splitting up some code
In preparation for splitting schala into crates
2017-10-23 20:51:08 -07:00
greg
98e1a5235a Some more structure in evaluator 2017-10-23 01:54:35 -07:00
greg
e054c4b27f Revert "Starting to split project into multiple crates"
This reverts commit e3b0f4a51e.
Bah, this was a bad idea, wrong way to do it
2017-10-23 00:45:01 -07:00
greg
e3b0f4a51e Starting to split project into multiple crates 2017-10-23 00:43:43 -07:00
greg
911f26e9c6 Halfway done with evaluating tuples 2017-10-23 00:22:25 -07:00
greg
677e3ae0a9 Add module keyword 2017-10-22 03:58:09 -07:00
greg
9611770bb3 Switch from request to superagent
For doing HTTP requests. Makes the js bundle a lot smaller.

Also I should do something about the fact that I now have to change the
js and also rebuild the rust binary to change code
2017-10-15 02:37:02 -07:00
greg
4c256cb5f7 Literal non-primitive values 2017-10-14 13:54:17 -07:00
greg
688e1c7f5d Starting work on literal non-primitve values 2017-10-13 18:56:02 -07:00
greg
26c9c72bcc Can eval custom data constructors now 2017-10-13 03:05:18 -07:00
greg
2d614aa17a Float literals, kill old code 2017-10-13 00:01:43 -07:00
greg
ecb2eb0f87 Some more primitive types + binop-checking 2017-10-12 23:59:52 -07:00
greg
4c4004d3ac Add required imports 2017-10-12 21:46:12 -07:00
greg
9b4a23c4f2 Some partial work on refactoring type infer fn 2017-10-12 20:14:33 -07:00
greg
936c168cef Add colored output to non-interactive 2017-10-12 10:43:54 -07:00
greg
db835f42aa Convert webapp to using included files 2017-10-12 02:13:55 -07:00
greg
cd5fc36c37 Fix type check macro to add symbol table 2017-10-11 16:43:04 -07:00
greg
d7a33c974e More work with unification 2017-10-11 02:33:46 -07:00
greg
b2288206d2 the evar table
TODO find a better way to represent this
2017-10-11 02:11:12 -07:00
greg
d962e2c27a Unify work 2017-10-11 02:03:50 -07:00
greg
4534c1d3d6 Move type-level func up 2017-10-11 01:55:45 -07:00
greg
f79dc0b1e3 Okay I am figuring things out about hindley-milner again 2017-10-11 01:50:04 -07:00
greg
4928fc0019 rename type_var to ty 2017-10-10 22:14:55 -07:00
greg
d735e45688 String and () types 2017-10-10 21:51:45 -07:00
greg
b4208b696d Change around some stuff 2017-10-10 21:23:24 -07:00
greg
ff3dbbcbc6 Change Variable to Value 2017-10-10 21:02:32 -07:00
greg
e3261be8a0 Partial handling of user defined types 2017-10-10 17:29:28 -07:00
greg
f131105b50 Starting to make unify actually work 2017-10-10 04:38:59 -07:00
greg
1089a33634 Convert unify to are types
b/c Type implements Clone
Maybe wanna kill this later for efficiency
2017-10-10 04:26:40 -07:00
greg
6c60794485 Have + do something different with strings
Needed to introduce polymorphism soon
2017-10-10 02:45:25 -07:00
greg
2f18529bcc Operator typing a little bit 2017-10-10 02:41:17 -07:00
greg
c68e09d89d Slight refactoring 2017-10-10 02:32:02 -07:00
greg
d9e8178a90 Renamed all the type-related types 2017-10-10 02:17:07 -07:00
greg
57536e6399 Move some type checking code around 2017-10-10 01:11:24 -07:00
greg
32e077c407 Merge branch 'master' of github.com:neunenak/schala 2017-10-10 01:09:27 -07:00
greg
33d0d49d30 Basic typing test 2017-10-09 12:26:25 -07:00
greg
76a9367284 String types 2017-10-09 11:42:53 -07:00
28 changed files with 1161 additions and 3640 deletions

View File

@@ -1,920 +0,0 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module is an extensively documented walkthrough for typechecking a
-- basic functional language using the Hindley-Damas-Milner algorithm.
--
-- In the end, we'll be able to infer the type of expressions like
--
-- @
-- find (λx. (>) x 0)
-- :: [Integer] -> Either () Integer
-- @
--
-- It can be used in multiple different forms:
--
-- * The source is written in literate programming style, so you can almost
-- read it from top to bottom, minus some few references to later topics.
-- * /Loads/ of doctests (runnable and verified code examples) are included
-- * The code is runnable in GHCi, all definitions are exposed.
-- * A small main module that gives many examples of what you might try out in
-- GHCi is also included.
-- * The Haddock output yields a nice overview over the definitions given, with
-- a nice rendering of a truckload of Haddock comments.
module HindleyMilner where
import Control.Monad.Trans
import Control.Monad.Trans.Except
import Control.Monad.Trans.State
import Data.Map (Map)
import qualified Data.Map as M
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as S
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
-- $setup
--
-- For running doctests:
--
-- >>> :set -XOverloadedStrings
-- >>> :set -XOverloadedLists
-- >>> :set -XLambdaCase
-- >>> import qualified Data.Text.IO as T
-- >>> let putPprLn = T.putStrLn . ppr
-- #############################################################################
-- #############################################################################
-- * Preliminaries
-- #############################################################################
-- #############################################################################
-- #############################################################################
-- ** Prettyprinting
-- #############################################################################
-- | A prettyprinter class. Similar to 'Show', but with a focus on producing
-- human-readable output as opposed to valid Haskell source.
class Pretty a where
    ppr :: a -> Text
-- #############################################################################
-- ** Names
-- #############################################################################
-- | A 'Name' is an identifier in the language we're going to typecheck.
-- Variables on both the term and type level have 'Name's, for example.
newtype Name = Name Text
    deriving (Eq, Ord, Show)

-- | >>> "lorem" :: Name
-- Name "lorem"
instance IsString Name where
    fromString = Name . T.pack

-- | >>> putPprLn (Name "var")
-- var
instance Pretty Name where
    ppr (Name n) = n
-- #############################################################################
-- ** Monotypes
-- #############################################################################
-- | A monotype is an unquantified/unparametric type; in other words it contains
-- no @forall@s. Monotypes are the inner building blocks of all types. Examples
-- of monotypes are @Int@, @a@, @a -> b@.
--
-- In formal notation, 'MType's are often called τ (tau) types.
data MType
    = TVar Name            -- ^ @a@
    | TFun MType MType     -- ^ @a -> b@
    | TConst Name          -- ^ @Int@, @()@, …

    -- Since we can't declare our own types in our simple type system here,
    -- we hard-code certain basic ones so we can typecheck some familiar
    -- functions that use them later.
    | TList MType          -- ^ @[a]@
    | TEither MType MType  -- ^ @Either a b@
    | TTuple MType MType   -- ^ @(a,b)@
    deriving Show
-- | >>> putPprLn (TFun (TEither (TVar "a") (TVar "b")) (TFun (TVar "c") (TVar "d")))
-- Either a b → c → d
--
-- Using the 'IsString' instance:
--
-- >>> putPprLn (TFun (TEither "a" "b") (TFun "c" "d"))
-- Either a b → c → d
instance Pretty MType where
    ppr = go False
      where
        go _ (TVar name)   = ppr name
        go _ (TList a)     = "[" <> ppr a <> "]"
        go _ (TEither l r) = "Either " <> ppr l <> " " <> ppr r
        go _ (TTuple a b)  = "(" <> ppr a <> ", " <> ppr b <> ")"
        go _ (TConst name) = ppr name
        -- Function arrows are right-associative, so only the left-hand side
        -- of an arrow needs parentheses. The " → " separator restores the
        -- arrow glyph the doctests above expect; it had been lost from these
        -- string literals (they read `lhs <> "" <> rhs`).
        go parenthesize (TFun a b)
            | parenthesize = "(" <> lhs <> " → " <> rhs <> ")"
            | otherwise    = lhs <> " → " <> rhs
          where
            lhs = go True a
            rhs = go False b
-- | >>> "var" :: MType
-- TVar (Name "var")
instance IsString MType where
    fromString str = TVar (fromString str)
-- | The free variables of an 'MType'. This is simply the collection of all the
-- individual type variables occurring inside of it.
--
-- __Example:__ The free variables of @a -> b@ are @a@ and @b@.
freeMType :: MType -> Set Name
freeMType mType = case mType of
    TVar a      -> S.singleton a
    TFun a b    -> freeMType a <> freeMType b
    TList a     -> freeMType a
    TEither l r -> freeMType l <> freeMType r
    TTuple a b  -> freeMType a <> freeMType b
    TConst _    -> S.empty
-- | Substitute every type variable mentioned in the substitution, leaving
-- everything else untouched.
instance Substitutable MType where
    applySubst subst mType = case mType of
        -- A bare variable is replaced if the substitution maps it, and kept
        -- as-is otherwise.
        TVar a      -> let Subst s = subst
                       in  M.findWithDefault (TVar a) a s
        -- Compound types: recurse into every component.
        TFun f x    -> TFun (applySubst subst f) (applySubst subst x)
        TList a     -> TList (applySubst subst a)
        TEither l r -> TEither (applySubst subst l) (applySubst subst r)
        TTuple a b  -> TTuple (applySubst subst a) (applySubst subst b)
        -- Constants contain no variables, so nothing to do.
        c@TConst {} -> c
-- #############################################################################
-- ** Polytypes
-- #############################################################################
-- | A polytype is a monotype universally quantified over a number of type
-- variables. In Haskell, all definitions have polytypes, but since the @forall@
-- is implicit they look a bit like monotypes, maybe confusingly so. For
-- example, the type of @1 :: Int@ is actually @forall <nothing>. Int@, and the
-- type of @id@ is @forall a. a -> a@, although GHC displays it as @a -> a@.
--
-- A polytype claims to work "for all imaginable type parameters", very similar
-- to how a lambda claims to work "for all imaginable value parameters". We can
-- insert a value into a lambda's parameter to evaluate it to a new value, and
-- similarly we'll later insert types into a polytype's quantified variables to
-- gain new types.
--
-- __Example:__ in a definition @id :: forall a. a -> a@, the @a@ after the
-- ∀ ("forall") is the collection of type variables, and @a -> a@ is the 'MType'
-- quantified over. When we have such an @id@, we also have its specialized
-- version @Int -> Int@ available. This process will be the topic of the type
-- inference/unification algorithms.
--
-- In formal notation, 'PType's are often called σ (sigma) types.
--
-- The purpose of having monotypes and polytypes is that we'd like to only have
-- universal quantification at the top level, restricting our language to rank-1
-- polymorphism, where type inference is total (all types can be inferred) and
-- simple (only a handful of typing rules). Weakening this constraint would be
-- easy: if we allowed universal quantification within function types we would
-- get rank-N polymorphism. Taking it even further to allow it anywhere,
-- effectively replacing all occurrences of 'MType' with 'PType', yields
-- impredicative types. Both these extensions make the type system
-- *significantly* more complex though.
data PType = Forall (Set Name) MType -- ^ ∀{α}. τ
-- | >>> putPprLn (Forall ["a"] (TFun "a" "a"))
-- ∀a. a → a
instance Pretty PType where
    -- The leading "∀" had been stripped from this string literal (it read
    -- `"" <> pprUniversals`), contradicting the doctest above.
    ppr (Forall qs mType) = "∀" <> pprUniversals <> ". " <> ppr mType
      where
        -- Empty quantifier set renders as "∅", matching the "∀∅. …" style
        -- used in the 'Env' documentation below (e.g. @putStrLn → ∀∅. String → IO ()@).
        pprUniversals
            | S.null qs = "∅"
            | otherwise = (T.intercalate " " . map ppr . S.toList) qs
-- | The free variables of a 'PType' are the free variables of the contained
-- 'MType', minus those that are universally quantified.
--
-- >>> let sigma = Forall ["a"] (TFun "a" (TFun (TTuple "b" "a") "c"))
-- >>> putPprLn sigma
-- ∀a. a → (b, a) → c
-- >>> let display = T.putStrLn . T.intercalate ", " . foldMap (\x -> [ppr x])
-- >>> display (freePType sigma)
-- b, c
freePType :: PType -> Set Name
freePType (Forall qs mType) = S.difference (freeMType mType) qs
-- | Substitute only the /free/ type variables: the quantified ones are bound
-- by the ∀ and must be removed from the substitution before recursing.
instance Substitutable PType where
    applySubst (Subst subst) (Forall qs mType) = Forall qs (applySubst restricted mType)
      where
        quantified = M.fromSet (const ()) qs
        restricted = Subst (M.difference subst quantified)
-- #############################################################################
-- ** The environment
-- #############################################################################
-- | The environment consists of all the values available in scope, and their
-- associated polytypes. Other common names for it include "(typing) context",
-- and because of the commonly used symbol for it sometimes directly
-- \"Gamma"/@"Γ"@.
--
-- There are two kinds of membership in an environment,
--
-- - @∈@: an environment @Γ@ can be viewed as a set of @(value, type)@ pairs,
-- and we can test whether something is /literally contained/ by it via
-- x:σ ∈ Γ
-- - @⊢@, pronounced /entails/, describes all the things that are well-typed,
-- given an environment @Γ@. @Γ ⊢ x:τ@ can thus be seen as a judgement that
-- @x:τ@ is /figuratively contained/ in @Γ@.
--
-- For example, the environment @{x:Int}@ literally contains @x@, but given
-- this, it also entails @λy. x@, @λy z. x@, @let id = λy. y in id x@ and so on.
--
-- In Haskell terms, the environment consists of all the things you currently
-- have available, or that can be built by combining them. If you import the
-- Prelude, your environment entails
--
-- @
-- id → ∀a. a→a
-- map → ∀a b. (a→b) → [a] → [b]
-- putStrLn → ∀∅. String → IO ()
-- …
-- id map → ∀a b. (a→b) → [a] → [b]
-- map putStrLn → ∀∅. [String] -> [IO ()]
-- …
-- @
newtype Env = Env (Map Name PType)
-- | >>> :{
-- putPprLn (Env
-- [ ("id", Forall ["a"] (TFun "a" "a"))
-- , ("const", Forall ["a", "b"] (TFun "a" (TFun "b" "a"))) ])
-- :}
-- Γ = { const : ∀a b. a → b → a
-- , id : ∀a. a → a }
instance Pretty Env where
    ppr (Env env) = "Γ = { " <> T.intercalate "\n , " pprBindings <> " }"
      where
        -- One "name : type" line per binding, in key order.
        pprBindings = map pprBinding (M.assocs env)
        pprBinding (name, pType) = ppr name <> " : " <> ppr pType
-- | The free variables of an 'Env'ironment are the union of the free
-- variables of every 'PType' it contains.
freeEnv :: Env -> Set Name
freeEnv (Env env) = S.unions (map freePType (M.elems env))
-- | Performing a 'Subst'itution in an 'Env'ironment means performing that
-- substitution on every contained 'PType'.
instance Substitutable Env where
    applySubst subst (Env env) = Env (M.map (applySubst subst) env)
-- #############################################################################
-- ** Substitutions
-- #############################################################################
-- | A substitution is a mapping from type variables to 'MType's. Applying a
-- substitution means applying those replacements. For example, the substitution
-- @a -> Int@ applied to @a -> a@ yields the result @Int -> Int@.
--
-- A key concept behind Hindley-Milner is that once we dive deeper into an
-- expression, we learn more about our type variables. We might learn that @a@
-- has to be specialized to @b -> b@, and then later on that @b@ is actually
-- @Int@. Substitutions are an organized way of carrying this information along.
newtype Subst = Subst (Map Name MType)
-- | We're going to apply substitutions to a variety of other values that
-- somehow contain type variables, so we overload this application operation in
-- a class here.
--
-- Laws:
--
-- @
-- 'applySubst' 'mempty' ≡ 'id'
-- 'applySubst' (s1 '<>' s2) ≡ 'applySubst' s1 . 'applySubst' s2
-- @
class Substitutable a where
    -- | Apply the given substitution to every type variable contained in @a@.
    applySubst :: Subst -> a -> a
-- Substitute componentwise in a pair.
instance (Substitutable a, Substitutable b) => Substitutable (a,b) where
    applySubst subst (left, right) = (applySubst subst left, applySubst subst right)
-- | @'applySubst' s1 s2@ applies one substitution to another, replacing all the
-- bindings in the second argument @s2@ with their values mentioned in the first
-- one (@s1@).
instance Substitutable Subst where
    applySubst s (Subst target) = Subst (M.map (applySubst s) target)
-- | >>> :{
-- putPprLn (Subst
-- [ ("a", TFun "b" "b")
-- , ("b", TEither "c" "d") ])
-- :}
-- { a > b → b
-- , b > Either c d }
instance Pretty Subst where
    ppr (Subst s) = "{ " <> T.intercalate "\n, " (map entry (M.toList s)) <> " }"
      where
        -- Render a single "name > type" mapping.
        entry (k, v) = ppr k <> " > " <> ppr v
-- | Combine two substitutions by applying all substitutions mentioned in the
-- first argument to the type variables contained in the second.
instance Monoid Subst where
    -- Considering that all we can really do with a substitution is apply it, we
    -- can use one of 'Substitutable's laws to show that substitutions
    -- combine associatively,
    --
    -- @
    -- applySubst (compose s1 (compose s2 s3))
    -- = applySubst s1 . applySubst (compose s2 s3)
    -- = applySubst s1 . applySubst s2 . applySubst s3
    -- = applySubst (compose s1 s2) . applySubst s3
    -- = applySubst (compose (compose s1 s2) s3)
    -- @
    --
    -- NOTE: order matters here. @s2@ is first rewritten by @s1@, and the
    -- left-biased 'M.union' then prefers @s1@'s binding for any name bound
    -- by both substitutions.
    mappend subst1 subst2 = Subst (s1 `M.union` s2)
      where
        Subst s1 = subst1
        Subst s2 = applySubst subst1 subst2
    mempty = Subst M.empty
-- #############################################################################
-- #############################################################################
-- * Typechecking
-- #############################################################################
-- #############################################################################
-- $ Typechecking does two things:
--
-- 1. If two types are not immediately identical, attempt to 'unify' them
-- to get a type compatible with both of them
-- 2. 'infer' the most general type of a value by comparing the values in its
-- definition with the 'Env'ironment
-- #############################################################################
-- ** Inference context
-- #############################################################################
-- | The inference type holds a supply of unique names, and can fail with a
-- descriptive error if something goes wrong.
--
-- /Invariant:/ the supply must be infinite, or we might run out of names to
-- give to things.
--
-- ('ExceptT' over 'State': errors abort the computation; the name supply
-- lives in the underlying 'State' layer.)
newtype Infer a = Infer (ExceptT InferError (State [Name]) a)
    deriving (Functor, Applicative, Monad)
-- | Errors that can happen during the type inference process.
--
-- Each constructor carries enough data for 'Pretty' to render a helpful
-- message.
data InferError =
    -- | Two types that don't match were attempted to be unified.
    --
    -- For example, @a -> a@ and @Int@ do not unify.
    --
    -- >>> putPprLn (CannotUnify (TFun "a" "a") (TConst "Int"))
    -- Cannot unify a → a with Int
    CannotUnify MType MType

    -- | A 'TVar' is bound to an 'MType' that already contains it.
    --
    -- The canonical example of this is @λx. x x@, where the first @x@
    -- in the body has to have type @a -> b@, and the second one @a@. Since
    -- they're both the same @x@, this requires unification of @a@ with
    -- @a -> b@, which only works if @a = a -> b = (a -> b) -> b = …@, yielding
    -- an infinite type.
    --
    -- >>> putPprLn (OccursCheckFailed "a" (TFun "a" "a"))
    -- Occurs check failed: a already appears in a → a
    | OccursCheckFailed Name MType

    -- | The value of an unknown identifier was read.
    --
    -- >>> putPprLn (UnknownIdentifier "a")
    -- Unknown identifier: a
    | UnknownIdentifier Name
    deriving Show
-- | >>> putPprLn (CannotUnify (TEither "a" "b") (TTuple "a" "b"))
-- Cannot unify Either a b with (a, b)
instance Pretty InferError where
    ppr (CannotUnify t1 t2) =
        "Cannot unify " <> ppr t1 <> " with " <> ppr t2
    ppr (OccursCheckFailed name ty) =
        "Occurs check failed: " <> ppr name <> " already appears in " <> ppr ty
    ppr (UnknownIdentifier name) =
        "Unknown identifier: " <> ppr name
-- | Evaluate a value in an 'Infer'ence context.
--
-- >>> let expr = EAbs "f" (EAbs "g" (EAbs "x" (EApp (EApp "f" "x") (EApp "g" "x"))))
-- >>> putPprLn expr
-- λf g x. f x (g x)
-- >>> let inferred = runInfer (infer (Env []) expr)
-- >>> let demonstrate = \case Right (_, ty) -> T.putStrLn (":: " <> ppr ty)
-- >>> demonstrate inferred
-- :: (c → e → f) → (c → e) → c → f
runInfer :: Infer a -- ^ Inference data
         -> Either InferError a
runInfer (Infer inf) = evalState (runExceptT inf) initialSupply
  where
    initialSupply = map Name (infiniteSupply alphabet)
    alphabet = map T.singleton ['a'..'z']
    -- [a, b, c] ==> [a,b,c, a1,b1,c1, a2,b2,c2, …]
    infiniteSupply base = base <> numbered (1 :: Integer)
      where
        numbered n = map (<> T.pack (show n)) base <> numbered (n+1)
-- | Throw an 'InferError' in an 'Infer'ence context.
--
-- >>> case runInfer (throw (UnknownIdentifier "var")) of Left err -> putPprLn err
-- Unknown identifier: var
throw :: InferError -> Infer a
throw err = Infer (throwE err)
-- #############################################################################
-- ** Unification
-- #############################################################################
-- $ Unification describes the process of making two different types compatible
-- by specializing them where needed. A desirable property to have here is being
-- able to find the most general unifier. Luckily, we'll be able to do that in
-- our type system.
-- | The unification of two 'MType's is the most general substitution that can
-- be applied to both of them in order to yield the same result.
--
-- >>> let m1 = TFun "a" "b"
-- >>> putPprLn m1
-- a → b
-- >>> let m2 = TFun "c" (TEither "d" "e")
-- >>> putPprLn m2
-- c → Either d e
-- >>> let inferSubst = unify (m1, m2)
-- >>> case runInfer inferSubst of Right subst -> putPprLn subst
-- { a > c
-- , b > Either d e }
unify :: (MType, MType) -> Infer Subst
unify types = case types of
    (TFun a b,    TFun x y)          -> unifyPair (a,b) (x,y)
    (TVar v,      x)                 -> v `bindVariableTo` x
    (x,           TVar v)            -> v `bindVariableTo` x
    (TConst a,    TConst b) | a == b -> pure mempty
    (TList a,     TList b)           -> unify (a,b)
    (TEither a b, TEither x y)       -> unifyPair (a,b) (x,y)
    (TTuple a b,  TTuple x y)        -> unifyPair (a,b) (x,y)
    (a, b)                           -> throw (CannotUnify a b)
  where
    -- Unification of binary type constructors, such as functions and Either.
    -- Unification is first done for the first operand, and — assuming the
    -- required substitution — for the second one.
    unifyPair :: (MType, MType) -> (MType, MType) -> Infer Subst
    unifyPair (a,b) (x,y) = do
        s1 <- unify (a, x)
        s2 <- unify (applySubst s1 (b, y))
        pure (s1 <> s2)
-- | Build a 'Subst'itution that binds a 'Name' of a 'TVar' to an 'MType'. The
-- resulting substitution should be idempotent, i.e. applying it more than once
-- to something should not be any different from applying it only once.
--
-- - In the simplest case, this just means building a substitution that just
--   does that.
-- - Substituting a 'Name' with a 'TVar' with the same name unifies a type
--   variable with itself, and the resulting substitution does nothing new.
-- - If the 'Name' we're trying to bind to an 'MType' already occurs in that
--   'MType', the resulting substitution would not be idempotent: the 'MType'
--   would be replaced again, yielding a different result. This is known as the
--   Occurs Check.
bindVariableTo :: Name -> MType -> Infer Subst
bindVariableTo name mType = case mType of
    -- Binding a variable to itself: nothing new to record.
    TVar v | v == name -> pure mempty
    _ | name `occursIn` mType -> throw (OccursCheckFailed name mType)
      | otherwise             -> pure (Subst (M.singleton name mType))
  where
    n `occursIn` ty = n `S.member` freeMType ty
-- #############################################################################
-- ** Type inference
-- #############################################################################
-- $ Type inference is the act of finding out a value's type by looking at the
-- environment it is in, in order to make it compatible with it.
--
-- In literature, the Hindley-Damas-Milner inference algorithm ("Algorithm W")
-- is often presented in the style of logical formulas, and below you'll find
-- that version along with code that actually does what they say.
--
-- These formulas look a bit like fractions, where the "numerator" is a
-- collection of premises, and the denominator is the consequence if all of them
-- hold.
--
-- __Example:__
--
-- @
-- Γ ⊢ even : Int → Bool Γ ⊢ 1 : Int
--
-- Γ ⊢ even 1 : Bool
-- @
--
-- means that if we have a value of type @Int -> Bool@ called "even" and a value
-- of type @Int@ called @1@, then we also have a value of type @Bool@ via
-- @even 1@ available to us.
--
-- The actual inference rules are polymorphic versions of this example, and
-- the code comments will explain each step in detail.
-- -----------------------------------------------------------------------------
-- *** The language: typed lambda calculus
-- -----------------------------------------------------------------------------
-- | The syntax tree of the language we'd like to typecheck. You can view it as
-- a close relative to simply typed lambda calculus, having only the most
-- necessary syntax elements.
--
-- Since 'ELet' is non-recursive, the usual fixed-point function
-- @fix : (a → a) → a@ can be introduced to allow recursive definitions.
data Exp = ELit Lit          -- ^ True, 1
         | EVar Name         -- ^ @x@
         | EApp Exp Exp      -- ^ @f x@
         | EAbs Name Exp     -- ^ @λx. e@
         | ELet Name Exp Exp -- ^ @let x = e in e'@ (non-recursive)
         deriving Show
-- | Literals we'd like to support. Since we can't define new data types in our
-- simple type system, we'll have to hard-code the possible ones here.
data Lit = LBool Bool       -- ^ e.g. @True@
         | LInteger Integer -- ^ e.g. @1@
         deriving Show
-- | >>> putPprLn (EAbs "f" (EAbs "g" (EAbs "x" (EApp (EApp "f" "x") (EApp "g" "x")))))
-- λf g x. f x (g x)
instance Pretty Exp where
    ppr (ELit lit) = ppr lit
    ppr (EVar name) = ppr name
    ppr (EApp f x) = pprApp1 f <> " " <> pprApp2 x
      where
        -- Function position: parenthesize let and λ, which would otherwise
        -- swallow the argument.
        pprApp1 = \case
            eLet@ELet{} -> "(" <> ppr eLet <> ")"
            eLet@EAbs{} -> "(" <> ppr eLet <> ")"
            e -> ppr e
        -- Argument position: additionally parenthesize nested applications,
        -- since application associates to the left.
        pprApp2 = \case
            eApp@EApp{} -> "(" <> ppr eApp <> ")"
            e -> pprApp1 e
    -- Render nested lambdas collapsed, e.g. @λf g x. …@. The Bool flag tracks
    -- whether we're at the first binder (prints "λ") or a later one (space).
    ppr x@EAbs{} = pprAbs True x
      where
        pprAbs True (EAbs name expr) = "λ" <> ppr name <> pprAbs False expr
        pprAbs False (EAbs name expr) = " " <> ppr name <> pprAbs False expr
        pprAbs _ expr = ". " <> ppr expr
    ppr (ELet name value body) =
        "let " <> ppr name <> " = " <> ppr value <> " in " <> ppr body
-- | >>> putPprLn (LBool True)
-- True
--
-- >>> putPprLn (LInteger 127)
-- 127
instance Pretty Lit where
    ppr (LBool b)    = T.pack (show b)
    ppr (LInteger i) = T.pack (show i)
-- | >>> "var" :: Exp
-- EVar (Name "var")
instance IsString Exp where
    fromString str = EVar (fromString str)
-- -----------------------------------------------------------------------------
-- *** Some useful definitions
-- -----------------------------------------------------------------------------
-- | Generate a fresh 'Name' in a type 'Infer'ence context. An example use case
-- of this is η expansion, which transforms @f@ into @λx. f x@, where "x" is a
-- new name, i.e. unbound in the current context.
fresh :: Infer MType
fresh = drawFromSupply >>= either throw (pure . TVar)
  where
    -- Pop the next name off the (infinite, by invariant) supply held in the
    -- State layer.
    drawFromSupply :: Infer (Either InferError Name)
    drawFromSupply = Infer (do
        next:rest <- lift get
        lift (put rest)
        pure (Right next) )
-- | Add a new binding to the environment.
--
-- The Haskell equivalent would be defining a new value, for example in module
-- scope or in a @let@ block. This corresponds to the "comma" operation used in
-- formal notation,
--
-- @
-- Γ, x:σ ≡ extendEnv Γ (x,σ)
-- @
extendEnv :: Env -> (Name, PType) -> Env
extendEnv (Env bindings) (name, sigma) = Env (M.insert name sigma bindings)
-- -----------------------------------------------------------------------------
-- *** Inferring the types of all language constructs
-- -----------------------------------------------------------------------------
-- | Infer the type of an 'Exp'ression in an 'Env'ironment, resulting in the
-- 'Exp's 'MType' along with a substitution that has to be done in order to reach
-- this goal.
--
-- This is widely known as /Algorithm W/.
infer :: Env -> Exp -> Infer (Subst, MType)
infer _   (ELit lit)    = inferLit lit
infer env (EVar name)   = inferVar env name
infer env (EApp f x)    = inferApp env f x
infer env (EAbs x e)    = inferAbs env x e
infer env (ELet x e e') = inferLet env x e e'
-- | Literals such as 'True' and '1' have their types hard-coded.
inferLit :: Lit -> Infer (Subst, MType)
inferLit lit = pure (mempty, TConst (litType lit))
  where
    litType LBool{}    = "Bool"
    litType LInteger{} = "Integer"
-- | Inferring the type of a variable is done via
--
-- @
-- x:σ ∈ Γ   τ = instantiate(σ)
-- ----------------------------  [Var]
--          Γ ⊢ x:τ
-- @
--
-- This means that if @Γ@ /literally contains/ (@∈@) a value, then it also
-- /entails it/ (@⊢@) in all its instantiations.
inferVar :: Env -> Name -> Infer (Subst, MType)
inferVar env name = do
    tau <- instantiate =<< lookupEnv env name -- x:σ ∈ Γ, τ = instantiate(σ)
    pure (mempty, tau)                        -- Γ ⊢ x:τ
-- | Look up the 'PType' of a 'Name' in the 'Env'ironment.
--
-- This checks whether @x:σ@ is /literally contained/ in @Γ@. For more details
-- about this, see the documentation of 'Env'.
--
-- To give a Haskell analogy: looking up @id@ when @Prelude@ is loaded, the
-- resulting 'PType' would be @id@'s type, namely @forall a. a -> a@.
lookupEnv :: Env -> Name -> Infer PType
lookupEnv (Env env) name =
    maybe (throw (UnknownIdentifier name)) pure (M.lookup name env)
-- | Bind all quantified variables of a 'PType' to 'fresh' type variables.
--
-- __Example:__ instantiating @forall a. a -> b -> a@ results in the 'MType'
-- @c -> b -> c@, where @c@ is a fresh name (to avoid shadowing issues).
--
-- You can picture the 'PType' to be the prototype converted to an instantiated
-- 'MType', which can now be used in the unification process.
--
-- Another way of looking at it is by simply forgetting which variables were
-- quantified, carefully avoiding name clashes when doing so.
--
-- 'instantiate' can also be seen as the opposite of 'generalize', which we'll
-- need later to convert an 'MType' to a 'PType'.
instantiate :: PType -> Infer MType
instantiate (Forall qs t) = do
    subst <- freshSubstFor qs
    pure (applySubst subst t)
  where
    -- Build a substitution mapping each quantified name to a fresh type
    -- variable.
    freshSubstFor :: Set Name -> Infer Subst
    freshSubstFor names = Subst <$> sequenceA (M.fromSet (const fresh) names)
-- | Function application captures the fact that if we have a function and an
-- argument we can give to that function, we also have the result value of the
-- result type available to us.
--
-- @
-- Γ ⊢ f : fτ   Γ ⊢ x : xτ   fxτ = fresh   unify(fτ, xτ → fxτ)
-- -----------------------------------------------------------  [App]
--                       Γ ⊢ f x : fxτ
-- @
--
-- This rule says that given a function and a value with a type, the function
-- type has to unify with a function type that allows the value type to be its
-- argument.
inferApp
    :: Env
    -> Exp -- ^ __f__ x
    -> Exp -- ^ f __x__
    -> Infer (Subst, MType)
inferApp env f x = do
    (sF, fTau) <- infer env f                              -- f : fτ
    (sX, xTau) <- infer (applySubst sF env) x              -- x : xτ
    resultTau <- fresh                                     -- fxτ = fresh
    sU <- unify (applySubst sX fTau, TFun xTau resultTau)  -- unify (fτ, xτ → fxτ)
    let sAll = sU <> sX <> sF                              -- --------------------
    pure (sAll, applySubst sU resultTau)                   -- f x : fxτ
-- | Lambda abstraction is based on the fact that when we introduce a new
-- variable, the resulting lambda maps from that variable's type to the type of
-- the body.
--
-- @
-- τ = fresh   σ = ∀∅. τ   Γ, x:σ ⊢ e:τ'
-- -------------------------------------  [Abs]
--           Γ ⊢ λx.e : τ→τ'
-- @
--
-- Here, @Γ, x:τ@ is @Γ@ extended by one additional mapping, namely @x:τ@.
--
-- Abstraction is typed by extending the environment by a new 'MType', and if
-- under this assumption we can construct a function mapping to a value of that
-- type, we can say that the lambda takes a value and maps to it.
inferAbs
    :: Env
    -> Name -- ^ λ__x__. e
    -> Exp  -- ^ λx. __e__
    -> Infer (Subst, MType)
inferAbs env x e = do
    tau <- fresh                                -- τ = fresh
    -- σ = ∀∅. τ — the binder is monomorphic inside the body.
    let env' = extendEnv env (x, Forall [] tau) -- Γ, x:σ
    (s, tau') <- infer env' e                   -- … ⊢ e:τ'
    pure (s, TFun (applySubst s tau) tau')      -- λx.e : τ→τ'
-- | A let binding allows extending the environment with new bindings in a
-- principled manner. To do this, we first have to typecheck the expression to
-- be introduced. The result of this is then generalized to a 'PType', since let
-- bindings introduce new polymorphic values, which are then added to the
-- environment. Now we can finally typecheck the body of the "in" part of the
-- let binding.
--
-- Note that in our simple language, let is non-recursive, but recursion can be
-- introduced as usual by adding a primitive @fix : (a → a) → a@ if desired.
--
-- @
-- Γ ⊢ e:τ   σ = gen(Γ,τ)   Γ, x:σ ⊢ e':τ'
-- ---------------------------------------  [Let]
--        Γ ⊢ let x = e in e' : τ'
-- @
inferLet
    :: Env
    -> Name -- ^ let __x__ = e in e'
    -> Exp  -- ^ let x = __e__ in e'
    -> Exp  -- ^ let x = e in __e'__
    -> Infer (Subst, MType)
inferLet env x e e' = do
    (sValue, valueTau) <- infer env e                 -- Γ ⊢ e:τ
    let env'  = applySubst sValue env
        sigma = generalize env' valueTau              -- σ = gen(Γ,τ)
        env'' = extendEnv env' (x, sigma)             -- Γ, x:σ
    (sBody, bodyTau) <- infer env'' e'                -- Γ ⊢ e':τ'
    pure (sBody <> sValue, bodyTau)                   -- let x = e in e' : τ'
-- | Generalize an 'MType' to a 'PType' by universally quantifying over all the
-- type variables contained in it, except those already free in the environment.
--
-- >>> let tau = TFun "a" (TFun "b" "a")
-- >>> putPprLn tau
-- a → b → a
-- >>> putPprLn (generalize (Env [("x", Forall [] "b")]) tau)
-- ∀a. a → b → a
--
-- In more formal notation,
--
-- @
-- gen(Γ,τ) = ∀{α}. τ
-- where {α} = free(τ) − free(Γ)
-- @
--
-- 'generalize' can also be seen as the opposite of 'instantiate', which
-- converts a 'PType' to an 'MType'.
generalize :: Env -> MType -> PType
generalize env mType =
    Forall (freeMType mType `S.difference` freeEnv env) mType

185
Main.hs
View File

@@ -1,185 +0,0 @@
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import qualified Data.Map as M
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text.IO as T
import HindleyMilner
-- #############################################################################
-- #############################################################################
-- * Testing
-- #############################################################################
-- #############################################################################
-- #############################################################################
-- ** A small custom Prelude
-- #############################################################################
prelude :: Env
prelude = Env (M.fromList
[ ("(*)", Forall [] (tInteger ~> tInteger ~> tInteger))
, ("(+)", Forall [] (tInteger ~> tInteger ~> tInteger))
, ("(,)", Forall ["a","b"] ("a" ~> "b" ~> TTuple "a" "b"))
, ("(-)", Forall [] (tInteger ~> tInteger ~> tInteger))
, ("(.)", Forall ["a", "b", "c"] (("b" ~> "c") ~> ("a" ~> "b") ~> "a" ~> "c"))
, ("(<)", Forall [] (tInteger ~> tInteger ~> tBool))
, ("(<=)", Forall [] (tInteger ~> tInteger ~> tBool))
, ("(>)", Forall [] (tInteger ~> tInteger ~> tBool))
, ("(>=)", Forall [] (tInteger ~> tInteger ~> tBool))
, ("const", Forall ["a","b"] ("a" ~> "b" ~> "a"))
, ("Cont/>>=", Forall ["a"] ((("a" ~> "r") ~> "r") ~> ("a" ~> (("b" ~> "r") ~> "r")) ~> (("b" ~> "r") ~> "r")))
, ("find", Forall ["a","b"] (("a" ~> tBool) ~> TList "a" ~> tMaybe "a"))
, ("fix", Forall ["a"] (("a" ~> "a") ~> "a"))
, ("foldr", Forall ["a","b"] (("a" ~> "b" ~> "b") ~> "b" ~> TList "a" ~> "b"))
, ("id", Forall ["a"] ("a" ~> "a"))
, ("ifThenElse", Forall ["a"] (tBool ~> "a" ~> "a" ~> "a"))
, ("Left", Forall ["a","b"] ("a" ~> TEither "a" "b"))
, ("length", Forall ["a"] (TList "a" ~> tInteger))
, ("map", Forall ["a","b"] (("a" ~> "b") ~> TList "a" ~> TList "b"))
, ("reverse", Forall ["a"] (TList "a" ~> TList "a"))
, ("Right", Forall ["a","b"] ("b" ~> TEither "a" "b"))
, ("[]", Forall ["a"] (TList "a"))
, ("(:)", Forall ["a"] ("a" ~> TList "a" ~> TList "a"))
])
where
tBool = TConst "Bool"
tInteger = TConst "Integer"
tMaybe = TEither (TConst "()")
-- | Synonym for 'TFun' to make writing type signatures easier.
--
-- Instead of
--
-- @
-- Forall ["a","b"] (TFun "a" (TFun "b" "a"))
-- @
--
-- we can write
--
-- @
-- Forall ["a","b"] ("a" ~> "b" ~> "a")
-- @
(~>) :: MType -> MType -> MType
(~>) = TFun
infixr 9 ~>
-- #############################################################################
-- ** Run it!
-- #############################################################################
-- | Run type inference on a couple of values
main :: IO ()
main = do
let inferAndPrint = T.putStrLn . (" " <>) . showType prelude
T.putStrLn "Well-typed:"
do
inferAndPrint (lambda ["x"] "x")
inferAndPrint (lambda ["f","g","x"] (apply "f" ["x", apply "g" ["x"]]))
inferAndPrint (lambda ["f","g","x"] (apply "f" [apply "g" ["x"]]))
inferAndPrint (lambda ["m", "k", "c"] (apply "m" [lambda ["x"] (apply "k" ["x", "c"])])) -- >>= for Cont
inferAndPrint (lambda ["f"] (apply "(.)" ["reverse", apply "map" ["f"]]))
inferAndPrint (apply "find" [lambda ["x"] (apply "(>)" ["x", int 0])])
inferAndPrint (apply "map" [apply "map" ["map"]])
inferAndPrint (apply "(*)" [int 1, int 2])
inferAndPrint (apply "foldr" ["(+)", int 0])
inferAndPrint (apply "map" ["length"])
inferAndPrint (apply "map" ["map"])
inferAndPrint (lambda ["x"] (apply "ifThenElse" [apply "(<)" ["x", int 0], int 0, "x"]))
inferAndPrint (lambda ["x"] (apply "fix" [lambda ["xs"] (apply "(:)" ["x", "xs"])]))
T.putStrLn "Ill-typed:"
do
inferAndPrint (apply "(*)" [int 1, bool True])
inferAndPrint (apply "foldr" [int 1])
inferAndPrint (lambda ["x"] (apply "x" ["x"]))
inferAndPrint (lambda ["x"] (ELet "xs" (apply "(:)" ["x", "xs"]) "xs"))
-- | Build multiple lambda bindings.
--
-- Instead of
--
-- @
-- EAbs "f" (EAbs "x" (EApp "f" "x"))
-- @
--
-- we can write
--
-- @
-- lambda ["f", "x"] (EApp "f" "x")
-- @
--
-- for
--
-- @
-- λf x. f x
-- @
lambda :: [Name] -> Exp -> Exp
lambda names expr = foldr EAbs expr names
-- | Apply a function to multiple arguments.
--
-- Instead of
--
-- @
-- EApp (EApp (EApp "f" "x") "y") "z")
-- @
--
-- we can write
--
-- @
-- apply "f" ["x", "y", "z"]
-- @
--
-- for
--
-- @
-- f x y z
-- @
apply :: Exp -> [Exp] -> Exp
apply = foldl EApp
-- | Construct an integer literal.
int :: Integer -> Exp
int = ELit . LInteger
-- | Construct a boolean literal.
bool :: Bool -> Exp
bool = ELit . LBool
-- | Convenience function to run type inference algorithm
showType :: Env -- ^ Starting environment, e.g. 'prelude'.
-> Exp -- ^ Expression to typecheck
-> Text -- ^ Text representation of the result. Contains an error
-- message on failure.
showType env expr =
case (runInfer . fmap (generalize (Env mempty) . uncurry applySubst) . infer env) expr of
Left err -> "Error inferring type of " <> ppr expr <>": " <> ppr err
Right ty -> ppr expr <> " :: " <> ppr ty

View File

@@ -66,7 +66,6 @@ https://skillsmatter.com/skillscasts/10868-inside-the-rust-compiler
### Parsing
http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
https://soc.github.io/languages/unified-condition-syntax
[Crafting Interpreters](http://www.craftinginterpreters.com/)

50
TODO.md
View File

@@ -2,56 +2,6 @@
# TODO Items
- https://nshipster.com/never/
-https://cranelift.readthedocs.io/en/latest/?badge=latest<Paste>
-consult http://gluon-lang.org/book/embedding-api.html
- if/match playground
simple if
`if x == 1.0 { "a" } else { "b" }`
one comparison multiple targets:
`if x == { 1.0 -> "a", 2.0 -> "b", else -> "c" }`
different comparison operators/ method calls:
`if x { == 1.0 -> "a", eq NaN -> "n", .hella() -> "h", else -> "z" }`
pattern matching/introducing bindings:
`if alice { .age < 18 -> "18", is Person("Alice", age) -> "${age}", else -> "none" }`
pattern matching w/ if-let:
`if person is Person("Alice", age) { "${age}" } else { "nope" }`
-https://soc.github.io/languages/unified-condition-syntax syntax:
`if <cond-expr>" then <then-expr> else <else-expr>`
`if <half-expr> \n <rest-expr1> then <result1-expr> \n <rest-expr2> then <result-expr2> else <result3-expr>`
-and rest-exprs (or "targets") can have 'is' for pattern-matching, actually so can a full cond-expr
UNIFIED IF EXPRESSIONS FINAL WORK:
basic syntax:
`if_expr := if discriminator '{' (guard_expr)* '}'`
`guard_expr := pattern 'then' block_or_expr'`
`pattern := rhs | is_pattern`
`is_pattern := 'is' ???`
`rhs := expression | ???`
if the only two guard patterns are true and false, then the abbreviated syntax:
`'if' discriminator 'then' block_or_expr 'else' block_or_expr`
can replace `'if' discriminator '{' 'true' 'then' block_or_expr; 'false' 'then' block_or_expr '}'`
- Next priorities: - get ADTs working, get matches working
- inclusive/exclusive range syntax like .. vs ..=
- sketch of an idea for the REPL:
-each compiler pass should be a (procedural?) macro like
compiler_pass!("parse", dataproducts: ["ast", "parse_tree"], {

View File

@@ -7,7 +7,7 @@ mod parser;
mod eval;
mod compilation;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, UnfinishedComputation, FinishedComputation, TraceArtifact};
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, LanguageOutput, TraceArtifact};
#[derive(Debug)]
pub struct TokenError {
@@ -42,37 +42,40 @@ impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
format!("maaru")
}
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
let mut output = UnfinishedComputation::default();
fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> LanguageOutput {
let mut output = LanguageOutput::default();
let tokens = match tokenizer::tokenize(input) {
Ok(tokens) => {
if let Some(_) = options.debug_passes.get("tokens") {
if options.debug.tokens {
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
}
tokens
},
Err(err) => {
return output.finish(Err(format!("Tokenization error: {:?}\n", err.msg)))
output.add_output(format!("Tokenization error: {:?}\n", err.msg));
return output;
}
};
let ast = match parser::parse(&tokens, &[]) {
Ok(ast) => {
if let Some(_) = options.debug_passes.get("ast") {
if options.debug.ast {
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
}
ast
},
Err(err) => {
return output.finish(Err(format!("Parse error: {:?}\n", err.msg)))
output.add_output(format!("Parse error: {:?}\n", err.msg));
return output;
}
};
let mut evaluation_output = String::new();
for s in self.evaluator.run(ast).iter() {
evaluation_output.push_str(s);
}
output.finish(Ok(evaluation_output))
output.add_output(evaluation_output);
return output;
}
/* TODO make this work with new framework */

View File

@@ -4,7 +4,7 @@ extern crate itertools;
extern crate schala_repl;
use itertools::Itertools;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, FinishedComputation, UnfinishedComputation};
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, LanguageOutput};
pub struct Robo {
}
@@ -155,16 +155,18 @@ impl ProgrammingLanguageInterface for Robo {
format!("robo")
}
fn execute_pipeline(&mut self, input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
let output = UnfinishedComputation::default();
fn evaluate_in_repl(&mut self, input: &str, _eval_options: &EvalOptions) -> LanguageOutput {
let mut output = LanguageOutput::default();
let tokens = match tokenize(input) {
Ok(tokens) => tokens,
Err(e) => {
return output.finish(Err(format!("Tokenize error: {:?}", e)));
output.add_output(format!("Tokenize error: {:?}", e));
return output;
}
};
output.finish(Ok(format!("{:?}", tokens)))
output.add_output(format!("{:?}", tokens));
output
}
}

View File

@@ -4,7 +4,7 @@ extern crate itertools;
extern crate schala_repl;
use itertools::Itertools;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, UnfinishedComputation, FinishedComputation};
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, LanguageOutput};
use std::iter::Peekable;
use std::vec::IntoIter;
use std::str::Chars;
@@ -73,11 +73,12 @@ impl ProgrammingLanguageInterface for Rukka {
format!("rukka")
}
fn execute_pipeline(&mut self, input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
let output = UnfinishedComputation::default();
fn evaluate_in_repl(&mut self, input: &str, _eval_options: &EvalOptions) -> LanguageOutput {
let mut output = LanguageOutput::default();
let sexps = match read(input) {
Err(err) => {
return output.finish(Err(format!("Error: {}", err)));
output.add_output(format!("Error: {}", err));
return output;
},
Ok(sexps) => sexps
};
@@ -88,7 +89,8 @@ impl ProgrammingLanguageInterface for Rukka {
Err(err) => format!("{} Error: {}", i, err),
}
}).intersperse(format!("\n")).collect();
output.finish(Ok(output_str))
output.add_output(output_str);
output
}
}

View File

@@ -4,9 +4,9 @@ version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]
[dependencies]
quote = "0.5.2"
syn = { version = "0.13.1", features = ["full", "extra-traits"] }
quote = "0.5"
schala-repl = { path = "../schala-repl" }
[lib]
proc-macro = true

View File

@@ -1,103 +1,95 @@
#![feature(trace_macros)]
#![feature(proc_macro)]
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate syn;
extern crate schala_repl;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use syn::{Ident, Attribute, DeriveInput};
use syn::{Expr, Lit, ExprLit};
use syn::punctuated::Punctuated;
use syn::synom::Synom;
fn extract_attribute_arg_by_name(name: &str, attrs: &Vec<Attribute>) -> Option<String> {
use syn::{Meta, Lit, MetaNameValue};
attrs.iter().map(|attr| attr.interpret_meta()).find(|meta| {
match meta {
&Some(Meta::NameValue(MetaNameValue { ident, .. })) if ident.as_ref() == name => true,
_ => false
}
}).and_then(|meta| {
match meta {
Some(Meta::NameValue(MetaNameValue { lit: Lit::Str(litstr), .. })) => Some(litstr.value()),
_ => None,
}
})
fn get_string_args(input: Expr) -> Vec<String> {
let mut contained_strings = Vec::new();
match input {
Expr::Array(array) => {
for item in array.elems {
if let Expr::Lit(ExprLit { lit: Lit::Str(s), ..}) = item {
contained_strings.push(s.value());
} else {
panic!("Non-string-literal input to compiler_pass_sequence");
}
}
},
_ => panic!("Non-array input to compiler_pass_sequence"),
}
contained_strings
}
fn extract_attribute_list(name: &str, attrs: &Vec<Attribute>) -> Option<Vec<(Ident, Option<Vec<Ident>>)>> {
use syn::{Meta, MetaList, NestedMeta};
attrs.iter().find(|attr| {
match attr.path.segments.iter().nth(0) {
Some(segment) if segment.ident.as_ref() == name => true,
_ => false
#[proc_macro]
pub fn compiler_pass_sequence(input: TokenStream) -> TokenStream {
/*
for token_tree in input {
//println!("ITEM: {:?}", token_tree.kind);
match token_tree.kind {
TokenNode::Literal(l) => println!("{:?}", l),
_ => ()
}
}).and_then(|attr| {
match attr.interpret_meta() {
Some(Meta::List(MetaList { nested, .. })) => {
Some(nested.iter().map(|nested_meta| match nested_meta {
&NestedMeta::Meta(Meta::Word(ident)) => (ident, None),
&NestedMeta::Meta(Meta::List(MetaList { ident, nested: ref nested2, .. })) => {
let own_args = nested2.iter().map(|nested_meta2| match nested_meta2 {
&NestedMeta::Meta(Meta::Word(ident)) => ident,
_ => panic!("Bad format for doubly-nested attribute list")
}).collect();
(ident, Some(own_args))
},
_ => panic!("Bad format for nested list")
}).collect())
},
_ => panic!("{} must be a comma-delimited list surrounded by parens", name)
}
})
}
}
*/
#[proc_macro_derive(ProgrammingLanguageInterface, attributes(LanguageName, SourceFileExtension, PipelineSteps))]
pub fn derive_programming_language_interface(input: TokenStream) -> TokenStream {
use schala_repl::PassDescriptor;
let ast: DeriveInput = syn::parse(input).unwrap();
let name = &ast.ident;
let attrs = &ast.attrs;
let input: Expr = syn::parse(input).unwrap();
let stages = get_string_args(input);
let from_macro = format!("{:?}", stages);
let language_name: String = extract_attribute_arg_by_name("LanguageName", attrs).expect("LanguageName is required");
let file_ext = extract_attribute_arg_by_name("SourceFileExtension", attrs).expect("SourceFileExtension is required");
let passes = extract_attribute_list("PipelineSteps", attrs).expect("PipelineSteps are required");
let pass_idents = passes.iter().map(|x| x.0);
//let pass_names: Vec<String> = passes.iter().map(|pass| pass.0.to_string()).collect();
let pass_descriptors = passes.iter().map(|pass| {
let name = pass.0.to_string();
let opts: Vec<String> = match &pass.1 {
None => vec![],
Some(opts) => opts.iter().map(|o| o.to_string()).collect(),
};
quote! {
PassDescriptor {
name: #name.to_string(),
debug_options: vec![#(format!(#opts)),*]
}
}
});
let tokens = quote! {
use schala_repl::PassDescriptor;
impl ProgrammingLanguageInterface for #name {
fn get_language_name(&self) -> String {
#language_name.to_string()
}
fn get_source_file_suffix(&self) -> String {
#file_ext.to_string()
}
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
let mut chain = pass_chain![self, options; #(#pass_idents),* ];
chain(input)
}
fn get_passes(&self) -> Vec<PassDescriptor> {
vec![ #(#pass_descriptors),* ]
//vec![ #(PassDescriptor { name: #pass_names.to_string(), debug_options: vec![] }),* ]
}
let output = quote! {
fn new_execute(&mut self, input: &str, _options: &EvalOptions) -> FinishedComputation {
let evaluation = UnfinishedComputation::default();
evaluation.output(Err(#from_macro.to_string()))
}
};
tokens.into()
output.into()
}
/// Attribute macro stub for marking a function as a compiler pass,
/// intended usage: `#[compiler_pass(<name of pass>)]`.
///
/// Currently it only logs the attribute's metadata to stdout and returns
/// the annotated item unchanged.
#[proc_macro_attribute]
pub fn compiler_pass(metadata: TokenStream, function: TokenStream) -> TokenStream {
    println!("Compiler pass metadata: {}", metadata);
    function
}
#[cfg(test)]
mod tests {
    /// Smoke test: confirms the crate's test harness compiles and runs.
    #[test]
    fn it_works() {
        let sum = 2 + 2;
        assert_eq!(sum, 4);
    }
}
/* in Rocket
*
#[get("/")]
fn hi() -> &'static str {
"hello"
}
GETS MAPPED TO:
static hi_info = RouteInfo {
name: "hi",
method: Method::Get,
path: "/",
handler: hi_route,
}
fn hi_route(req: &Request) -> Outcome {
let responder = hi();
Outcome::from(req, responder);
}
*/

View File

@@ -1,176 +0,0 @@
use std::rc::Rc;
use builtin::{BinOp, PrefixOp};
/// The root of a parsed program: an ordered list of top-level statements.
#[derive(Debug, PartialEq)]
pub struct AST(pub Vec<Statement>);
/// A single statement: either a bare expression or a declaration.
#[derive(Debug, PartialEq, Clone)]
pub enum Statement {
ExpressionStatement(Expression),
Declaration(Declaration),
}
/// A sequence of statements forming a block (e.g. a function body).
pub type Block = Vec<Statement>;
pub type ParamName = Rc<String>;
pub type InterfaceName = Rc<String>; //should be a singleton I think??
/// A formal parameter: its name plus an optional type annotation.
pub type FormalParam = (ParamName, Option<TypeName>);
/// Every kind of declaration the language supports.
#[derive(Debug, PartialEq, Clone)]
pub enum Declaration {
/// A function signature with no body (e.g. inside an interface).
FuncSig(Signature),
/// A full function definition: signature plus body block.
FuncDecl(Signature, Block),
/// An algebraic data type definition.
TypeDecl {
name: TypeSingletonName,
body: TypeBody,
mutable: bool
},
TypeAlias(Rc<String>, Rc<String>), //should have TypeSingletonName in it, or maybe just String, not sure
/// A variable binding; `constant` distinguishes immutable from `mut` bindings.
Binding {
name: Rc<String>,
constant: bool,
expr: Expression,
},
/// An impl block for a type, optionally implementing a named interface.
Impl {
type_name: TypeName,
interface_name: Option<InterfaceName>,
block: Vec<Declaration>,
},
/// An interface (trait-like) declaration: a name and its required signatures.
Interface {
name: Rc<String>,
signatures: Vec<Signature>
}
}
/// A function's interface: name, formal parameters, and optional return type annotation.
#[derive(Debug, PartialEq, Clone)]
pub struct Signature {
pub name: Rc<String>,
pub params: Vec<FormalParam>,
pub type_anno: Option<TypeName>,
}
/// The body of a type declaration: its list of variants.
#[derive(Debug, PartialEq, Clone)]
pub struct TypeBody(pub Vec<Variant>);
/// One variant of an algebraic data type.
#[derive(Debug, PartialEq, Clone)]
pub enum Variant {
/// A variant with no payload.
UnitStruct(Rc<String>),
/// A variant with positional (tuple-style) fields.
TupleStruct(Rc<String>, Vec<TypeName>),
/// A variant with named (record-style) fields.
Record(Rc<String>, Vec<(Rc<String>, TypeName)>),
}
/// An expression together with an optional source-level type annotation.
#[derive(Debug, PartialEq, Clone)]
pub struct Expression(pub ExpressionType, pub Option<TypeName>);
/// A syntactic type name: a tuple of types or a (possibly parameterized) named type.
#[derive(Debug, PartialEq, Clone)]
pub enum TypeName {
Tuple(Vec<TypeName>),
Singleton(TypeSingletonName)
}
/// A named type with optional type parameters, e.g. `Option<T>`.
#[derive(Debug, PartialEq, Clone)]
pub struct TypeSingletonName {
pub name: Rc<String>,
pub params: Vec<TypeName>,
}
/// Every kind of expression in the surface syntax.
#[derive(Debug, PartialEq, Clone)]
pub enum ExpressionType {
/// An unsigned ("natural") integer literal.
NatLiteral(u64),
FloatLiteral(f64),
StringLiteral(Rc<String>),
BoolLiteral(bool),
/// A binary operation, e.g. `a + b`.
BinExp(BinOp, Box<Expression>, Box<Expression>),
/// A prefix operation, e.g. `!a` or `-a`.
PrefixExp(PrefixOp, Box<Expression>),
TupleLiteral(Vec<Expression>),
/// A bare identifier referring to a binding, function, or constructor.
Value(Rc<String>),
/// Construction of a record-style struct variant by name.
NamedStruct {
name: Rc<String>,
fields: Vec<(Rc<String>, Expression)>,
},
/// A function or constructor application.
Call {
f: Box<Expression>,
arguments: Vec<Expression>,
},
/// An indexing expression; multiple indexers are allowed syntactically.
Index {
indexee: Box<Expression>,
indexers: Vec<Expression>,
},
IfExpression {
discriminator: Box<Discriminator>,
body: Box<IfExpressionBody>,
},
// NOTE(review): semantics of a `None` condition not visible here — confirm
// whether it means an unconditional loop.
WhileExpression {
condition: Option<Box<Expression>>,
body: Block,
},
ForExpression {
enumerators: Vec<Enumerator>,
body: Box<ForBody>,
},
/// An anonymous function literal.
Lambda {
params: Vec<FormalParam>,
body: Block,
},
ListLiteral(Vec<Expression>),
}
/// The discriminator of an `if` expression: either a plain expression,
/// or an expression paired with a binary operator.
#[derive(Debug, PartialEq, Clone)]
pub enum Discriminator {
Simple(Expression),
BinOp(Expression, BinOp)
}
/// The body forms an `if` expression can take.
#[derive(Debug, PartialEq, Clone)]
pub enum IfExpressionBody {
/// A plain conditional; the else block is optional.
SimpleConditional(Block, Option<Block>),
/// A single-pattern conditional; the else block is optional.
SimplePatternMatch(Pattern, Block, Option<Block>),
/// A list of guarded arms.
GuardList(Vec<GuardArm>)
}
/// One arm of a guard list: the guard plus the block it runs when matched.
#[derive(Debug, PartialEq, Clone)]
pub struct GuardArm {
pub guard: Guard,
pub body: Block,
}
/// A guard: either a pattern or a partial ("half") expression.
#[derive(Debug, PartialEq, Clone)]
pub enum Guard {
Pat(Pattern),
HalfExpr(HalfExpr)
}
/// An expression fragment with an optional leading operator; presumably
/// completed by the discriminator it is matched against — TODO confirm.
#[derive(Debug, PartialEq, Clone)]
pub struct HalfExpr {
pub op: Option<BinOp>,
pub expr: ExpressionType,
}
/// Patterns usable in pattern-matching positions.
#[derive(Debug, PartialEq, Clone)]
pub enum Pattern {
/// The wildcard pattern.
Ignored,
TuplePattern(Vec<Pattern>),
Literal(PatternLiteral),
TupleStruct(Rc<String>, Vec<Pattern>),
Record(Rc<String>, Vec<(Rc<String>, Pattern)>),
}
/// Literal forms allowed inside patterns.
#[derive(Debug, PartialEq, Clone)]
pub enum PatternLiteral {
NumPattern(ExpressionType),
StringPattern(Rc<String>),
BoolPattern(bool),
/// Binds the matched value to a variable name.
VarPattern(Rc<String>)
}
/// One clause of a for expression: a loop variable and the expression
/// that generates its values.
#[derive(Debug, PartialEq, Clone)]
pub struct Enumerator {
pub id: Rc<String>,
pub generator: Expression,
}
/// The body of a for expression: a single-expression (monadic-return)
/// form or a statement block.
#[derive(Debug, PartialEq, Clone)]
pub enum ForBody {
MonadicReturn(Expression),
StatementBlock(Block),
}

View File

@@ -1,31 +1,8 @@
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use self::Type::*; use self::TConstOld::*;
//TODO get rid of these types and replace them with the right MonoType or whatever ones later
#[derive(Debug, PartialEq, Clone)]
pub enum Type {
Const(TConstOld),
Func(Box<Type>, Box<Type>),
}
#[derive(Debug, PartialEq, Clone)]
pub enum TConstOld {
Nat,
Int,
Float,
StringT,
Bool,
}
impl fmt::Display for Type {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
use typechecking::{Type, TypeResult, TConst};
use self::Type::*; use self::TConst::*;
#[derive(Debug, PartialEq, Clone)]
pub struct BinOp {
@@ -39,7 +16,7 @@ impl BinOp {
pub fn sigil(&self) -> &Rc<String> {
&self.sigil
}
pub fn get_type(&self) -> Result<Type, String> {
pub fn get_type(&self) -> TypeResult<Type> {
let s = self.sigil.as_str();
BINOPS.get(s).map(|x| x.0.clone()).ok_or(format!("Binop {} not found", s))
}
@@ -67,7 +44,7 @@ impl PrefixOp {
pub fn is_prefix(op: &str) -> bool {
PREFIX_OPS.get(op).is_some()
}
pub fn get_type(&self) -> Result<Type, String> {
pub fn get_type(&self) -> TypeResult<Type> {
let s = self.sigil.as_str();
PREFIX_OPS.get(s).map(|x| x.0.clone()).ok_or(format!("Prefix op {} not found", s))
}
@@ -86,15 +63,15 @@ lazy_static! {
lazy_static! {
static ref BINOPS: HashMap<&'static str, (Type, (), i32)> =
hashmap! {
"+" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 10),
"-" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 10),
"*" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"/" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Float))))), (), 20),
"//" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20), //TODO change this to `quot`
"%" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"+" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 10),
"-" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 10),
"*" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
"/" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Float))))), (), 20),
"//" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20), //TODO change this to `quot`
"%" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
"++" => (Func(bx!(Const(StringT)), bx!(Func(bx!(Const(StringT)), bx!(Const(StringT))))), (), 30),
"^" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"&" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"|" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
"^" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
"&" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
"|" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
};
}

View File

@@ -1,375 +1,317 @@
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
use std::fmt::Write;
use std::io;
use itertools::Itertools;
use util::StateStack;
use reduced_ast::{ReducedAST, Stmt, Expr, Lit, Func};
use symbol_table::{SymbolSpec, Symbol, SymbolTable};
use parsing::{AST, Statement, Declaration, Expression, Variant, ExpressionType};
use builtin::{BinOp, PrefixOp};
pub struct State<'a> {
values: StateStack<'a, Rc<String>, ValueEntry>,
symbol_table_handle: Rc<RefCell<SymbolTable>>,
}
macro_rules! builtin_binding {
($name:expr, $values:expr) => {
$values.insert(Rc::new(format!($name)), ValueEntry::Binding { constant: true, val: Expr::Func(Func::BuiltIn(Rc::new(format!($name)))) });
}
parent_frame: Option<&'a State<'a>>,
values: HashMap<Rc<String>, ValueEntry>,
}
impl<'a> State<'a> {
pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> State<'a> {
let mut values = StateStack::new(Some(format!("global")));
builtin_binding!("print", values);
builtin_binding!("println", values);
builtin_binding!("getline", values);
State { values, symbol_table_handle }
}
pub fn debug_print(&self) -> String {
format!("Values: {:?}", self.values)
fn insert(&mut self, name: Rc<String>, value: ValueEntry) {
self.values.insert(name, value);
}
fn lookup(&self, name: &Rc<String>) -> Option<&ValueEntry> {
match (self.values.get(name), self.parent_frame) {
(None, None) => None,
(None, Some(parent)) => parent.lookup(name),
(Some(value), _) => Some(value),
}
}
}
#[derive(Debug)]
enum ValueEntry {
Binding {
constant: bool,
val: /*FullyEvaluatedExpr*/ Expr,
val: FullyEvaluatedExpr,
},
Function {
param_names: Vec<Rc<String>>,
body: Vec<Statement>,
}
}
/// Result type for all evaluator operations; errors are plain strings.
type EvalResult<T> = Result<T, String>;
/// A fully-normalized runtime value produced by evaluation.
#[derive(Debug, PartialEq, Clone)]
enum FullyEvaluatedExpr {
UnsignedInt(u64),
SignedInt(i64),
Float(f64),
Str(String),
Bool(bool),
/// A reference to a named function.
FuncLit(Rc<String>),
/// A value of a user-defined type, represented only by its display string.
Custom {
string_rep: Rc<String>,
},
Tuple(Vec<FullyEvaluatedExpr>),
List(Vec<FullyEvaluatedExpr>)
}
impl Expr {
fn to_repl(&self) -> String {
use self::Lit::*;
use self::Func::*;
fn paren_wrapped_vec(exprs: &Vec<Expr>) -> String {
let mut buf = String::new();
write!(buf, "(").unwrap();
for term in exprs.iter().map(|e| Some(e)).intersperse(None) {
match term {
Some(e) => write!(buf, "{}", e.to_repl()).unwrap(),
None => write!(buf, ", ").unwrap(),
};
}
write!(buf, ")").unwrap();
buf
}
impl FullyEvaluatedExpr {
fn to_string(&self) -> String {
use self::FullyEvaluatedExpr::*;
match self {
Expr::Lit(ref l) => match l {
Nat(n) => format!("{}", n),
Int(i) => format!("{}", i),
Float(f) => format!("{}", f),
Bool(b) => format!("{}", b),
StringLit(s) => format!("\"{}\"", s),
Custom(name, args) if args.len() == 0 => format!("{}", name),
Custom(name, args) => format!("{}{}", name, paren_wrapped_vec(args)),
&UnsignedInt(ref n) => format!("{}", n),
&SignedInt(ref n) => format!("{}", n),
&Float(ref f) => format!("{}", f),
&Str(ref s) => format!("\"{}\"", s),
&Bool(ref b) => format!("{}", b),
&Custom { ref string_rep } => format!("{}", string_rep),
&Tuple(ref items) => {
let mut buf = String::new();
write!(buf, "(").unwrap();
for term in items.iter().map(|e| Some(e)).intersperse(None) {
match term {
Some(e) => write!(buf, "{}", e.to_string()).unwrap(),
None => write!(buf, ", ").unwrap(),
};
}
write!(buf, ")").unwrap();
buf
},
Expr::Func(f) => match f {
BuiltIn(name) => format!("<built-in function {}>", name),
UserDefined { name: None, .. } => format!("<function>"),
UserDefined { name: Some(name), .. } => format!("<function {}>", name),
},
Expr::Constructor { name } => format!("<constructor {}>", name),
Expr::Tuple(exprs) => paren_wrapped_vec(exprs),
_ => format!("{:?}", self),
&FuncLit(ref name) => format!("<function {}>", name),
&List(ref items) => {
let mut buf = String::new();
write!(buf, "[").unwrap();
for term in items.iter().map(|e| Some(e)).intersperse(None) {
match term {
Some(e) => write!(buf, "{}", e.to_string()).unwrap(),
None => write!(buf, ", ").unwrap()
}
}
write!(buf, "]").unwrap();
buf
}
}
}
}
impl<'a> State<'a> {
pub fn evaluate(&mut self, ast: ReducedAST, repl: bool) -> Vec<Result<String, String>> {
pub fn new() -> State<'a> {
State { parent_frame: None, values: HashMap::new() }
}
pub fn new_with_parent(parent: &'a State<'a>) -> State<'a> {
State { parent_frame: Some(parent), values: HashMap::new() }
}
pub fn evaluate(&mut self, ast: AST) -> Vec<Result<String, String>> {
let mut acc = vec![];
// handle prebindings
for statement in ast.0.iter() {
self.prebinding(statement);
}
for statement in ast.0 {
match self.statement(statement) {
Ok(Some(ref output)) if repl => acc.push(Ok(output.to_repl())),
Ok(_) => (),
match self.eval_statement(statement) {
Ok(output) => {
if let Some(fully_evaluated) = output {
acc.push(Ok(fully_evaluated.to_string()));
}
},
Err(error) => {
acc.push(Err(format!("Runtime error: {}", error)));
acc.push(Err(format!("Eval error: {}", error)));
return acc;
},
}
}
acc
}
}
fn prebinding(&mut self, stmt: &Stmt) {
match stmt {
Stmt::PreBinding { name, func } => {
let v_entry = ValueEntry::Binding { constant: true, val: Expr::Func(func.clone()) };
self.values.insert(name.clone(), v_entry);
},
Stmt::Expr(_expr) => {
//TODO have this support things like nested function defs
},
_ => ()
}
}
fn statement(&mut self, stmt: Stmt) -> EvalResult<Option<Expr>> {
match stmt {
Stmt::Binding { name, constant, expr } => {
let val = self.expression(expr)?;
self.values.insert(name.clone(), ValueEntry::Binding { constant, val });
Ok(None)
},
Stmt::Expr(expr) => Ok(Some(self.expression(expr)?)),
Stmt::PreBinding {..} | Stmt::Noop => Ok(None),
}
}
fn block(&mut self, stmts: Vec<Stmt>) -> EvalResult<Expr> {
let mut ret = None;
for stmt in stmts {
ret = self.statement(stmt)?;
}
Ok(ret.unwrap_or(Expr::Unit))
}
fn expression(&mut self, expr: Expr) -> EvalResult<Expr> {
use self::Expr::*;
match expr {
literal @ Lit(_) => Ok(literal),
Call { box f, args } => {
match self.expression(f)? {
Constructor {name} => self.apply_data_constructor(name, args),
Func(f) => self.apply_function(f, args),
other => return Err(format!("Tried to call {:?} which is not a function or data constructor", other)),
}
},
Val(v) => self.value(v),
constr @ Constructor { .. } => Ok(constr),
func @ Func(_) => Ok(func),
Tuple(exprs) => Ok(Tuple(exprs.into_iter().map(|expr| self.expression(expr)).collect::<Result<Vec<Expr>,_>>()?)),
Conditional { box cond, then_clause, else_clause } => self.conditional(cond, then_clause, else_clause),
Assign { box val, box expr } => {
let name = match val {
Expr::Val(name) => name,
_ => return Err(format!("Trying to assign to a non-value")),
};
let constant = match self.values.lookup(&name) {
None => return Err(format!("{} is undefined", name)),
Some(ValueEntry::Binding { constant, .. }) => constant.clone(),
};
if constant {
return Err(format!("trying to update {}, a non-mutable binding", name));
}
let val = self.expression(expr)?;
self.values.insert(name.clone(), ValueEntry::Binding { constant: false, val });
Ok(Expr::Unit)
},
e => Err(format!("Expr {:?} eval not implemented", e))
}
}
fn apply_data_constructor(&mut self, name: Rc<String>, args: Vec<Expr>) -> EvalResult<Expr> {
{
let symbol_table = self.symbol_table_handle.borrow();
match symbol_table.values.get(&name) {
Some(Symbol { spec: SymbolSpec::DataConstructor { type_name, type_args }, name }) => {
if args.len() != type_args.len() {
return Err(format!("Data constructor {} requires {} args", name, type_args.len()));
}
()
},
_ => return Err(format!("Bad symbol {}", name))
}
}
let evaled_args = args.into_iter().map(|expr| self.expression(expr)).collect::<Result<Vec<Expr>,_>>()?;
//let evaled_args = vec![];
Ok(Expr::Lit(self::Lit::Custom(name.clone(), evaled_args)))
}
fn apply_function(&mut self, f: Func, args: Vec<Expr>) -> EvalResult<Expr> {
match f {
Func::BuiltIn(sigil) => self.apply_builtin(sigil, args),
Func::UserDefined { params, body, name } => {
if params.len() != args.len() {
return Err(format!("calling a {}-argument function with {} args", params.len(), args.len()))
}
let mut func_state = State {
values: self.values.new_frame(name.map(|n| format!("{}", n))),
symbol_table_handle: self.symbol_table_handle.clone(),
};
for (param, val) in params.into_iter().zip(args.into_iter()) {
let val = func_state.expression(val)?;
func_state.values.insert(param, ValueEntry::Binding { constant: true, val });
}
// TODO figure out function return semantics
func_state.block(body)
}
}
}
fn apply_builtin(&mut self, name: Rc<String>, args: Vec<Expr>) -> EvalResult<Expr> {
use self::Expr::*;
use self::Lit::*;
let evaled_args: Result<Vec<Expr>, String> = args.into_iter().map(|arg| self.expression(arg)).collect();
let evaled_args = evaled_args?;
Ok(match (name.as_str(), evaled_args.as_slice()) {
/* binops */
("+", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l + r)),
("++", &[Lit(StringLit(ref s1)), Lit(StringLit(ref s2))]) => Lit(StringLit(Rc::new(format!("{}{}", s1, s2)))),
("-", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l - r)),
("*", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l * r)),
("/", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Float((l as f64)/ (r as f64))),
("//", &[Lit(Nat(l)), Lit(Nat(r))]) => if r == 0 {
return Err(format!("divide by zero"));
} else {
Lit(Nat(l / r))
},
("%", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l % r)),
("^", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l ^ r)),
("&", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l & r)),
("|", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Nat(l | r)),
("==", &[Lit(Nat(l)), Lit(Nat(r))]) => Lit(Bool(l == r)),
("==", &[Lit(Int(l)), Lit(Int(r))]) => Lit(Bool(l == r)),
("==", &[Lit(Float(l)), Lit(Float(r))]) => Lit(Bool(l == r)),
("==", &[Lit(Bool(l)), Lit(Bool(r))]) => Lit(Bool(l == r)),
("==", &[Lit(StringLit(ref l)), Lit(StringLit(ref r))]) => Lit(Bool(l == r)),
/* prefix ops */
("!", &[Lit(Bool(true))]) => Lit(Bool(false)),
("!", &[Lit(Bool(false))]) => Lit(Bool(true)),
("-", &[Lit(Nat(n))]) => Lit(Int(-1*(n as i64))),
("-", &[Lit(Int(n))]) => Lit(Int(-1*(n as i64))),
("+", &[Lit(Int(n))]) => Lit(Int(n)),
("+", &[Lit(Nat(n))]) => Lit(Nat(n)),
/* builtin functions */
("print", &[ref anything]) => {
print!("{}", anything.to_repl());
Expr::Unit
},
("println", &[ref anything]) => {
println!("{}", anything.to_repl());
Expr::Unit
},
("getline", &[]) => {
let mut buf = String::new();
io::stdin().read_line(&mut buf).expect("Error readling line in 'getline'");
Lit(StringLit(Rc::new(buf.trim().to_string())))
},
(x, args) => return Err(format!("bad or unimplemented builtin {:?} | {:?}", x, args)),
impl<'a> State<'a> {
/// Evaluate one statement. Expression statements yield a value;
/// declarations mutate the environment and yield `None`.
fn eval_statement(&mut self, statement: Statement) -> EvalResult<Option<FullyEvaluatedExpr>> {
Ok(match statement {
Statement::ExpressionStatement(expr) => Some(self.eval_expr(expr)?),
Statement::Declaration(decl) => { self.eval_decl(decl)?; None }
})
}
fn conditional(&mut self, cond: Expr, then_clause: Vec<Stmt>, else_clause: Vec<Stmt>) -> EvalResult<Expr> {
let cond = self.expression(cond)?;
Ok(match cond {
Expr::Lit(Lit::Bool(true)) => self.block(then_clause)?,
Expr::Lit(Lit::Bool(false)) => self.block(else_clause)?,
_ => return Err(format!("Conditional with non-boolean condition"))
})
/// Install a declaration into the current environment.
///
/// Function declarations are stored unevaluated (body plus parameter
/// names); unit-struct type variants become `Custom` value bindings;
/// `let` bindings are evaluated eagerly. Tuple-struct and record
/// variants, and all other declaration forms, are not implemented yet.
fn eval_decl(&mut self, decl: Declaration) -> EvalResult<()> {
use self::Declaration::*;
use self::Variant::*;
match decl {
FuncDecl(signature, statements) => {
let name = signature.name;
let param_names: Vec<Rc<String>> = signature.params.iter().map(|fp| fp.0.clone()).collect();
self.insert(name, ValueEntry::Function { body: statements.clone(), param_names });
},
TypeDecl(_name, body) => {
for variant in body.0.iter() {
match variant {
// A unit variant evaluates to a nullary custom value named after itself.
&UnitStruct(ref name) => self.insert(name.clone(),
ValueEntry::Binding { val: FullyEvaluatedExpr::Custom { string_rep: name.clone() } }),
&TupleStruct(ref _name, ref _args) => unimplemented!(),
&Record(ref _name, ref _fields) => unimplemented!(),
};
}
},
Binding { name, expr, ..} => {
let val = self.eval_expr(expr)?;
self.insert(name.clone(), ValueEntry::Binding { val });
},
_ => return Err(format!("Declaration evaluation not yet implemented"))
}
Ok(())
}
fn value(&mut self, name: Rc<String>) -> EvalResult<Expr> {
use self::ValueEntry::*;
use self::Func::*;
//TODO add a layer of indirection here to talk to the symbol table first, and only then look up
//in the values table
fn eval_expr(&mut self, expr: Expression) -> EvalResult<FullyEvaluatedExpr> {
use self::ExpressionType::*;
use self::FullyEvaluatedExpr::*;
let symbol_table = self.symbol_table_handle.borrow();
let value = symbol_table.values.get(&name);
Ok(match value {
Some(Symbol { name, spec }) => match spec {
SymbolSpec::DataConstructor { type_name, type_args } => {
if type_args.len() == 0 {
Expr::Lit(Lit::Custom(name.clone(), vec![]))
} else {
return Err(format!("This data constructor thing not done"))
let expr_type = expr.0;
match expr_type {
IntLiteral(n) => Ok(UnsignedInt(n)),
FloatLiteral(f) => Ok(Float(f)),
StringLiteral(s) => Ok(Str(s.to_string())),
BoolLiteral(b) => Ok(Bool(b)),
PrefixExp(op, expr) => self.eval_prefix_exp(op, expr),
BinExp(op, lhs, rhs) => self.eval_binexp(op, lhs, rhs),
Value(name) => self.eval_value(name),
TupleLiteral(expressions) => {
let mut evals = Vec::new();
for expr in expressions {
match self.eval_expr(expr) {
Ok(fully_evaluated) => evals.push(fully_evaluated),
error => return error,
}
},
SymbolSpec::Func(_) => match self.values.lookup(&name) {
Some(Binding { val: Expr::Func(UserDefined { name, params, body }), .. }) => {
Expr::Func(UserDefined { name: name.clone(), params: params.clone(), body: body.clone() })
}
Ok(Tuple(evals))
}
Call { f, arguments } => {
let mut evaled_arguments = Vec::new();
for arg in arguments.into_iter() {
evaled_arguments.push(self.eval_expr(arg)?);
}
self.eval_application(*f, evaled_arguments)
},
Index { box indexee, indexers } => {
let evaled = self.eval_expr(indexee)?;
match evaled {
Tuple(mut exprs) => {
let len = indexers.len();
if len == 1 {
let idx = indexers.into_iter().nth(0).unwrap();
match self.eval_expr(idx)? {
UnsignedInt(n) if (n as usize) < exprs.len() => Ok(exprs.drain(n as usize..).next().unwrap()),
UnsignedInt(n) => Err(format!("Index {} out of range", n)),
other => Err(format!("{:?} is not an unsigned integer", other)),
}
} else {
Err(format!("Tuple index must be one integer"))
}
},
_ => unreachable!(),
},
_ => Err(format!("Bad index expression"))
}
},
/* see if it's an ordinary variable TODO make variables go in symbol table */
None => match self.values.lookup(&name) {
Some(Binding { val, .. }) => val.clone(),
None => return Err(format!("Couldn't find value {}", name)),
ListLiteral(items) => Ok(List(items.into_iter().map(|item| self.eval_expr(item)).collect::<Result<Vec<_>,_>>()?)),
x => Err(format!("Unimplemented thing {:?}", x)),
}
}
/// Apply `f` to already-evaluated `arguments`.
///
/// Built-in names dispatch to `eval_builtin`; user-defined functions run
/// their body in a child scope where each parameter is bound to its
/// argument. A body that produces no value evaluates to the unit
/// representation `"()"`. Applying anything else is an error.
fn eval_application(&mut self, f: Expression, arguments: Vec<FullyEvaluatedExpr>) -> EvalResult<FullyEvaluatedExpr> {
use self::ExpressionType::*;
match f {
// Built-ins are checked first so user bindings cannot shadow them.
Expression(Value(ref identifier), _) if self.is_builtin(identifier) => self.eval_builtin(identifier, arguments),
Expression(Value(identifier), _) => {
match self.lookup(&identifier) {
Some(&ValueEntry::Function { ref body, ref param_names }) => {
if arguments.len() != param_names.len() {
return Err(format!("Wrong number of arguments for the function"));
}
// Child scope: parameter bindings live here, outer names remain visible.
let mut new_state = State::new_with_parent(self);
let sub_ast = body.clone();
for (param, val) in param_names.iter().zip(arguments.into_iter()) {
new_state.insert(param.clone(), ValueEntry::Binding { val });
}
// The function's value is that of its last statement, if any.
let mut ret: Option<FullyEvaluatedExpr> = None;
for statement in sub_ast.into_iter() {
ret = new_state.eval_statement(statement)?;
}
Ok(ret.unwrap_or(FullyEvaluatedExpr::Custom { string_rep: Rc::new("()".to_string()) }))
},
_ => Err(format!("Function {} not found", identifier)),
}
},
x => Err(format!("Trying to apply {:?} which is not a function", x)),
}
}
/// Report whether `name` refers to one of the interpreter's built-in
/// functions (currently just the printing primitives).
fn is_builtin(&self, name: &Rc<String>) -> bool {
    let s: &str = name.as_ref();
    s == "print" || s == "println"
}
/// Execute a built-in function on already-evaluated arguments.
///
/// Only reachable for names accepted by `is_builtin`; any other name is a
/// logic error. Both built-ins print their arguments and evaluate to the
/// empty tuple.
fn eval_builtin(&mut self, name: &Rc<String>, args: Vec<FullyEvaluatedExpr>) -> EvalResult<FullyEvaluatedExpr> {
    use self::FullyEvaluatedExpr::*;
    let builtin: &str = name.as_ref();
    if builtin == "print" {
        for arg in args.into_iter() {
            print!("{}", arg.to_string());
        }
    } else if builtin == "println" {
        for arg in args.into_iter() {
            println!("{}", arg.to_string());
        }
    } else {
        unreachable!()
    }
    Ok(Tuple(vec![]))
}
/// Resolve a bare identifier against the environment chain.
///
/// Bindings evaluate to a clone of their stored value; functions evaluate
/// to a function literal carrying their name. Unknown names are an error.
fn eval_value(&mut self, name: Rc<String>) -> EvalResult<FullyEvaluatedExpr> {
    use self::ValueEntry::*;
    let entry = match self.lookup(&name) {
        Some(entry) => entry,
        None => return Err(format!("Value {} not found", *name)),
    };
    match entry {
        &Binding { ref val } => Ok(val.clone()),
        &Function { .. } => Ok(FullyEvaluatedExpr::FuncLit(name.clone())),
    }
}
/// Evaluate a binary operation: evaluate both operands eagerly, then
/// dispatch on (operator sigil, operand types).
///
/// Arithmetic is defined on unsigned ints only (except `++` on strings);
/// `/` always yields a float, `//` is integer division with an explicit
/// divide-by-zero check. Unmatched combinations are an error.
// NOTE(review): `l - r` on u64 underflows (panics in debug builds) when
// r > l — probably wants the same explicit-check treatment as `//`.
fn eval_binexp(&mut self, op: BinOp, lhs: Box<Expression>, rhs: Box<Expression>) -> EvalResult<FullyEvaluatedExpr> {
use self::FullyEvaluatedExpr::*;
let evaled_lhs = self.eval_expr(*lhs)?;
let evaled_rhs = self.eval_expr(*rhs)?;
let sigil = op.sigil();
//let sigil: &str = op.sigil().as_ref().as_str();
Ok(match (sigil.as_str(), evaled_lhs, evaled_rhs) {
("+", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l + r),
("++", Str(s1), Str(s2)) => Str(format!("{}{}", s1, s2)),
("-", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l - r),
("*", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l * r),
("/", UnsignedInt(l), UnsignedInt(r)) => Float((l as f64)/ (r as f64)),
("//", UnsignedInt(l), UnsignedInt(r)) => if r == 0 {
return Err(format!("Runtime error: divide by zero"));
} else {
UnsignedInt(l / r)
},
("%", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l % r),
("^", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l ^ r),
("&", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l & r),
("|", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l | r),
_ => return Err(format!("Runtime error: not yet implemented")),
})
}
/// Apply a prefix operator (`!`, unary `-`, unary `+`) to an
/// already-evaluated operand. Unmatched operator/operand pairs
/// are a runtime error.
fn eval_prefix_exp(&mut self, op: PrefixOp, expr: Box<Expression>) -> EvalResult<FullyEvaluatedExpr> {
    use self::FullyEvaluatedExpr::*;
    let operand = self.eval_expr(*expr)?;
    let sigil = op.sigil();
    // Dispatch on the operator's sigil paired with the operand's runtime type.
    match (sigil.as_str(), operand) {
        ("!", Bool(b)) => Ok(Bool(!b)),
        // Negating an unsigned int promotes it to a signed int.
        ("-", UnsignedInt(n)) => Ok(SignedInt(-1*(n as i64))),
        ("-", SignedInt(n)) => Ok(SignedInt(-1*(n as i64))),
        ("+", SignedInt(n)) => Ok(SignedInt(n)),
        ("+", UnsignedInt(n)) => Ok(UnsignedInt(n)),
        _ => Err(format!("Runtime error: not yet implemented")),
    }
}
}
#[cfg(test)]
mod eval_tests {
use std::cell::RefCell;
use std::rc::Rc;
use symbol_table::SymbolTable;
use tokenizing::tokenize;
use parsing::parse;
use eval::State;
// Builds a fresh interpreter environment, runs `$string` through the full
// tokenize -> parse -> symbol-table -> reduce -> evaluate pipeline, and
// asserts that the REPL rendering of the last result equals `$correct`.
macro_rules! fresh_env {
($string:expr, $correct:expr) => {
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
let mut state = State::new(symbol_table);
let ast = parse(tokenize($string)).0.unwrap();
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast);
let reduced = ast.reduce(&state.symbol_table_handle.borrow());
let all_output = state.evaluate(reduced, true);
let ref output = all_output.last().unwrap();
assert_eq!(**output, Ok($correct.to_string()));
}
}
// Literals, arithmetic, mutable rebinding, and tuple rendering.
#[test]
fn test_basic_eval() {
fresh_env!("1 + 2", "3");
fresh_env!("let mut a = 1; a = 2", "Unit");
fresh_env!("let mut a = 1; a = 2; a", "2");
fresh_env!(r#"("a", 1 + 2)"#, r#"("a", 3)"#);
}
// User-defined function application, including an evaluated argument.
#[test]
fn function_eval() {
fresh_env!("fn oi(x) { x + 1 }; oi(4)", "5");
fresh_env!("fn oi(x) { x + 1 }; oi(1+2)", "4");
}
// A binding inside a function shadows the outer one without mutating it.
#[test]
fn scopes() {
let scope_ok = r#"
let a = 20
fn haha() {
let a = 10
a
}
haha()
"#;
fresh_env!(scope_ok, "10");
let scope_ok = r#"
let a = 20
fn haha() {
let a = 10
a
}
a
"#;
fresh_env!(scope_ok, "20");
}
}

View File

@@ -1,4 +1,3 @@
#![feature(trace_macros)]
#![feature(slice_patterns, box_patterns, box_syntax)]
#![feature(proc_macro)]
extern crate itertools;
@@ -6,14 +5,11 @@ extern crate itertools;
extern crate lazy_static;
#[macro_use]
extern crate maplit;
#[macro_use]
extern crate schala_repl;
#[macro_use]
extern crate schala_codegen;
use std::cell::RefCell;
use std::rc::Rc;
use std::collections::HashMap;
use itertools::Itertools;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, UnfinishedComputation, FinishedComputation};
@@ -21,127 +17,111 @@ macro_rules! bx {
($e:expr) => { Box::new($e) }
}
mod util;
mod builtin;
mod tokenizing;
mod ast;
mod parsing;
mod symbol_table;
mod typechecking;
mod reduced_ast;
mod eval;
//trace_macros!(true);
#[derive(ProgrammingLanguageInterface)]
#[LanguageName = "Schala"]
#[SourceFileExtension = "schala"]
#[PipelineSteps(tokenizing, parsing(compact,expanded,trace), symbol_table, typechecking, ast_reducing, eval)]
use self::typechecking::{TypeContext};
/* TODO eventually custom-derive ProgrammingLanguageInterface with compiler passes as options */
pub struct Schala {
state: eval::State<'static>,
symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
type_context: typechecking::TypeContext<'static>,
type_context: TypeContext
}
impl Schala {
fn new_blank_env() -> Schala {
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
Schala {
symbol_table: symbols.clone(),
type_context: typechecking::TypeContext::new(symbols.clone()),
state: eval::State::new(symbols),
}
}
pub fn new() -> Schala {
let prelude = r#"
type Option<T> = Some(T) | None
"#;
let mut s = Schala::new_blank_env();
s.execute_pipeline(prelude, &EvalOptions::default());
s
}
}
fn tokenizing(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
let tokens = tokenizing::tokenize(input);
comp.map(|comp| {
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
comp.add_artifact(TraceArtifact::new("tokens", token_string));
});
let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
if errors.len() == 0 {
Ok(tokens)
} else {
Err(format!("{:?}", errors))
}
}
fn parsing(_handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
let (ast, trace) = parsing::parse(input);
comp.map(|comp| {
//TODO need to control which of these debug stages get added
let opt = comp.cur_debug_options.get(0).map(|s| s.clone());
match opt {
None => comp.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast))),
Some(ref s) if s == "compact" => comp.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast))),
Some(ref s) if s == "expanded" => comp.add_artifact(TraceArtifact::new("ast", format!("{:#?}", ast))),
Some(ref s) if s == "trace" => comp.add_artifact(TraceArtifact::new_parse_trace(trace)),
Some(ref x) => println!("Bad parsing debug option: {}", x),
};
});
ast.map_err(|err| err.msg)
}
fn symbol_table(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
let add = handle.symbol_table.borrow_mut().add_top_level_symbols(&input);
match add {
Ok(()) => {
let artifact = TraceArtifact::new("symbol_table", handle.symbol_table.borrow().debug_symbol_table());
comp.map(|comp| comp.add_artifact(artifact));
Ok(input)
},
Err(msg) => Err(msg)
}
}
fn typechecking(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
match handle.type_context.type_check_ast(&input) {
Ok(ty) => {
comp.map(|c| {
c.add_artifact(TraceArtifact::new("type_table", format!("{}", handle.type_context.debug_types())));
c.add_artifact(TraceArtifact::new("type_check", format!("{:?}", ty)));
});
Ok(input)
},
Err(msg) => {
comp.map(|comp| {
comp.add_artifact(TraceArtifact::new("type_table", format!("{}", handle.type_context.debug_types())));
comp.add_artifact(TraceArtifact::new("type_check", format!("Type error: {:?}", msg)));
});
Ok(input)
Schala {
state: eval::State::new(),
type_context: TypeContext::new(),
}
}
}
fn ast_reducing(handle: &mut Schala, input: ast::AST, comp: Option<&mut UnfinishedComputation>) -> Result<reduced_ast::ReducedAST, String> {
let ref symbol_table = handle.symbol_table.borrow();
let output = input.reduce(symbol_table);
comp.map(|comp| comp.add_artifact(TraceArtifact::new("ast_reducing", format!("{:?}", output))));
Ok(output)
impl ProgrammingLanguageInterface for Schala {
    // Generates the pass-based pipeline entry points (see schala_codegen).
    schala_codegen::compiler_pass_sequence!(["tokenize", "parse", "yolo"]);

    fn get_language_name(&self) -> String {
        "Schala".to_string()
    }

    fn get_source_file_suffix(&self) -> String {
        "schala".to_string()
    }

    /// Runs the legacy monolithic pipeline: tokenize -> parse -> symbol
    /// table -> typecheck -> evaluate, accumulating debug artifacts per
    /// the flags in `options.debug`.
    fn execute(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
        let mut evaluation = UnfinishedComputation::default();

        // tokenizing
        let tokens = tokenizing::tokenize(input);
        if options.debug.tokens {
            let token_string = tokens.iter()
                .map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1))
                .join(", ");
            evaluation.add_artifact(TraceArtifact::new("tokens", token_string));
        }
        {
            // Scope this borrow of `tokens` so the Vec can be moved into the parser.
            let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
            if !token_errors.is_empty() {
                return evaluation.output(Err(format!("Tokenization error: {:?}\n", token_errors)));
            }
        }

        // parsing
        let ast = match parsing::parse(tokens) {
            (Ok(ast), trace) => {
                if options.debug.parse_tree {
                    evaluation.add_artifact(TraceArtifact::new_parse_trace(trace));
                }
                if options.debug.ast {
                    evaluation.add_artifact(TraceArtifact::new("ast", format!("{:#?}", ast)));
                }
                ast
            },
            (Err(err), trace) => {
                if options.debug.parse_tree {
                    evaluation.add_artifact(TraceArtifact::new_parse_trace(trace));
                }
                return evaluation.output(Err(format!("Parse error: {:?}\n", err.msg)));
            }
        };

        // symbol table — failure is non-fatal here and only surfaces as a debug artifact
        match self.type_context.add_top_level_types(&ast) {
            Ok(()) => (),
            Err(msg) => {
                if options.debug.type_checking {
                    evaluation.add_artifact(TraceArtifact::new("type_check", msg));
                }
            }
        };

        // typechecking — also advisory; errors do not abort evaluation
        match self.type_context.type_check_ast(&ast) {
            Ok(ty) => {
                if options.debug.type_checking {
                    evaluation.add_artifact(TraceArtifact::new("type_check", format!("{:?}", ty)));
                }
            },
            Err(msg) => evaluation.add_artifact(TraceArtifact::new("type_check", msg)),
        };

        // Only render the symbol-table dump when it is actually requested
        // (debug_symbol_table just formats state; presumably side-effect free).
        if options.debug.symbol_table {
            evaluation.add_artifact(TraceArtifact::new("symbol_table", self.type_context.debug_symbol_table()));
        }

        // evaluation: collect short-circuits on the first runtime error,
        // otherwise join per-statement outputs with newlines.
        let evaluation_outputs = self.state.evaluate(ast);
        let text_output: Result<Vec<String>, String> = evaluation_outputs
            .into_iter()
            .collect();
        let eval_output = text_output
            .map(|v| v.into_iter().intersperse("\n".to_string()).collect());
        evaluation.output(eval_output)
    }
}
/// Pipeline stage: evaluates a reduced AST in the REPL's persistent state,
/// returning all per-statement outputs joined with newlines, or the first
/// runtime error.
fn eval(handle: &mut Schala, input: reduced_ast::ReducedAST, comp: Option<&mut UnfinishedComputation>) -> Result<String, String> {
    if let Some(comp) = comp {
        // Snapshot interpreter state before evaluation for debugging.
        comp.add_artifact(TraceArtifact::new("value_state", handle.state.debug_print()));
    }
    let evaluation_outputs = handle.state.evaluate(input, true);
    // Collecting into Result short-circuits on the first Err.
    let text_output: Result<Vec<String>, String> = evaluation_outputs
        .into_iter()
        .collect();
    text_output.map(|v| v.into_iter().intersperse("\n".to_string()).collect())
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,217 +0,0 @@
use std::rc::Rc;
use ast::{AST, Statement, Expression, Declaration, Discriminator, IfExpressionBody, Pattern};
use symbol_table::{Symbol, SymbolSpec, SymbolTable};
use builtin::{BinOp, PrefixOp};
/// Output of AST lowering: a flat list of reduced statements
/// (see `AST::reduce`).
#[derive(Debug)]
pub struct ReducedAST(pub Vec<Stmt>);
/// A lowered statement.
#[derive(Debug, Clone)]
pub enum Stmt {
    // A function binding captured before evaluation; presumably resolved
    // into a real binding by the evaluator — TODO confirm.
    PreBinding {
        name: Rc<String>,
        func: Func,
    },
    // A value binding; `constant` distinguishes immutable from mutable.
    Binding {
        name: Rc<String>,
        constant: bool,
        expr: Expr,
    },
    // A bare expression statement.
    Expr(Expr),
    // A statement with no runtime effect (e.g. type declarations).
    Noop,
}
/// A lowered expression.
#[derive(Debug, Clone)]
pub enum Expr {
    Lit(Lit),
    Func(Func),
    Tuple(Vec<Expr>),
    // Data-constructor application; `variant` looks like an index into the
    // type's variant list — TODO confirm: the construction site in
    // `Expression::reduce` builds this with a different field name.
    Constructor {
        variant: usize,
        expr: Box<Expr>,
    },
    // A variable reference by name.
    Val(Rc<String>),
    // Function application.
    Call {
        f: Box<Expr>,
        args: Vec<Expr>,
    },
    // Assignment; `val` is the assignment target, `expr` the new value —
    // presumably; confirm against `BinOp::reduce` (lhs -> val, rhs -> expr).
    Assign {
        val: Box<Expr>,
        expr: Box<Expr>,
    },
    // if/else with statement-list branches.
    Conditional {
        cond: Box<Expr>,
        then_clause: Vec<Stmt>,
        else_clause: Vec<Stmt>,
    },
    // Pattern match: each arm pairs a surface `Pattern` with its body.
    Match {
        cond: Box<Expr>,
        arms: Vec<(Pattern, Vec<Stmt>)>
    },
    // Placeholder for constructs the reducer does not handle yet.
    UnimplementedSigilValue
}
/// Reduced pattern form; only the wildcard exists so far.
/// Derives added for consistency with the other reduced-AST node types.
#[derive(Debug, Clone)]
pub enum Pat {
    Ignored
}
/// Literal values in the reduced AST.
#[derive(Debug, Clone)]
pub enum Lit {
    Nat(u64),
    Int(i64),
    Float(f64),
    Bool(bool),
    StringLit(Rc<String>),
    // A user-defined-type value: constructor name plus its arguments.
    Custom(Rc<String>, Vec<Expr>),
}
/// A callable value.
#[derive(Debug, Clone)]
pub enum Func {
    // Built-in operation identified by its operator sigil / name.
    BuiltIn(Rc<String>),
    UserDefined {
        // None for anonymous functions.
        name: Option<Rc<String>>,
        params: Vec<Rc<String>>,
        body: Vec<Stmt>,
    }
}
impl AST {
    /// Lowers the whole surface AST to a `ReducedAST` by reducing each
    /// top-level statement in order.
    pub fn reduce(&self, symbol_table: &SymbolTable) -> ReducedAST {
        let stmts = self.0
            .iter()
            .map(|statement| statement.reduce(symbol_table))
            .collect();
        ReducedAST(stmts)
    }
}
impl Statement {
    /// Lowers a single surface statement to a reduced `Stmt`.
    fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
        use ast::Statement::*;
        match self {
            ExpressionStatement(expr) => Stmt::Expr(expr.reduce(symbol_table)),
            Declaration(decl) => decl.reduce(symbol_table),
        }
    }
}
impl Expression {
    /// Lowers a surface expression, consulting the symbol table to decide
    /// whether a bare name is a data constructor or a plain variable.
    fn reduce(&self, symbol_table: &SymbolTable) -> Expr {
        use ast::ExpressionType::*;
        let ref input = self.0;
        match input {
            NatLiteral(n) => Expr::Lit(Lit::Nat(*n)),
            FloatLiteral(f) => Expr::Lit(Lit::Float(*f)),
            StringLiteral(s) => Expr::Lit(Lit::StringLit(s.clone())),
            BoolLiteral(b) => Expr::Lit(Lit::Bool(*b)),
            BinExp(binop, lhs, rhs) => binop.reduce(symbol_table, lhs, rhs),
            PrefixExp(op, arg) => op.reduce(symbol_table, arg),
            //remember Some(5) is a CallExpr
            // => ast: Ok(AST([ExpressionStatement(Expression(Call { f: Expression(Value("Some"), None), arguments: [Expression(NatLiteral(5), None)] }, None))]))
            Value(name) => {
                match symbol_table.values.get(name) {
                    // NOTE(review): `type_args` is bound but unused, and
                    // `Expr::Constructor` is declared with fields `variant`/`expr`,
                    // not `type_name` — this arm appears inconsistent with the enum
                    // definition; confirm which shape is intended before building.
                    Some(Symbol { spec: SymbolSpec::DataConstructor { type_args, .. }, .. }) => {
                        Expr::Constructor { type_name: name.clone() }
                    },
                    _ => Expr::Val(name.clone()),
                }
            },
            Call { f, arguments } => Expr::Call {
                f: Box::new(f.reduce(symbol_table)),
                args: arguments.iter().map(|arg| arg.reduce(symbol_table)).collect(),
            },
            TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| e.reduce(symbol_table)).collect()),
            IfExpression { discriminator, body } => reduce_if_expression(discriminator, body, symbol_table),
            // Anything not handled yet lowers to a placeholder value.
            _ => Expr::UnimplementedSigilValue,
        }
    }
}
/// Lowers the unified `if` form: a simple conditional becomes
/// `Expr::Conditional`; a single-pattern form becomes a two-arm
/// `Expr::Match` (the pattern arm plus an `Ignored` fallback arm).
///
/// Panics on non-simple discriminators and on guard-list bodies, which are
/// not implemented yet.
fn reduce_if_expression(discriminator: &Discriminator, body: &IfExpressionBody, symbol_table: &SymbolTable) -> Expr {
    let cond = Box::new(match *discriminator {
        Discriminator::Simple(ref expr) => expr.reduce(symbol_table),
        _ => panic!(),
    });
    match *body {
        IfExpressionBody::SimpleConditional(ref then_clause, ref else_clause) => {
            let then_clause = then_clause.iter().map(|expr| expr.reduce(symbol_table)).collect();
            // A missing else-branch lowers to an empty statement list.
            let else_clause = match else_clause {
                None => vec![],
                Some(stmts) => stmts.iter().map(|expr| expr.reduce(symbol_table)).collect(),
            };
            Expr::Conditional { cond, then_clause, else_clause }
        },
        IfExpressionBody::SimplePatternMatch(ref pat, ref then_clause, ref else_clause) => {
            let then_clause = then_clause.iter().map(|expr| expr.reduce(symbol_table)).collect();
            let else_clause = match else_clause {
                None => vec![],
                Some(stmts) => stmts.iter().map(|expr| expr.reduce(symbol_table)).collect(),
            };
            // Desugar to a match: pattern arm first, wildcard arm as the else.
            Expr::Match {
                cond,
                arms: vec![
                    (pat.clone(), then_clause),
                    (Pattern::Ignored, else_clause)
                ],
            }
        },
        IfExpressionBody::GuardList(ref _guard_arms) => panic!(),
    }
}
impl Pattern {
    /// Lowers a surface pattern. Only the wildcard is supported so far;
    /// every structured pattern panics (TODO implement).
    /// The symbol table is not consulted yet, hence the underscore.
    fn reduce(&self, _symbol_table: &SymbolTable) -> Pat {
        match self {
            Pattern::Ignored => Pat::Ignored,
            Pattern::TuplePattern(_) => panic!(),
            Pattern::Literal(_) => panic!(),
            Pattern::TupleStruct(_, _) => panic!(),
            Pattern::Record(_, _) => panic!(),
        }
    }
}
impl Declaration {
    /// Lowers a declaration: bindings and function declarations become
    /// statements, pure type-level declarations become `Noop`.
    fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
        use self::Declaration::*;
        use ::ast::Signature;
        match self {
            Binding {name, constant, expr } => Stmt::Binding { name: name.clone(), constant: *constant, expr: expr.reduce(symbol_table) },
            FuncDecl(Signature { name, params, .. }, statements) => Stmt::PreBinding {
                name: name.clone(),
                func: Func::UserDefined {
                    name: Some(name.clone()),
                    // Parameter type annotations (param.1) are dropped at this stage.
                    params: params.iter().map(|param| param.0.clone()).collect(),
                    body: statements.iter().map(|stmt| stmt.reduce(symbol_table)).collect(),
                }
            },
            TypeDecl { .. } => Stmt::Noop,
            TypeAlias(_, _) => Stmt::Noop,
            Interface { .. } => Stmt::Noop,
            // Impls (and anything else unhandled) lower to a placeholder expression.
            Impl { .. } => Stmt::Expr(Expr::UnimplementedSigilValue),
            _ => Stmt::Expr(Expr::UnimplementedSigilValue)
        }
    }
}
impl BinOp {
    /// Lowers a binary expression: the `=` sigil becomes an assignment node;
    /// every other operator becomes a call to the built-in named by its sigil.
    fn reduce(&self, symbol_table: &SymbolTable, lhs: &Box<Expression>, rhs: &Box<Expression>) -> Expr {
        let sigil = self.sigil();
        if **sigil == "=" {
            let val = Box::new(lhs.reduce(symbol_table));
            let expr = Box::new(rhs.reduce(symbol_table));
            Expr::Assign { val, expr }
        } else {
            let args = vec![lhs.reduce(symbol_table), rhs.reduce(symbol_table)];
            let f = Box::new(Expr::Func(Func::BuiltIn(sigil.clone())));
            Expr::Call { f, args }
        }
    }
}
impl PrefixOp {
    /// Lowers a prefix expression into a call to the built-in named by the
    /// operator's sigil.
    fn reduce(&self, symbol_table: &SymbolTable, arg: &Box<Expression>) -> Expr {
        let builtin = Expr::Func(Func::BuiltIn(self.sigil().clone()));
        Expr::Call {
            f: Box::new(builtin),
            args: vec![arg.reduce(symbol_table)],
        }
    }
}

View File

@@ -1,121 +0,0 @@
use std::collections::HashMap;
use std::rc::Rc;
use std::fmt;
use std::fmt::Write;
use ast;
use typechecking::TypeName;
//cf. p. 150 or so of Language Implementation Patterns
/// Maps top-level names to their symbols, enabling forward references in
/// later compiler passes.
pub struct SymbolTable {
    pub values: HashMap<Rc<String>, Symbol> //TODO this will eventually have real type information
}
impl SymbolTable {
    /// Creates an empty table.
    pub fn new() -> SymbolTable {
        SymbolTable { values: HashMap::new() }
    }
}
/// A named entry in the symbol table.
#[derive(Debug)]
pub struct Symbol {
    pub name: Rc<String>,
    pub spec: SymbolSpec,
}
impl fmt::Display for Symbol {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "<Name: {}, Spec: {}>", self.name, self.spec)
    }
}
/// What kind of thing a symbol names.
#[derive(Debug)]
pub enum SymbolSpec {
    // A function; holds one placeholder type name per parameter
    // (see `add_top_level_symbols`).
    Func(Vec<TypeName>),
    // A data constructor of `type_name` taking arguments typed `type_args`.
    DataConstructor {
        type_name: Rc<String>,
        type_args: Vec<Rc<String>>,
    },
}
impl fmt::Display for SymbolSpec {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::SymbolSpec::*;
        match self {
            Func(type_names) => write!(f, "Func({:?})", type_names),
            DataConstructor { type_name, type_args } => write!(f, "DataConstructor({:?} -> {})", type_args, type_name),
        }
    }
}
impl SymbolTable {
    /* note: this adds names for *forward reference* but doesn't actually create any types. solve that problem
     * later */
    /// Walks the top level of `ast`, recording a `Symbol` for every function
    /// signature/declaration and for every data-type variant, so later
    /// passes can resolve names before their definitions are processed.
    pub fn add_top_level_symbols(&mut self, ast: &ast::AST) -> Result<(), String> {
        use self::ast::{Statement, TypeName, Variant, TypeSingletonName, TypeBody};
        use self::ast::Declaration::*;
        for statement in ast.0.iter() {
            if let Statement::Declaration(decl) = statement {
                match decl {
                    FuncSig(signature) | FuncDecl(signature, _) => {
                        // One fresh placeholder type name ('a', 'b', ...) per
                        // parameter; declared annotations are ignored for now
                        // (TODO eventually handle annotated params differently —
                        // the previous code had identical arms for both cases).
                        let mut ch: char = 'a';
                        let mut types = vec![];
                        for _param in signature.params.iter() {
                            types.push(Rc::new(format!("{}", ch)));
                            ch = ((ch as u8) + 1) as char;
                        }
                        let spec = SymbolSpec::Func(types);
                        self.values.insert(
                            signature.name.clone(),
                            Symbol { name: signature.name.clone(), spec }
                        );
                    },
                    TypeDecl { name: TypeSingletonName { name, .. }, body: TypeBody(variants), .. } => {
                        // Register every variant as a data constructor of this type.
                        for var in variants {
                            match var {
                                Variant::UnitStruct(variant_name) => {
                                    let spec = SymbolSpec::DataConstructor {
                                        type_name: name.clone(),
                                        type_args: vec![],
                                    };
                                    self.values.insert(variant_name.clone(), Symbol { name: variant_name.clone(), spec });
                                },
                                Variant::TupleStruct(variant_name, tuple_members) => {
                                    let type_args = tuple_members.iter().map(|type_name| match type_name {
                                        TypeName::Singleton(TypeSingletonName { name, ..}) => name.clone(),
                                        TypeName::Tuple(_) => unimplemented!(),
                                    }).collect();
                                    let spec = SymbolSpec::DataConstructor {
                                        type_name: name.clone(),
                                        type_args
                                    };
                                    let symbol = Symbol { name: variant_name.clone(), spec };
                                    self.values.insert(variant_name.clone(), symbol);
                                },
                                e => return Err(format!("{:?} not supported in typing yet", e)),
                            }
                        }
                    },
                    _ => ()
                }
            }
        }
        Ok(())
    }

    /// Renders the table as human-readable text for debug artifacts.
    pub fn debug_symbol_table(&self) -> String {
        let mut output = String::from("Symbol table\n");
        for (name, sym) in &self.values {
            write!(output, "{} -> {}\n", name, sym).unwrap();
        }
        output
    }
}

View File

@@ -3,6 +3,7 @@ use std::collections::HashMap;
use std::rc::Rc;
use std::iter::{Iterator, Peekable};
use std::fmt;
use ::schala_codegen;
#[derive(Debug, PartialEq, Clone)]
pub enum TokenType {
@@ -45,13 +46,11 @@ impl fmt::Display for TokenType {
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Kw {
If, Then, Else,
Is,
If, Else,
Func,
For, While,
For,
Match,
Const, Let, In,
Mut,
Var, Const, Let, In,
Return,
Alias, Type, SelfType, SelfIdent,
Interface, Impl,
@@ -63,16 +62,14 @@ lazy_static! {
static ref KEYWORDS: HashMap<&'static str, Kw> =
hashmap! {
"if" => Kw::If,
"then" => Kw::Then,
"else" => Kw::Else,
"is" => Kw::Is,
"fn" => Kw::Func,
"for" => Kw::For,
"while" => Kw::While,
"match" => Kw::Match,
"var" => Kw::Var,
"const" => Kw::Const,
"let" => Kw::Let,
"in" => Kw::In,
"mut" => Kw::Mut,
"return" => Kw::Return,
"alias" => Kw::Alias,
"type" => Kw::Type,
@@ -93,9 +90,9 @@ pub struct Token {
}
impl Token {
pub fn get_error(&self) -> Option<String> {
pub fn get_error(&self) -> Option<&String> {
match self.token_type {
TokenType::Error(ref s) => Some(s.clone()),
TokenType::Error(ref s) => Some(s),
_ => None,
}
}
@@ -109,8 +106,9 @@ fn is_operator(c: &char) -> bool {
OPERATOR_CHARS.iter().any(|x| x == c)
}
type CharData = (usize, usize, char);
type CharIter<I: Iterator<Item=(usize,usize,char)>> = Peekable<I>;
#[schala_codegen::compiler_pass = "tokenization"]
pub fn tokenize(input: &str) -> Vec<Token> {
let mut tokens: Vec<Token> = Vec::new();
@@ -168,7 +166,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
tokens
}
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
fn handle_digit<I: Iterator<Item=(usize,usize,char)>>(c: char, input: &mut CharIter<I>) -> TokenType {
if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
input.next();
let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
@@ -183,7 +181,7 @@ fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) ->
}
}
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
fn handle_quote<I: Iterator<Item=(usize,usize,char)>>(input: &mut CharIter<I>) -> TokenType {
let mut buf = String::new();
loop {
match input.next().map(|(_, _, c)| { c }) {
@@ -208,7 +206,7 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType
TokenType::StrLiteral(Rc::new(buf))
}
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
fn handle_alphabetic<I: Iterator<Item=(usize,usize,char)>>(c: char, input: &mut CharIter<I>) -> TokenType {
let mut buf = String::new();
buf.push(c);
if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
@@ -231,7 +229,7 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>
}
}
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
fn handle_operator<I: Iterator<Item=(usize,usize,char)>>(c: char, input: &mut CharIter<I>) -> TokenType {
match c {
'<' | '>' | '|' | '.' => {
let ref next = input.peek().map(|&(_, _, c)| { c });

View File

@@ -1,381 +1,163 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::collections::HashMap;
use std::char;
use std::fmt;
use std::fmt::Write;
/*
use std::collections::hash_set::Union;
use std::iter::Iterator;
use itertools::Itertools;
*/
use ast;
use util::StateStack;
use symbol_table::{SymbolSpec, SymbolTable};
use parsing;
pub type TypeName = Rc<String>;
type TypeResult<T> = Result<T, String>;
pub struct TypeContext {
type_var_count: u64,
bindings: HashMap<Rc<String>, Type>,
}
#[derive(Debug, PartialEq, Clone)]
enum Type {
pub enum Type {
Const(TConst),
Var(TypeName),
Func(Vec<Type>),
Sum(Vec<Type>),
Func(Box<Type>, Box<Type>),
UVar(String),
EVar(u64),
Void
}
#[derive(Debug, PartialEq, Clone)]
enum TConst {
Unit,
Nat,
StringT,
Custom(String)
}
#[derive(Debug, PartialEq, Clone)]
struct Scheme {
names: Vec<TypeName>,
ty: Type,
}
impl fmt::Display for Scheme {
impl fmt::Display for Type {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "∀{:?} . {:?}", self.names, self.ty)
}
}
#[derive(Debug, PartialEq, Clone)]
struct Substitution(HashMap<TypeName, Type>);
impl Substitution {
fn empty() -> Substitution {
Substitution(HashMap::new())
}
}
#[derive(Debug, PartialEq, Clone)]
struct TypeEnv(HashMap<TypeName, Scheme>);
impl TypeEnv {
fn default() -> TypeEnv {
TypeEnv(HashMap::new())
}
fn populate_from_symbols(&mut self, symbol_table: &SymbolTable) {
for (name, symbol) in symbol_table.values.iter() {
if let SymbolSpec::Func(ref type_names) = symbol.spec {
let mut ch: char = 'a';
let mut names = vec![];
for _ in type_names.iter() {
names.push(Rc::new(format!("{}", ch)));
ch = ((ch as u8) + 1) as char;
use self::Type::*;
match self {
&Const(ref c) => write!(f, "{:?}", c),
&Sum(ref types) => {
write!(f, "(")?;
for item in types.iter().map(|ty| Some(ty)).intersperse(None) {
match item {
Some(ty) => write!(f, "{}", ty)?,
None => write!(f, ",")?,
};
}
let sigma = Scheme {
names: names.clone(),
ty: Type::Func(names.into_iter().map(|n| Type::Var(n)).collect())
};
self.0.insert(name.clone(), sigma);
}
write!(f, ")")
},
&Func(ref a, ref b) => write!(f, "{} -> {}", a, b),
&UVar(ref s) => write!(f, "{}_u", s),
&EVar(ref n) => write!(f, "{}_e", n),
&Void => write!(f, "Void")
}
}
}
pub struct TypeContext<'a> {
values: StateStack<'a, TypeName, Type>,
symbol_table_handle: Rc<RefCell<SymbolTable>>,
global_env: TypeEnv
#[derive(Default)]
struct UVarGenerator {
n: u32,
}
impl<'a> TypeContext<'a> {
pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> TypeContext<'static> {
TypeContext { values: StateStack::new(None), global_env: TypeEnv::default(), symbol_table_handle }
impl UVarGenerator {
fn new() -> UVarGenerator {
UVarGenerator::default()
}
pub fn debug_types(&self) -> String {
let mut output = format!("Type environment\n");
for (name, scheme) in &self.global_env.0 {
write!(output, "{} -> {}\n", name, scheme).unwrap();
}
output
}
pub fn type_check_ast(&mut self, input: &ast::AST) -> Result<String, String> {
let ref symbol_table = self.symbol_table_handle.borrow();
self.global_env.populate_from_symbols(symbol_table);
let output = self.global_env.infer_block(&input.0)?;
Ok(format!("{:?}", output))
fn next(&mut self) -> Type {
//TODO handle this in the case where someone wants to make a function with more than 26 variables
let s = format!("{}", unsafe { char::from_u32_unchecked(self.n + ('a' as u32)) });
self.n += 1;
Type::UVar(s)
}
}
impl TypeEnv {
fn instantiate(&mut self, sigma: Scheme) -> Type {
match sigma {
Scheme { ty, .. } => ty,
}
}
fn generate(&mut self, ty: Type) -> Scheme {
Scheme {
names: vec![], //TODO incomplete
ty
}
}
fn infer_block(&mut self, block: &Vec<ast::Statement>) -> TypeResult<Type> {
let mut output = Type::Const(TConst::Unit);
for statement in block {
output = self.infer_statement(statement)?;
}
Ok(output)
}
fn infer_statement(&mut self, statement: &ast::Statement) -> TypeResult<Type> {
match statement {
ast::Statement::ExpressionStatement(expr) => self.infer_expr(expr),
ast::Statement::Declaration(decl) => self.infer_decl(decl)
}
}
fn infer_decl(&mut self, decl: &ast::Declaration) -> TypeResult<Type> {
use ast::Declaration::*;
match decl {
Binding { name, expr, .. } => {
let ty = self.infer_expr(expr)?;
let sigma = self.generate(ty);
self.0.insert(name.clone(), sigma);
},
_ => (),
}
Ok(Type::Const(TConst::Unit))
}
fn infer_expr(&mut self, expr: &ast::Expression) -> TypeResult<Type> {
match expr {
ast::Expression(expr, Some(anno)) => {
self.infer_exprtype(expr)
},
ast::Expression(expr, None) => {
self.infer_exprtype(expr)
}
}
}
fn infer_exprtype(&mut self, expr: &ast::ExpressionType) -> TypeResult<Type> {
use self::TConst::*;
use ast::ExpressionType::*;
Ok(match expr {
NatLiteral(_) => Type::Const(Nat),
StringLiteral(_) => Type::Const(StringT),
BinExp(op, lhs, rhs) => {
return Err(format!("NOTDONE"))
},
Call { f, arguments } => {
return Err(format!("NOTDONE"))
},
Value(name) => {
let s = match self.0.get(name) {
Some(sigma) => sigma.clone(),
None => return Err(format!("Unknown variable: {}", name))
};
self.instantiate(s)
},
_ => Type::Const(Unit)
})
}
#[derive(Debug, PartialEq, Clone)]
pub enum TConst {
Unit,
Int,
Float,
StringT,
Bool,
Custom(String),
}
/* GIANT TODO - use the rust im crate, unless I make this code way less haskell-ish after it's done
*/
/*
pub type TypeResult<T> = Result<T, String>;
*/
/* TODO this should just check the name against a map, and that map should be pre-populated with
* types */
/*
impl parsing::TypeName {
fn to_type(&self) -> TypeResult<Type> {
use self::parsing::TypeSingletonName;
use self::parsing::TypeName::*;
use self::Type::*; use self::TConstOld::*;
use self::parsing::TypeSingletonName;
use self::parsing::TypeName::*;
use self::Type::*; use self::TConst::*;
Ok(match self {
Tuple(_) => return Err(format!("Tuples not yet implemented")),
Singleton(name) => match name {
TypeSingletonName { name, .. } => match &name[..] {
/*
"Nat" => Const(Nat),
&Tuple(_) => return Err(format!("Tuples not yet implemented")),
&Singleton(ref name) => match name {
&TypeSingletonName { ref name, .. } => match &name[..] {
"Int" => Const(Int),
"Float" => Const(Float),
"Bool" => Const(Bool),
"String" => Const(StringT),
*/
n => Const(Custom(n.to_string()))
}
}
})
}
}
*/
pub type TypeResult<T> = Result<T, String>;
/*
impl TypeContext {
pub fn type_check_ast(&mut self, ast: &parsing::AST) -> TypeResult<String> {
let ref block = ast.0;
let mut infer = Infer::default();
let env = TypeEnvironment::default();
let output = infer.infer_block(block, &env);
match output {
Ok(s) => Ok(format!("{:?}", s)),
Err(s) => Err(format!("Error: {:?}", s))
}
pub fn new() -> TypeContext {
TypeContext { bindings: HashMap::new(), type_var_count: 0 }
}
pub fn fresh(&mut self) -> Type {
let ret = self.type_var_count;
self.type_var_count += 1;
Type::EVar(ret)
}
}
// this is the equivalent of the Haskell Infer monad
#[derive(Debug, Default)]
struct Infer {
_idents: u32,
}
#[derive(Debug)]
enum InferError {
CannotUnify(MonoType, MonoType),
OccursCheckFailed(Rc<String>, MonoType),
UnknownIdentifier(Rc<String>),
Custom(String),
}
type InferResult<T> = Result<T, InferError>;
impl Infer {
fn fresh(&mut self) -> MonoType {
let i = self._idents;
self._idents += 1;
let name = Rc::new(format!("{}", ('a' as u8 + 1) as char));
MonoType::Var(name)
}
fn unify(&mut self, a: MonoType, b: MonoType) -> InferResult<Substitution> {
use self::InferError::*; use self::MonoType::*;
Ok(match (a, b) {
(Const(ref a), Const(ref b)) if a == b => Substitution::new(),
(Var(ref name), ref var) => Substitution::bind_variable(name, var),
(ref var, Var(ref name)) => Substitution::bind_variable(name, var),
(Function(box a1, box b1), Function(box a2, box b2)) => {
let s1 = self.unify(a1, a2)?;
let s2 = self.unify(b1.apply_substitution(&s1), b2.apply_substitution(&s1))?;
s1.merge(s2)
},
(a, b) => return Err(CannotUnify(a, b))
})
}
fn infer_block(&mut self, block: &Vec<parsing::Statement>, env: &TypeEnvironment) -> InferResult<MonoType> {
use self::parsing::Statement;
let mut ret = MonoType::Const(TypeConst::Unit);
for statement in block.iter() {
ret = match statement {
Statement::ExpressionStatement(expr) => {
let (sub, ty) = self.infer_expr(expr, env)?;
//TODO handle substitution monadically
ty
impl TypeContext {
pub fn add_top_level_types(&mut self, ast: &parsing::AST) -> TypeResult<()> {
use self::parsing::TypeName;
use self::parsing::Declaration::*;
use self::Type::*;
for statement in ast.0.iter() {
if let &self::parsing::Statement::Declaration(ref decl) = statement {
match decl {
&FuncSig(ref signature) | &FuncDecl(ref signature, _) => {
let mut uvar_gen = UVarGenerator::new();
let mut ty: Type = signature.type_anno.as_ref().map(|name: &TypeName| name.to_type()).unwrap_or_else(|| {Ok(uvar_gen.next())} )?;
for &(_, ref type_name) in signature.params.iter().rev() {
let arg_type = type_name.as_ref().map(|name| name.to_type()).unwrap_or_else(|| {Ok(uvar_gen.next())} )?;
ty = Func(bx!(arg_type), bx!(ty));
}
self.bindings.insert(signature.name.clone(), ty);
},
_ => ()
}
Statement::Declaration(decl) => MonoType::Const(TypeConst::Unit),
}
}
Ok(ret)
Ok(())
}
fn infer_expr(&mut self, expr: &parsing::Expression, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
use self::parsing::Expression;
match expr {
Expression(e, Some(anno)) => self.infer_annotated_expr(e, anno, env),
/*
let anno_ty = anno.to_type()?;
let ty = self.infer_exprtype(&e)?;
self.unify(ty, anno_ty)
},
*/
Expression(e, None) => self.infer_exprtype(e, env)
pub fn debug_symbol_table(&self) -> String {
let mut output = format!("Symbols\n");
for (sym, ty) in &self.bindings {
write!(output, "{} : {}\n", sym, ty).unwrap();
}
}
fn infer_annotated_expr(&mut self, expr: &parsing::ExpressionType, anno: &parsing::TypeName, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
Err(InferError::Custom(format!("exprtype not done: {:?}", expr)))
}
fn infer_exprtype(&mut self, expr: &parsing::ExpressionType, env: &TypeEnvironment) -> InferResult<(Substitution, MonoType)> {
use self::parsing::ExpressionType::*;
use self::TypeConst::*;
Ok(match expr {
NatLiteral(_) => (Substitution::new(), MonoType::Const(Nat)),
FloatLiteral(_) => (Substitution::new(), MonoType::Const(Float)),
StringLiteral(_) => (Substitution::new(), MonoType::Const(StringT)),
BoolLiteral(_) => (Substitution::new(), MonoType::Const(Bool)),
Value(name) => match env.lookup(name) {
Some(sigma) => {
let tau = self.instantiate(&sigma);
(Substitution::new(), tau)
},
None => return Err(InferError::UnknownIdentifier(name.clone())),
},
e => return Err(InferError::Custom(format!("Type inference for {:?} not done", e)))
})
}
fn instantiate(&mut self, sigma: &PolyType) -> MonoType {
let ref ty: MonoType = sigma.1;
let mut subst = Substitution::new();
for name in sigma.0.iter() {
let fresh_mvar = self.fresh();
let new = Substitution::bind_variable(name, &fresh_mvar);
subst = subst.merge(new);
}
ty.apply_substitution(&subst)
output
}
}
*/
/* OLD STUFF DOWN HERE */
/*
impl TypeContext {
fn infer_block(&mut self, statements: &Vec<parsing::Statement>) -> TypeResult<Type> {
let mut ret_type = Type::Const(TConst::Unit);
for statement in statements {
ret_type = self.infer_statement(statement)?;
pub fn type_check_ast(&mut self, ast: &parsing::AST) -> TypeResult<Type> {
use self::Type::*; use self::TConst::*;
let mut ret_type = Const(Unit);
for statement in ast.0.iter() {
ret_type = self.type_check_statement(statement)?;
}
Ok(ret_type)
}
fn infer_statement(&mut self, statement: &parsing::Statement) -> TypeResult<Type> {
fn type_check_statement(&mut self, statement: &parsing::Statement) -> TypeResult<Type> {
use self::parsing::Statement::*;
match statement {
ExpressionStatement(expr) => self.infer(expr),
Declaration(decl) => self.add_declaration(decl),
&ExpressionStatement(ref expr) => self.infer(expr),
&Declaration(ref decl) => self.add_declaration(decl),
}
}
fn add_declaration(&mut self, decl: &parsing::Declaration) -> TypeResult<Type> {
use self::parsing::Declaration::*;
use self::Type::*;
match decl {
Binding { name, expr, .. } => {
&Binding { ref name, ref expr, .. } => {
let ty = self.infer(expr)?;
self.bindings.insert(name.clone(), ty);
},
@@ -386,23 +168,23 @@ impl TypeContext {
fn infer(&mut self, expr: &parsing::Expression) -> TypeResult<Type> {
use self::parsing::Expression;
match expr {
Expression(e, Some(anno)) => {
&Expression(ref e, Some(ref anno)) => {
let anno_ty = anno.to_type()?;
let ty = self.infer_exprtype(&e)?;
self.unify(ty, anno_ty)
},
Expression(e, None) => self.infer_exprtype(e)
&Expression(ref e, None) => self.infer_exprtype(e)
}
}
fn infer_exprtype(&mut self, expr: &parsing::ExpressionType) -> TypeResult<Type> {
use self::parsing::ExpressionType::*;
use self::Type::*; use self::TConst::*;
match expr {
NatLiteral(_) => Ok(Const(Nat)),
FloatLiteral(_) => Ok(Const(Float)),
StringLiteral(_) => Ok(Const(StringT)),
BoolLiteral(_) => Ok(Const(Bool)),
BinExp(op, lhs, rhs) => { /* remember there are both the haskell convention talk and the write you a haskell ways to do this! */
&IntLiteral(_) => Ok(Const(Int)),
&FloatLiteral(_) => Ok(Const(Float)),
&StringLiteral(_) => Ok(Const(StringT)),
&BoolLiteral(_) => Ok(Const(Bool)),
&BinExp(ref op, ref lhs, ref rhs) => { /* remember there are both the haskell convention talk and the write you a haskell ways to do this! */
match op.get_type()? {
Func(box t1, box Func(box t2, box t3)) => {
let lhs_ty = self.infer(lhs)?;
@@ -414,7 +196,7 @@ impl TypeContext {
other => Err(format!("{:?} is not a binary function type", other))
}
},
PrefixExp(op, expr) => match op.get_type()? {
&PrefixExp(ref op, ref expr) => match op.get_type()? {
Func(box t1, box t2) => {
let expr_ty = self.infer(expr)?;
self.unify(t1, expr_ty)?;
@@ -422,13 +204,13 @@ impl TypeContext {
},
other => Err(format!("{:?} is not a prefix op function type", other))
},
Value(name) => {
&Value(ref name) => {
match self.bindings.get(name) {
Some(ty) => Ok(ty.clone()),
None => Err(format!("No binding found for variable: {}", name)),
}
},
Call { f, arguments } => {
&Call { ref f, ref arguments } => {
let mut tf = self.infer(f)?;
for arg in arguments.iter() {
match tf {
@@ -442,13 +224,22 @@ impl TypeContext {
}
Ok(tf)
},
TupleLiteral(expressions) => {
&TupleLiteral(ref expressions) => {
let mut types = vec![];
for expr in expressions {
types.push(self.infer(expr)?);
}
Ok(Sum(types))
},
/*
Index {
indexee: Box<Expression>,
indexers: Vec<Expression>,
},
IfExpression(Box<Expression>, Vec<Statement>, Option<Vec<Statement>>),
MatchExpression(Box<Expression>, Vec<MatchArm>),
ForExpression
*/
_ => Err(format!("Type not yet implemented"))
}
}
@@ -460,34 +251,4 @@ impl TypeContext {
}
}
}
*/
#[cfg(test)]
mod tests {
/*
use super::{Type, TConst, TypeContext};
use super::Type::*;
use super::TConst::*;
use std::rc::Rc;
use std::cell::RefCell;
macro_rules! type_test {
($input:expr, $correct:expr) => {
{
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
let mut tc = TypeContext::new(symbol_table);
let ast = ::ast::parse(::tokenizing::tokenize($input)).0.unwrap() ;
//tc.add_symbols(&ast);
assert_eq!($correct, tc.infer_block(&ast.0).unwrap())
}
}
}
#[test]
fn basic_inference() {
type_test!("30", Const(Nat));
//type_test!("fn x(a: Int): Bool {}; x(1)", TConst(Boolean));
}
*/
}

View File

@@ -1,43 +0,0 @@
use std::collections::HashMap;
use std::hash::Hash;
use std::cmp::Eq;
//TODO rename this ScopeStack
/// A chained scope map: lookups fall through from the innermost scope to
/// its ancestors. (TODO in file: rename to ScopeStack.)
#[derive(Default, Debug)]
pub struct StateStack<'a, T: 'a, V: 'a> where T: Hash + Eq {
    // Enclosing scope, if any; lookups fall back to it.
    parent: Option<&'a StateStack<'a, T, V>>,
    // Bindings local to this scope.
    values: HashMap<T, V>,
    // Optional human-readable name, used only for debugging.
    scope_name: Option<String>
}

// The `T: Hash + Eq` bound lives on the impl; the previous per-method
// `where` clauses were redundant and have been removed.
impl<'a, T, V> StateStack<'a, T, V> where T: Hash + Eq {
    /// Creates a root scope with no parent.
    pub fn new(name: Option<String>) -> StateStack<'a, T, V> {
        StateStack {
            parent: None,
            values: HashMap::new(),
            scope_name: name
        }
    }

    /// Binds `key` to `value` in the current (innermost) scope.
    pub fn insert(&mut self, key: T, value: V) {
        self.values.insert(key, value);
    }

    /// Looks `key` up here first, then recursively in ancestor scopes.
    pub fn lookup(&self, key: &T) -> Option<&V> {
        match (self.values.get(key), self.parent) {
            (None, None) => None,
            (None, Some(parent)) => parent.lookup(key),
            (Some(value), _) => Some(value),
        }
    }

    //TODO rename new_scope
    /// Creates a child scope whose lookups fall back to `self`.
    pub fn new_frame(&'a self, name: Option<String>) -> StateStack<'a, T, V> {
        StateStack {
            parent: Some(self),
            values: HashMap::default(),
            scope_name: name,
        }
    }

    /// Returns this scope's debug name, if set.
    pub fn get_name(&self) -> Option<&String> {
        self.scope_name.as_ref()
    }
}

View File

@@ -14,13 +14,12 @@ colored = "1.5"
serde = "1.0.15"
serde_derive = "1.0.15"
serde_json = "1.0.3"
rocket = "0.3.13"
rocket_codegen = "0.3.13"
rocket_contrib = "0.3.13"
rocket = "0.3.5"
rocket_codegen = "0.3.5"
rocket_contrib = "0.3.5"
phf = "0.7.12"
includedir = "0.2.0"
linefeed = "0.5.0"
regex = "0.2"
rustyline = "1.0.0"
[build-dependencies]
includedir_codegen = "0.2.0"

View File

@@ -6,21 +6,9 @@ pub struct LLVMCodeString(pub String);
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct EvalOptions {
pub execution_method: ExecutionMethod,
pub debug_passes: HashMap<String, PassDebugOptionsDescriptor>,
pub debug: DebugOptions,
pub execution_method: ExecutionMethod
}
#[derive(Debug, Hash, PartialEq)]
pub struct PassDescriptor {
pub name: String,
pub debug_options: Vec<String>
}
#[derive(Debug, Serialize, Deserialize)]
pub struct PassDebugOptionsDescriptor {
pub opts: Vec<String>,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum ExecutionMethod {
Compile,
@@ -32,27 +20,67 @@ impl Default for ExecutionMethod {
}
}
/// Per-stage debug-output toggles, serialized alongside the rest of the
/// REPL options (hence the serde derives). Each flag enables debug printing
/// for one compiler stage.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct DebugOptions {
    pub tokens: bool,        // show the tokenizer's token stream
    pub parse_tree: bool,    // show the parser's trace / parse tree
    pub ast: bool,           // show the parsed AST
    pub type_checking: bool, // show type-checker output
    pub symbol_table: bool,  // show the symbol table
    pub evaluation: bool,    // show evaluator debug info
    pub llvm_ir: bool,       // show generated LLVM IR
}
/// Container for one evaluation's results: accumulated per-stage debug
/// artifacts plus the program's final text output.
#[derive(Debug, Default)]
pub struct LanguageOutput {
    output: String,                // the final program output
    artifacts: Vec<TraceArtifact>, // per-stage debug traces, in insertion order
    pub failed: bool,              // set by callers on failure (public; see call sites)
}

impl LanguageOutput {
    /// Appends one stage's debug artifact.
    pub fn add_artifact(&mut self, artifact: TraceArtifact) {
        self.artifacts.push(artifact);
    }
    /// Sets (replaces, despite the name) the final program output.
    pub fn add_output(&mut self, output: String) {
        self.output = output;
    }
    /// Renders each artifact's colorized debug output on its own line,
    /// followed by the final program output, as one string.
    pub fn to_string(&self) -> String {
        let mut acc = String::new();
        for line in self.artifacts.iter() {
            acc.push_str(&line.debug_output.color(line.text_color).to_string());
            acc.push_str(&"\n");
        }
        acc.push_str(&self.output);
        acc
    }
    /// Prints each artifact as a colorized "stage: output" line, then the
    /// final program output, directly to stdout.
    pub fn print_to_screen(&self) {
        for line in self.artifacts.iter() {
            let color = line.text_color;
            let stage = line.stage_name.color(color).to_string();
            let output = line.debug_output.color(color).to_string();
            println!("{}: {}", stage, output);
        }
        println!("{}", self.output);
    }
}
#[derive(Debug, Default)]
pub struct UnfinishedComputation {
artifacts: Vec<(String, TraceArtifact)>,
pub cur_debug_options: Vec<String>,
artifacts: HashMap<String, TraceArtifact>,
}
#[derive(Debug)]
pub struct FinishedComputation {
artifacts: Vec<(String, TraceArtifact)>,
artifacts: HashMap<String, TraceArtifact>,
text_output: Result<String, String>,
}
impl UnfinishedComputation {
pub fn add_artifact(&mut self, artifact: TraceArtifact) {
self.artifacts.push((artifact.stage_name.clone(), artifact));
}
pub fn finish(self, text_output: Result<String, String>) -> FinishedComputation {
FinishedComputation {
artifacts: self.artifacts,
text_output
}
self.artifacts.insert(artifact.stage_name.clone(), artifact);
}
pub fn output(self, output: Result<String, String>) -> FinishedComputation {
FinishedComputation {
@@ -65,13 +93,14 @@ impl UnfinishedComputation {
impl FinishedComputation {
pub fn to_repl(&self) -> String {
let mut buf = String::new();
for (stage, artifact) in self.artifacts.iter() {
let color = artifact.text_color;
let stage = stage.color(color).bold();
let output = artifact.debug_output.color(color);
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
for stage in ["tokens", "parse_trace", "ast", "symbol_table", "type_check"].iter() {
if let Some(artifact) = self.artifacts.get(&stage.to_string()) {
let color = artifact.text_color;
let stage = stage.color(color).bold();
let output = artifact.debug_output.color(color);
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
}
}
match self.text_output {
Ok(ref output) => write!(&mut buf, "{}", output).unwrap(),
Err(ref err) => write!(&mut buf, "{} {}", "Error: ".red().bold(), err).unwrap(),
@@ -82,11 +111,13 @@ impl FinishedComputation {
match self.text_output {
Ok(_) => {
let mut buf = String::new();
for (stage, artifact) in self.artifacts.iter() {
let color = artifact.text_color;
let stage = stage.color(color).bold();
let output = artifact.debug_output.color(color);
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
for stage in ["tokens", "parse_trace", "ast", "symbol_table", "type_check"].iter() {
if let Some(artifact) = self.artifacts.get(&stage.to_string()) {
let color = artifact.text_color;
let stage = stage.color(color).bold();
let output = artifact.debug_output.color(color);
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
}
}
if buf == "" { None } else { Some(buf) }
},
@@ -106,7 +137,6 @@ impl TraceArtifact {
pub fn new(stage: &str, debug: String) -> TraceArtifact {
let color = match stage {
"parse_trace" | "ast" => "red",
"ast_reducing" => "red",
"tokens" => "green",
"type_check" => "magenta",
_ => "blue",
@@ -127,70 +157,25 @@ impl TraceArtifact {
}
pub trait ProgrammingLanguageInterface {
fn execute_pipeline(&mut self, _input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
FinishedComputation { artifacts: vec![], text_output: Err(format!("Execution pipeline not done")) }
/* old */
fn evaluate_in_repl(&mut self, _: &str, _: &EvalOptions) -> LanguageOutput {
LanguageOutput { output: format!("Defunct"), artifacts: vec![], failed: false }
}
/* old */
fn new_execute(&mut self, input: &str, _options: &EvalOptions) -> FinishedComputation {
FinishedComputation { artifacts: HashMap::new(), text_output: Err(format!("NOT DONE")) }
}
fn execute(&mut self, _input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
FinishedComputation { artifacts: HashMap::new(), text_output: Err(format!("REPL evaluation not implemented")) }
}
fn get_language_name(&self) -> String;
fn get_source_file_suffix(&self) -> String;
fn get_passes(&self) -> Vec<PassDescriptor> {
vec![]
}
fn handle_custom_interpreter_directives(&mut self, _commands: &Vec<&str>) -> Option<String> {
fn handle_custom_interpreter_directives(&mut self, commands: &Vec<&str>) -> Option<String> {
None
}
fn custom_interpreter_directives_help(&self) -> String {
format!(">> No custom interpreter directives specified <<")
}
}
/* a pass_chain function signature looks like:
* fn(&mut ProgrammingLanguageInterface, A, Option<&mut DebugHandler>) -> Result<B, String>
*
* TODO use some kind of failure-handling library to make this better
*/
#[macro_export]
/// Builds a closure that threads `text_input` through the listed compiler
/// passes in order, collecting debug artifacts in an `UnfinishedComputation`.
/// Expansion of the actual chain is delegated to `pass_chain_helper!`.
macro_rules! pass_chain {
    ($state:expr, $options:expr; $($pass:path), *) => {
        |text_input| {
            let mut comp = UnfinishedComputation::default();
            pass_chain_helper! { ($state, comp, $options); text_input $(, $pass)* }
        }
    };
}
#[macro_export]
/// Recursive helper for `pass_chain!`. Each expansion step runs one pass,
/// handing it a mutable handle to the computation (so the pass can record
/// debug artifacts) only when that pass's name appears in
/// `$options.debug_passes`. An `Err` from any pass short-circuits the chain
/// into a failed `FinishedComputation`.
macro_rules! pass_chain_helper {
    (($state:expr, $comp:expr, $options:expr); $input:expr, $pass:path $(, $rest:path)*) => {
        {
            use schala_repl::PassDebugOptionsDescriptor;
            // The pass's path (via stringify!) doubles as its debug-map key.
            let pass_name = stringify!($pass);
            let output = {
                let ref debug_map = $options.debug_passes;
                let debug_handle = match debug_map.get(pass_name) {
                    Some(PassDebugOptionsDescriptor { opts }) => {
                        // Debugging requested for this pass: expose the
                        // computation and record the active debug options.
                        let ptr = &mut $comp;
                        ptr.cur_debug_options = opts.clone();
                        Some(ptr)
                    }
                    _ => None
                };
                $pass($state, $input, debug_handle)
            };
            match output {
                // Success: recurse with this pass's result and the rest.
                Ok(result) => pass_chain_helper! { ($state, $comp, $options); result $(, $rest)* },
                Err(err) => {
                    $comp.output(Err(format!("Pass {} failed with {:?}", pass_name, err)))
                }
            }
        }
    };
    // Done
    // Base case: no passes left; wrap the final value as a success.
    (($state:expr, $comp:expr, $options:expr); $final_output:expr) => {
        {
            let final_output: FinishedComputation = $comp.finish(Ok($final_output));
            final_output
        }
    };
}

View File

@@ -3,7 +3,7 @@
#![feature(plugin)]
#![plugin(rocket_codegen)]
extern crate getopts;
extern crate linefeed;
extern crate rustyline;
extern crate itertools;
extern crate colored;
@@ -22,8 +22,9 @@ use std::process::exit;
use std::default::Default;
use std::fmt::Write as FmtWrite;
use colored::*;
use itertools::Itertools;
use rustyline::error::ReadlineError;
use rustyline::Editor;
use self::colored::*;
mod language;
mod webapp;
@@ -33,9 +34,7 @@ const VERSION_STRING: &'static str = "0.1.0";
include!(concat!(env!("OUT_DIR"), "/static.rs"));
pub use language::{LLVMCodeString, ProgrammingLanguageInterface, EvalOptions,
ExecutionMethod, TraceArtifact, FinishedComputation, UnfinishedComputation, PassDebugOptionsDescriptor, PassDescriptor};
pub use language::{LLVMCodeString, ProgrammingLanguageInterface, EvalOptions, ExecutionMethod, TraceArtifact, LanguageOutput, FinishedComputation, UnfinishedComputation};
pub type PLIGenerator = Box<Fn() -> Box<ProgrammingLanguageInterface> + Send + Sync>;
pub fn repl_main(generators: Vec<PLIGenerator>) {
@@ -64,12 +63,12 @@ pub fn repl_main(generators: Vec<PLIGenerator>) {
}
let mut options = EvalOptions::default();
let debug_passes = if let Some(opts) = option_matches.opt_str("debug") {
let output: Vec<String> = opts.split_terminator(",").map(|s| s.to_string()).collect();
output
} else {
vec![]
};
if let Some(ref ltrs) = option_matches.opt_str("debug") {
options.debug.tokens = ltrs.contains("l");
options.debug.ast = ltrs.contains("a");
options.debug.parse_tree = ltrs.contains("r");
options.debug.symbol_table = ltrs.contains("s");
}
let language_names: Vec<String> = languages.iter().map(|lang| {lang.get_language_name()}).collect();
let initial_index: usize =
@@ -89,12 +88,12 @@ pub fn repl_main(generators: Vec<PLIGenerator>) {
}
[_, ref filename, _..] => {
run_noninteractive(filename, languages, options, debug_passes);
run_noninteractive(filename, languages, options);
}
};
}
fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInterface>>, mut options: EvalOptions, debug_passes: Vec<String>) {
fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInterface>>, options: EvalOptions) {
let path = Path::new(filename);
let ext = path.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
println!("Source file lacks extension");
@@ -111,12 +110,6 @@ fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInte
source_file.read_to_string(&mut buffer).unwrap();
for pass in debug_passes.into_iter() {
if let Some(_) = language.get_passes().iter().find(|desc| desc.name == pass) {
options.debug_passes.insert(pass, PassDebugOptionsDescriptor { opts: vec![] });
}
}
match options.execution_method {
ExecutionMethod::Compile => {
/*
@@ -126,136 +119,35 @@ fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInte
panic!("Not ready to go yet");
},
ExecutionMethod::Interpret => {
let output = language.execute_pipeline(&buffer, &options);
let output = language.execute(&buffer, &options);
output.to_noninteractive().map(|text| println!("{}", text));
}
}
}
/// A tree of REPL interpreter directives: `Top` is the invisible root,
/// `NonTerminal` nodes have sub-commands, and `Terminal` nodes are leaf
/// commands. Each named node carries an optional help string. Used both for
/// dispatching sigil-prefixed commands and for driving tab-completion.
#[derive(Clone)]
enum CommandTree {
    Terminal(String, Option<String>),
    NonTerminal(String, Vec<CommandTree>, Option<String>),
    Top(Vec<CommandTree>),
}

impl CommandTree {
    /// Convenience constructor for a leaf command with optional help text.
    fn term(s: &str, help: Option<&str>) -> CommandTree {
        CommandTree::Terminal(s.to_string(), help.map(|x| x.to_string()))
    }
    /// The command word for this node; the root `Top` has no word of its own.
    fn get_cmd(&self) -> String {
        match self {
            CommandTree::Terminal(s, _) => s.to_string(),
            CommandTree::NonTerminal(s, _, _) => s.to_string(),
            CommandTree::Top(_) => "".to_string(),
        }
    }
    /// The node's help text, or an empty string if none was provided.
    fn get_help(&self) -> String {
        match self {
            // Idiomatic replacement for the original
            // `h.as_ref().map(|h| h.clone()).unwrap_or(format!(""))`.
            CommandTree::Terminal(_, h) | CommandTree::NonTerminal(_, _, h) =>
                h.clone().unwrap_or_default(),
            CommandTree::Top(_) => String::new(),
        }
    }
    /// The command words of this node's immediate children (empty for leaves).
    fn get_children(&self) -> Vec<String> {
        match self {
            CommandTree::Terminal(_, _) => vec![],
            CommandTree::NonTerminal(_, children, _) => children.iter().map(|x| x.get_cmd()).collect(),
            CommandTree::Top(children) => children.iter().map(|x| x.get_cmd()).collect(),
        }
    }
}
/// Linefeed tab-completion handler: completes REPL directive lines (those
/// starting with the interpreter sigil, e.g. `:`) by walking a `CommandTree`.
struct TabCompleteHandler {
    sigil: char,                     // directive prefix character
    top_level_commands: CommandTree, // root of the directive tree
}

use linefeed::complete::{Completion, Completer};
use linefeed::terminal::Terminal;

impl TabCompleteHandler {
    /// Bundles the sigil and directive tree into a completer.
    fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler {
        TabCompleteHandler {
            top_level_commands,
            sigil,
        }
    }
}
impl<T: Terminal> Completer<T> for TabCompleteHandler {
    /// Offers completions only for directive lines (those beginning with the
    /// sigil). Descends the `CommandTree` one level per fully-typed word,
    /// then returns the children of the reached node that start with `word`.
    fn complete(&self, word: &str, prompter: &linefeed::prompter::Prompter<T>, start: usize, _end: usize) -> Option<Vec<Completion>> {
        let line = prompter.buffer();
        if line.starts_with(&format!("{}", self.sigil)) {
            // Words between the sigil and the word currently being completed.
            let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace();
            let mut completions = Vec::new();
            let mut command_tree: Option<&CommandTree> = Some(&self.top_level_commands);
            loop {
                match words.next() {
                    None => {
                        // Reached the word under the cursor: emit matching children.
                        let top = match command_tree {
                            Some(CommandTree::Top(_)) => true,
                            _ => false
                        };
                        // At top level the word still carries the sigil; strip it.
                        let word = if top { word.get(1..).unwrap() } else { word };
                        for cmd in command_tree.map(|x| x.get_children()).unwrap_or(vec![]).into_iter() {
                            if cmd.starts_with(word) {
                                completions.push(Completion {
                                    // NOTE(review): hardcodes ':' rather than
                                    // using self.sigil — confirm intended.
                                    completion: format!("{}{}", if top { ":" } else { "" }, cmd),
                                    display: Some(cmd.clone()),
                                    suffix: linefeed::complete::Suffix::Some(' ')
                                })
                            }
                        }
                        break;
                    },
                    Some(s) => {
                        // Descend one tree level for each fully-typed word;
                        // an unknown word yields None (no completions below).
                        let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
                            CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
                            CommandTree::NonTerminal(_, children, _) => children.iter().find(|c| c.get_cmd() == s),
                            CommandTree::Terminal(_, _) => None,
                        });
                        command_tree = new_ptr;
                    }
                }
            }
            Some(completions)
        } else {
            None
        }
    }
}
struct Repl {
options: EvalOptions,
languages: Vec<Box<ProgrammingLanguageInterface>>,
current_language_index: usize,
interpreter_directive_sigil: char,
line_reader: linefeed::interface::Interface<linefeed::terminal::DefaultTerminal>,
console: rustyline::Editor<()>,
}
impl Repl {
fn new(languages: Vec<Box<ProgrammingLanguageInterface>>, initial_index: usize) -> Repl {
use linefeed::Interface;
let i = if initial_index < languages.len() { initial_index } else { 0 };
let line_reader = Interface::new("schala-repl").unwrap();
let console = Editor::<()>::new();
Repl {
options: Repl::get_options(),
languages: languages,
current_language_index: i,
interpreter_directive_sigil: ':',
line_reader
console
}
}
fn get_cur_language(&self) -> &ProgrammingLanguageInterface {
self.languages[self.current_language_index].as_ref()
}
fn get_options() -> EvalOptions {
File::open(".schala_repl")
.and_then(|mut file| {
@@ -283,33 +175,27 @@ impl Repl {
}
fn run(&mut self) {
use linefeed::ReadResult;
println!("Schala MetaInterpreter version {}", VERSION_STRING);
println!("Type {}help for help with the REPL", self.interpreter_directive_sigil);
self.line_reader.load_history(".schala_history").unwrap_or(());
self.console.get_history().load(".schala_history").unwrap_or(());
loop {
let language_name = self.languages[self.current_language_index].get_language_name();
let directives = self.get_directives();
let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, directives);
self.line_reader.set_completer(std::sync::Arc::new(tab_complete_handler));
let prompt_str = format!("{} >> ", language_name);
self.line_reader.set_prompt(&prompt_str);
match self.line_reader.read_line() {
match self.console.readline(&prompt_str) {
Err(ReadlineError::Eof) | Err(ReadlineError::Interrupted) => break,
Err(e) => {
println!("Terminal read error: {}", e);
},
Ok(ReadResult::Eof) => break,
Ok(ReadResult::Signal(_)) => break,
Ok(ReadResult::Input(ref input)) => {
self.line_reader.add_history_unique(input.to_string());
Ok(ref input) => {
let output = match input.chars().nth(0) {
Some(ch) if ch == self.interpreter_directive_sigil => self.handle_interpreter_directive(input),
_ => Some(self.input_handler(input)),
_ => {
self.console.get_history().add(input);
Some(self.input_handler(input))
}
};
if let Some(o) = output {
println!("=> {}", o);
@@ -317,53 +203,17 @@ impl Repl {
}
}
}
self.line_reader.save_history(".schala_history").unwrap_or(());
self.console.get_history().save(".schala_history").unwrap_or(());
self.save_options();
println!("Exiting...");
}
fn input_handler(&mut self, input: &str) -> String {
let ref mut language = self.languages[self.current_language_index];
let interpreter_output = language.execute_pipeline(input, &self.options);
let interpreter_output = language.new_execute(input, &self.options);
interpreter_output.to_repl()
}
fn get_directives(&self) -> CommandTree {
let ref passes = self.get_cur_language().get_passes();
let passes_directives: Vec<CommandTree> = passes.iter()
.map(|pass_descriptor| {
let name = &pass_descriptor.name;
if pass_descriptor.debug_options.len() == 0 {
CommandTree::term(name, None)
} else {
let sub_opts: Vec<CommandTree> = pass_descriptor.debug_options.iter()
.map(|o| CommandTree::term(o, None)).collect();
CommandTree::NonTerminal(
name.clone(),
sub_opts,
None
)
}
}).collect();
CommandTree::Top(vec![
CommandTree::term("exit", Some("exit the REPL")),
CommandTree::term("quit", Some("exit the REPL")),
CommandTree::term("help", Some("Print this help message")),
CommandTree::NonTerminal(format!("debug"), vec![
CommandTree::term("passes", None),
CommandTree::NonTerminal(format!("show"), passes_directives.clone(), None),
CommandTree::NonTerminal(format!("hide"), passes_directives.clone(), None),
], Some(format!("show or hide pass info for a given pass, or display the names of all passes"))),
CommandTree::NonTerminal(format!("lang"), vec![
CommandTree::term("next", None),
CommandTree::term("prev", None),
CommandTree::NonTerminal(format!("go"), vec![], None)//TODO
], Some(format!("switch between languages, or go directly to a langauge by name"))),
])
}
fn handle_interpreter_directive(&mut self, input: &str) -> Option<String> {
let mut iter = input.chars();
iter.next();
@@ -417,72 +267,52 @@ impl Repl {
"help" => {
let mut buf = String::new();
let ref lang = self.languages[self.current_language_index];
let directives = match self.get_directives() {
CommandTree::Top(children) => children,
_ => panic!("Top-level CommandTree not Top")
};
writeln!(buf, "MetaInterpreter options").unwrap();
writeln!(buf, "-----------------------").unwrap();
for directive in directives {
let trailer = " ";
writeln!(buf, "{}{}- {}", directive.get_cmd(), trailer, directive.get_help()).unwrap();
}
writeln!(buf, "").unwrap();
writeln!(buf, "exit | quit - exit the REPL").unwrap();
writeln!(buf, "lang [prev|next|go <name> |show] - toggle to previous or next language, go to a specific language by name, or show all languages").unwrap();
writeln!(buf, "Language-specific help for {}", lang.get_language_name()).unwrap();
writeln!(buf, "-----------------------").unwrap();
writeln!(buf, "{}", lang.custom_interpreter_directives_help()).unwrap();
Some(buf)
},
"debug" => self.handle_debug(commands),
"set" => {
let show = match commands.get(1) {
Some(&"show") => true,
Some(&"hide") => false,
Some(e) => {
return Some(format!("Bad `set` argument: {}", e));
}
None => {
return Some(format!("`set` - valid arguments `show {{option}}`, `hide {{option}}`"));
}
};
match commands.get(2) {
Some(&"tokens") => self.options.debug.tokens = show,
Some(&"parse") => self.options.debug.parse_tree = show,
Some(&"ast") => self.options.debug.ast = show,
Some(&"symbols") => self.options.debug.symbol_table = show,
Some(&"llvm") => self.options.debug.llvm_ir = show,
Some(e) => return Some(format!("Bad `show`/`hide` argument: {}", e)),
None => return Some(format!("`show`/`hide` requires an argument")),
};
None
},
"options" => {
let ref d = self.options.debug;
let tokens = if d.tokens { "true".green() } else { "false".red() };
let parse_tree = if d.parse_tree { "true".green() } else { "false".red() };
let ast = if d.ast { "true".green() } else { "false".red() };
let symbol_table = if d.symbol_table { "true".green() } else { "false".red() };
Some(format!(r#"Debug:
tokens: {}, parse: {}, ast: {}, symbols: {}"#, tokens, parse_tree, ast, symbol_table))
},
e => self.languages[self.current_language_index]
.handle_custom_interpreter_directives(&commands)
.or(Some(format!("Unknown command: {}", e)))
}
}
fn handle_debug(&mut self, commands: Vec<&str>) -> Option<String> {
let passes = self.get_cur_language().get_passes();
match commands.get(1) {
Some(&"passes") => Some(
passes.into_iter()
.map(|desc| {
if self.options.debug_passes.contains_key(&desc.name) {
let color = "green";
format!("*{}", desc.name.color(color))
} else {
desc.name
}
})
.intersperse(format!(" -> "))
.collect()),
b @ Some(&"show") | b @ Some(&"hide") => {
let show = b == Some(&"show");
let debug_pass: String = match commands.get(2) {
Some(s) => s.to_string(),
None => return Some(format!("Must specify a stage to debug")),
};
let pass_opt = commands.get(3);
if let Some(desc) = passes.iter().find(|desc| desc.name == debug_pass) {
let mut opts = vec![];
if let Some(opt) = pass_opt {
opts.push(opt.to_string());
}
let msg = format!("{} debug for pass {}", if show { "Enabling" } else { "Disabling" }, debug_pass);
if show {
self.options.debug_passes.insert(desc.name.clone(), PassDebugOptionsDescriptor { opts });
} else {
self.options.debug_passes.remove(&desc.name);
}
Some(msg)
} else {
Some(format!("Couldn't find stage: {}", debug_pass))
}
},
_ => Some(format!("Unknown debug command"))
}
}
}
/*

View File

@@ -35,8 +35,8 @@ struct Output {
fn interpreter_input(input: Json<Input>, generators: State<Vec<PLIGenerator>>) -> Json<Output> {
let schala_gen = generators.get(0).unwrap();
let mut schala: Box<ProgrammingLanguageInterface> = schala_gen();
let code_output = schala.execute_pipeline(&input.source, &EvalOptions::default());
Json(Output { text: code_output.to_repl() })
let code_output = schala.evaluate_in_repl(&input.source, &EvalOptions::default());
Json(Output { text: code_output.to_string() })
}
pub fn web_main(language_generators: Vec<PLIGenerator>) {

View File

@@ -1,12 +0,0 @@
for n <- 1..=100 {
if n % 15 == 0 {
print("FizzBuzz")
} else if n % 5 == 0 {
print("Buzz")
} else if n % 3 == 0 {
print("Fizz")
} else {
print(n.to_string())
}
}

View File

@@ -1,114 +0,0 @@
fn main() {
//comments are C-style
/* nested comments /* are cool */ */
}
@annotations are with @-
// variable expressions
var a: I32 = 20
const b: String = 20
there(); can(); be(); multiple(); statements(); per_line();
//string interpolation
const yolo = "I have ${a + b} people in my house"
// let expressions ??? not sure if I want this
let a = 10, b = 20, c = 30 in a + b + c
//list literal
const q = [1,2,3,4]
//lambda literal
q.map({|item| item * 100 })
fn yolo(a: MyType, b: YourType): ReturnType<Param1, Param2> {
if a == 20 {
return "early"
}
var sex = 20
sex
}
/* for/while loop topics */
//infinite loop
while {
if x() { break }
...
}
//conditional loop
while conditionHolds() {
...
}
//iteration over a variable
for i <- [1..1000] {
} //return type is return type of block
//monadic decomposition
for {
a <- maybeInt();
s <- foo()
} return {
a + s
} //return type is Monad<return type of block>
/* end of for loops */
/* conditionals/pattern matching */
// "is" operator for "does this pattern match"
x is Some(t) // type bool
if x {
is Some(t) => {
},
is None => {
}
}
//syntax is, I guess, for <expr> <brace-block>, where <expr> is a bool, or a <arrow-expr>
// type level alises
typealias <name> = <other type> #maybe thsi should be 'alias'?
/*
what if type A = B meant that you could had to create A's with A(B), but when you used A's the interface was exactly like B's?
maybe introduce a 'newtype' keyword for this
*/
//declaring types of all stripes
type MyData = { a: i32, b: String }
type MyType = MyType
type Option<a> = None | Some(a)
type Signal = Absence | SimplePresence(i32) | ComplexPresence {a: i32, b: MyCustomData}
//traits
trait Bashable { }
trait Luggable {
fn lug(self, a: Option<Self>)
}
}
// lambdas
// ruby-style not rust-style
const a: X -> Y -> Z = {|x,y| }

View File

@@ -1,17 +1,105 @@
println(sua(4))
fn main() {
//comments are C-style
/* nested comments /* are cool */ */
fn sua(x): Int {
x + 10
}
@annotations are with @-
//const a = getline()
// variable expressions
var a: I32 = 20
const b: String = 20
there(); can(); be(); multiple(); statements(); per_line();
//string interpolation
const yolo = "I have ${a + b} people in my house"
// let expressions ??? not sure if I want this
let a = 10, b = 20, c = 30 in a + b + c
//list literal
const q = [1,2,3,4]
//lambda literal
q.map({|item| item * 100 })
fn yolo(a: MyType, b: YourType): ReturnType<Param1, Param2> {
if a == 20 {
return "early"
}
var sex = 20
sex
}
for {
//infinite loop
}
//iteration over a variable
for i <- [1..1000] {
} //return type is return type of block
//while loop
for a != 3 || fuckTard() {
break
} //return type is return type of block
//monadic decomposition
for {
a <- maybeInt();
s <- foo()
} return {
a + s
} //return type is Monad<return type of block>
// let statements too!!
for (a = 20
b = fuck) {
a + b
}
// pattern-matching
match <expr> {
Some(a) => {
},
None => {
},
}
//syntax is, I guess, for <expr> <brace-block>, where <expr> is a bool, or a <arrow-expr>
// type level alises
typealias <name> = <other type> #maybe thsi should be 'alias'?
/*
if a == "true" {
println("You typed true")
} else {
println("You typed something else")
what if type A = B meant that you could had to create A's with A(B), but when you used A's the interface was exactly like B's?
maybe introduce a 'newtype' keyword for this
*/
//declaring types of all stripes
type MyData = { a: i32, b: String }
type MyType = MyType
type Option<a> = None | Some(a)
type Signal = Absence | SimplePresence(i32) | ComplexPresence {a: i32, b: MyCustomData}
//traits
trait Bashable { }
trait Luggable {
fn lug(self, a: Option<Self>)
}
}
*/
// lambdas
// ruby-style not rust-style
const a: X -> Y -> Z = {|x,y| }

View File

@@ -1,12 +0,0 @@
const c = 10
fn add(a, b) {
const c = a + b
c
}
var b = 20
println(add(1,2))
println(c + b)