Compare commits: antiquated...fb26293157 (218 commits)
.gitignore (vendored) — 1 change

```diff
@@ -1,3 +1,4 @@
 Cargo.lock
 target
 .schala_repl
+.schala_history
```
Cargo.toml — 14 changes

```diff
@@ -8,15 +8,13 @@ authors = ["greg <greg.shuflin@protonmail.com>"]
 llvm-sys = "*"
 take_mut = "0.1.3"
 itertools = "0.5.8"
-getopts = "*"
-linefeed = "0.2.2"
 lazy_static = "0.2.8"
 maplit = "*"
 colored = "1.5"
-serde = "1.0.15"
-serde_derive = "1.0.15"
-serde_json = "1.0.3"
-rocket = "*"
-rocket_codegen = "*"
-rocket_contrib = "*"
+
+schala-lib = { path = "schala-lib" }
+
+[build-dependencies]
+includedir_codegen = "0.2.0"
+
+[workspace]
```
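Taken together with the new `schala-lib/Cargo.toml` further down, these manifest changes split the project into a thin binary crate plus a reusable library crate. The sketch below is an inferred picture of the resulting workspace layout, assembled from the paths that appear in this compare view; it is not itself part of the diff:

```
.
├── Cargo.toml            # root binary crate; depends on schala-lib by path, declares [workspace]
├── schala-lib/
│   ├── Cargo.toml
│   ├── build.rs          # embeds ../static via includedir_codegen
│   └── src/              # REPL driver, web app, ProgrammingLanguageInterface
├── src/                  # per-language frontends (schala, maaru, robo, rukka) and main.rs
├── source_files/         # example programs (e.g. first.schala, test.rukka)
└── static/               # web assets served by the Rocket app
```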
README.md — 24 changes

```diff
@@ -1,18 +1,22 @@
-TODO:
--null-only language should be called Maaru
--haskell-ish langauge should be called Robo
--typeful scripting language should be called schala
-rename accordingly!
-
 # Schala - a programming language meta-interpreter
 
-Schala is a Rust-language framework written to make it easy to
+Schala is a Rust framework written to make it easy to
 create and experiment with toy programming languages. It provides
-a common REPL, and a trait `ProgrammingLanguage` with methods
+a common REPL, and a trait `ProgrammingLanguage` with provisions
 for tokenizing text, parsing tokens, evaluating an abstract syntax tree,
 and other tasks that are common to all programming languages.
 
+Schala is implemented as a Rust library `schala_lib`, which provides a
+`schala_main` function. This function serves as the main loop of the REPL, if run
+interactively, or otherwise reads and interprets programming language source
+files. It expects as input a vector of `PLIGenerator`, which is a type representing
+a closure that returns a boxed trait object that implements the `ProgrammingLanguage` trait,
+and stores any persistent state relevant to that programming language. The ability
+to share state between different programming languages is in the works.
+
+## About
+
 Schala started out life as an experiment in writing a Javascript-like
 programming language that would never encounter any kind of runtime value
 error, but rather always return `null` under any kind of error condition. I had
@@ -29,6 +33,8 @@ creating a language name confusingly close to Scala. The naming scheme for
 languages implemented with the Schala meta-interpreter is Chrono Trigger
 characters.
 
+Schala is incomplete alpha software and is not ready for public release.
+
 ## Languages implemented using the meta-interpreter
 
 * The eponymous *Schala* language is an interpreted/compiled scripting langauge,
@@ -43,6 +49,8 @@ system.
 * *Robo* is an experiment in creating a lazy, functional, strongly-typed language
 much like Haskell
 
+* *Rukka* is a straightforward LISP implementation
+
 ## Reference works
 
 Here's a partial list of resources I've made use of in the process
```
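The new README paragraph above describes the `PLIGenerator`/`schala_main` API. As a concrete illustration, the snippet below shows how a consuming binary registers languages; it mirrors the new `src/main.rs` shown near the end of this compare and only compiles inside that crate (the `schala_lang` module is assumed to exist there):

```rust
extern crate schala_lib;
use schala_lib::{PLIGenerator, schala_main};

mod schala_lang; // one of the per-language frontends under src/

fn main() {
  // Each generator is a closure that builds a boxed ProgrammingLanguage trait object.
  let generators: Vec<PLIGenerator> = vec![
    Box::new(|| { Box::new(schala_lang::Schala::new()) }),
  ];
  schala_main(generators); // runs the REPL, or interprets source files passed on the command line
}
```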
TODO.md — new file, 46 lines

```markdown
# TODO Items

* Share state between programming languages

* idea for Schala - scoped types - be able to define a quick enum type scoped to a function ro something, that only is meant to be used as a quick bespoke interface between two other things

* another idea, allow:
  type enum {
    type enum MySubVariant {
      SubVariant1, SubVariant2, etc.
    }
    Variant1(MySubVariant),
    Variant2(...),
  }

* idea for Schala: both currying *and* default arguments!
   ex. fn a(b: Int, c:Int, d:Int = 1) -> Int
   a(1,2) : Int
   a(1,2,d=2): Int
   a(_,1,3) : Int -> Int
   a(1,2, c=_): Int -> Int
   a(_,_,_) : Int -> Int -> Int -> Int

- AST : maybe replace the Expression type with "Ascription(TypeName, Box<Expression>) nodes??
- parser: add a "debug" field to the Parser struct for all debug-related things

-scala-style html"dfasfsadf${}" string interpolations!

*Compiler passes architecture

-ProgrammingLanguageInterface defines a evaluate_in_repl() and evaluate_no_repl() functions
-these take in a vec of CompilerPasses

struct CompilerPass {
  name: String,
  run: fn(PrevPass) -> NextPass
}

-change "Type...." names in parser.rs to "Anno..." for non-collision with names in typechecking.rs

-get rid of code pertaining to compilation specifically, have a more generation notion of "execution type"
```
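The "Compiler passes architecture" note above only sketches a `CompilerPass` struct. Below is a small, self-contained Rust illustration of that idea; the `String -> String` signature and the `run_pipeline` helper are placeholders invented for this example, since the TODO leaves the real `PrevPass`/`NextPass` types open:

```rust
// Illustrative sketch of the compiler-pass pipeline idea from TODO.md.
struct CompilerPass {
  name: &'static str,
  run: fn(String) -> String,
}

// Thread the output of each pass into the next one, in order.
fn run_pipeline(input: String, passes: &[CompilerPass]) -> String {
  passes.iter().fold(input, |acc, pass| {
    println!("running pass: {}", pass.name);
    (pass.run)(acc)
  })
}

fn tokenize(src: String) -> String { format!("tokens({})", src) }
fn parse(tokens: String) -> String { format!("ast({})", tokens) }

fn main() {
  let passes = [
    CompilerPass { name: "tokenize", run: tokenize },
    CompilerPass { name: "parse", run: parse },
  ];
  let out = run_pipeline("1 + 2".to_string(), &passes);
  println!("{}", out); // prints: ast(tokens(1 + 2))
}
```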
schala-lib/Cargo.toml — new file, 25 lines

```toml
[package]
name = "schala-lib"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]

[dependencies]
llvm-sys = "*"
take_mut = "0.1.3"
itertools = "0.5.8"
getopts = "*"
lazy_static = "0.2.8"
maplit = "*"
colored = "1.5"
serde = "1.0.15"
serde_derive = "1.0.15"
serde_json = "1.0.3"
rocket = "0.3.5"
rocket_codegen = "0.3.5"
rocket_contrib = "0.3.5"
phf = "0.7.12"
includedir = "0.2.0"
rustyline = "1.0.0"

[build-dependencies]
includedir_codegen = "0.2.0"
```
schala-lib/build.rs — new file, 10 lines

```rust
extern crate includedir_codegen;

use includedir_codegen::Compression;

fn main() {
  includedir_codegen::start("WEBFILES")
    .dir("../static", Compression::Gzip)
    .build("static.rs")
    .unwrap();
}
```
schala-lib/src/language.rs (name inferred; the file-header line was not captured in this view)

```diff
@@ -2,17 +2,6 @@ extern crate colored;
 
 use self::colored::*;
 
-#[derive(Debug)]
-pub struct TokenError {
-  pub msg: String,
-}
-
-impl TokenError {
-  pub fn new(msg: &str) -> TokenError {
-    TokenError { msg: msg.to_string() }
-  }
-}
-
 pub struct LLVMCodeString(pub String);
 
 #[derive(Debug, Default, Serialize, Deserialize)]
@@ -27,12 +16,13 @@ pub struct EvalOptions {
 }
 
 #[derive(Debug, Default)]
-pub struct ReplOutput {
+pub struct LanguageOutput {
   output: String,
-  artifacts: Vec<TraceArtifact>
+  artifacts: Vec<TraceArtifact>,
+  failed: bool,
 }
 
-impl ReplOutput {
+impl LanguageOutput {
   pub fn add_artifact(&mut self, artifact: TraceArtifact) {
     self.artifacts.push(artifact);
   }
@@ -52,7 +42,10 @@ impl ReplOutput {
 
   pub fn print_to_screen(&self) {
     for line in self.artifacts.iter() {
-      println!("{}: {}", line.stage_name, line.debug_output);
+      let color = line.text_color;
+      let stage = line.stage_name.color(color).to_string();
+      let output = line.debug_output.color(color).to_string();
+      println!("{}: {}", stage, output);
     }
     println!("{}", self.output);
   }
@@ -98,7 +91,10 @@ impl TraceArtifact {
 }
 
 pub trait ProgrammingLanguageInterface {
-  fn evaluate_in_repl(&mut self, input: &str, eval_options: &EvalOptions) -> ReplOutput;
+  fn evaluate_in_repl(&mut self, input: &str, eval_options: &EvalOptions) -> LanguageOutput;
+  fn evaluate_noninteractive(&mut self, input: &str, eval_options: &EvalOptions) -> LanguageOutput {
+    self.evaluate_in_repl(input, eval_options)
+  }
  fn get_language_name(&self) -> String;
  fn get_source_file_suffix(&self) -> String;
  fn compile(&mut self, _input: &str) -> LLVMCodeString {
```
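To make the revised trait concrete, here is a hedged sketch of a trivial language implementing the new `evaluate_in_repl`/`evaluate_noninteractive` contract. The `Echo` type is invented for illustration and is not part of the repository; it would live in a crate that depends on `schala-lib`:

```rust
extern crate schala_lib;
use schala_lib::{ProgrammingLanguageInterface, EvalOptions, LanguageOutput};

struct Echo;

impl ProgrammingLanguageInterface for Echo {
  fn get_language_name(&self) -> String { format!("echo") }
  fn get_source_file_suffix(&self) -> String { format!("echo") }

  fn evaluate_in_repl(&mut self, input: &str, _options: &EvalOptions) -> LanguageOutput {
    let mut output = LanguageOutput::default();
    output.add_output(format!("{}", input)); // just echo the input back
    output
  }
  // evaluate_noninteractive() is inherited from the trait's new default method,
  // which simply forwards to evaluate_in_repl().
}
```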
schala-lib/src/lib.rs — new file, 378 lines

```rust
#![feature(link_args)]
#![feature(advanced_slice_patterns, slice_patterns, box_patterns, box_syntax)]
#![feature(plugin)]
#![plugin(rocket_codegen)]
extern crate getopts;
extern crate rustyline;
extern crate itertools;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate maplit;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate rocket;
extern crate rocket_contrib;
extern crate includedir;
extern crate phf;

use std::path::Path;
use std::fs::File;
use std::io::{Read, Write};
use std::process::exit;
use std::default::Default;

use rustyline::error::ReadlineError;
use rustyline::Editor;

mod language;
mod webapp;
pub mod llvm_wrap;

include!(concat!(env!("OUT_DIR"), "/static.rs"));

pub use language::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, LanguageOutput, LLVMCodeString};
pub type PLIGenerator = Box<Fn() -> Box<ProgrammingLanguageInterface> + Send + Sync>;

pub fn schala_main(generators: Vec<PLIGenerator>) {
  let languages: Vec<Box<ProgrammingLanguageInterface>> = generators.iter().map(|x| x()).collect();

  let option_matches = program_options().parse(std::env::args()).unwrap_or_else(|e| {
    println!("{:?}", e);
    exit(1);
  });

  if option_matches.opt_present("list-languages") {
    for lang in languages {
      println!("{}", lang.get_language_name());
    }
    exit(1);
  }

  if option_matches.opt_present("help") {
    println!("{}", program_options().usage("Schala metainterpreter"));
    exit(0);
  }

  if option_matches.opt_present("webapp") {
    webapp::web_main(generators);
    exit(0);
  }

  let language_names: Vec<String> = languages.iter().map(|lang| {lang.get_language_name()}).collect();
  let initial_index: usize =
    option_matches.opt_str("lang")
    .and_then(|lang| { language_names.iter().position(|x| { x.to_lowercase() == lang.to_lowercase() }) })
    .unwrap_or(0);

  let mut options = EvalOptions::default();
  options.compile = match option_matches.opt_str("eval-style") {
    Some(ref s) if s == "compile" => true,
    _ => false
  };

  match option_matches.free[..] {
    [] | [_] => {
      let mut repl = Repl::new(languages, initial_index);
      repl.options.show_llvm_ir = true; //TODO make this be configurable
      repl.run();
    }
    [_, ref filename, _..] => {
      run_noninteractive(filename, languages, options);
    }
  };
}

fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInterface>>, options: EvalOptions) {
  let path = Path::new(filename);
  let ext = path.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
    println!("Source file lacks extension");
    exit(1);
  });
  let mut language = Box::new(languages.into_iter().find(|lang| lang.get_source_file_suffix() == ext)
    .unwrap_or_else(|| {
      println!("Extension .{} not recognized", ext);
      exit(1);
    }));

  let mut source_file = File::open(path).unwrap();
  let mut buffer = String::new();

  source_file.read_to_string(&mut buffer).unwrap();

  if options.compile {
    if !language.can_compile() {
      panic!("Trying to compile a non-compileable language");
    } else {
      let llvm_bytecode = language.compile(&buffer);
      compilation_sequence(llvm_bytecode, filename);
    }
  } else {
    let output = language.evaluate_in_repl(&buffer, &options);
    // if output.has_error....
  }
}

struct Repl {
  options: EvalOptions,
  languages: Vec<Box<ProgrammingLanguageInterface>>,
  current_language_index: usize,
  interpreter_directive_sigil: char,
  console: rustyline::Editor<()>,
}

impl Repl {
  fn new(languages: Vec<Box<ProgrammingLanguageInterface>>, initial_index: usize) -> Repl {
    let i = if initial_index < languages.len() { initial_index } else { 0 };

    let console = Editor::<()>::new();

    Repl {
      options: Repl::get_options(),
      languages: languages,
      current_language_index: i,
      interpreter_directive_sigil: '.',
      console
    }
  }

  fn get_options() -> EvalOptions {
    File::open(".schala_repl")
      .and_then(|mut file| {
        let mut contents = String::new();
        file.read_to_string(&mut contents)?;
        Ok(contents)
      })
      .and_then(|contents| {
        let options: EvalOptions = serde_json::from_str(&contents)?;
        Ok(options)
      }).unwrap_or(EvalOptions::default())
  }

  fn save_options(&self) {
    let ref options = self.options;
    let read = File::create(".schala_repl")
      .and_then(|mut file| {
        let buf = serde_json::to_string(options).unwrap();
        file.write_all(buf.as_bytes())
      });

    if let Err(err) = read {
      println!("Error saving .schala_repl file {}", err);
    }
  }

  fn run(&mut self) {
    println!("MetaInterpreter v 0.05");

    self.console.get_history().load(".schala_history").unwrap_or(());

    loop {
      let language_name = self.languages[self.current_language_index].get_language_name();
      let prompt_str = format!("{} >> ", language_name);

      match self.console.readline(&prompt_str) {
        Err(ReadlineError::Eof) | Err(ReadlineError::Interrupted) => break,
        Err(e) => {
          println!("Terminal read error: {}", e);
        },
        Ok(ref input) => {
          self.console.add_history_entry(input);
          if self.handle_interpreter_directive(input) {
            continue;
          }
          let output = self.input_handler(input);
          println!("=> {}", output);
        }
        _ => (),
      }
    }
    self.console.get_history().save(".schala_history").unwrap_or(());
    self.save_options();
    println!("Exiting...");
  }

  fn input_handler(&mut self, input: &str) -> String {
    let ref mut language = self.languages[self.current_language_index];
    let interpreter_output = language.evaluate_in_repl(input, &self.options);
    interpreter_output.to_string()
  }

  fn handle_interpreter_directive(&mut self, input: &str) -> bool {
    match input.chars().nth(0) {
      Some(ch) if ch == self.interpreter_directive_sigil => (),
      _ => return false
    }

    let mut iter = input.chars();
    iter.next();
    let trimmed_sigil: &str = iter.as_str();

    let commands: Vec<&str> = trimmed_sigil
      .split_whitespace()
      .collect();

    let cmd: &str = match commands.get(0).clone() {
      None => return true,
      Some(s) => s
    };

    match cmd {
      "exit" | "quit" => {
        self.save_options();
        exit(0)
      },
      "help" => {
        println!("Commands:");
        println!("exit | quit");
        println!("lang(uage) [go|show|next|previous]");
        println!("set [show|hide] [tokens|parse|symbols|eval|llvm]");
      }
      "lang" | "language" => {
        match commands.get(1) {
          Some(&"show") => {
            for (i, lang) in self.languages.iter().enumerate() {
              if i == self.current_language_index {
                println!("* {}", lang.get_language_name());
              } else {
                println!("{}", lang.get_language_name());
              }
            }
          },
          Some(&"go") => {
            match commands.get(2) {
              None => println!("Must specify a language name"),
              Some(&desired_name) => {
                for (i, _) in self.languages.iter().enumerate() {
                  let lang_name = self.languages[i].get_language_name();
                  if lang_name.to_lowercase() == desired_name.to_lowercase() {
                    self.current_language_index = i;
                    println!("Switching to {}", self.languages[self.current_language_index].get_language_name());
                    return true;
                  }
                }
                println!("Language {} not found", desired_name);
              }
            }
          },
          Some(&"next") => {
            self.current_language_index = (self.current_language_index + 1) % self.languages.len();
            println!("Switching to {}", self.languages[self.current_language_index].get_language_name());
          }
          Some(&"prev") | Some(&"previous") => {
            self.current_language_index = if self.current_language_index == 0 { self.languages.len() - 1 } else { self.current_language_index - 1 };
            println!("Switching to {}", self.languages[self.current_language_index].get_language_name());
          },
          Some(e) => println!("Bad `lang` argument: {}", e),
          None => println!("`lang` - valid arguments `show`, `next`, `prev`|`previous`"),
        }
      },
      "set" => {
        let show = match commands.get(1) {
          Some(&"show") => true,
          Some(&"hide") => false,
          Some(e) => {
            println!("Bad `set` argument: {}", e);
            return true;
          }
          None => {
            println!("`set` - valid arguments `show {{option}}`, `hide {{option}}`");
            return true;
          }
        };
        match commands.get(2) {
          Some(&"tokens") => self.options.debug_tokens = show,
          Some(&"parse") => self.options.debug_parse = show,
          Some(&"symbols") => self.options.debug_symbol_table = show,
          Some(&"eval") => {
            //let ref mut language = self.languages[self.current_language_index];
            //language.set_option("trace_evaluation", show);
          },
          Some(&"llvm") => self.options.show_llvm_ir = show,
          Some(e) => {
            println!("Bad `show`/`hide` argument: {}", e);
            return true;
          }
          None => {
            println!("`show`/`hide` requires an argument");
            return true;
          }
        }
      },
      e => println!("Unknown command: {}", e)
    }
    return true;
  }
}

pub fn compilation_sequence(llvm_code: LLVMCodeString, sourcefile: &str) {
  use std::process::Command;

  let ll_filename = "out.ll";
  let obj_filename = "out.o";
  let q: Vec<&str> = sourcefile.split('.').collect();
  let bin_filename = match &q[..] {
    &[name, "maaru"] => name,
    _ => panic!("Bad filename {}", sourcefile),
  };

  let LLVMCodeString(llvm_str) = llvm_code;

  println!("Compilation process finished for {}", ll_filename);
  File::create(ll_filename)
    .and_then(|mut f| f.write_all(llvm_str.as_bytes()))
    .expect("Error writing file");

  let llc_output = Command::new("llc")
    .args(&["-filetype=obj", ll_filename, "-o", obj_filename])
    .output()
    .expect("Failed to run llc");

  if !llc_output.status.success() {
    println!("{}", String::from_utf8_lossy(&llc_output.stderr));
  }

  let gcc_output = Command::new("gcc")
    .args(&["-o", bin_filename, &obj_filename])
    .output()
    .expect("failed to run gcc");

  if !gcc_output.status.success() {
    println!("{}", String::from_utf8_lossy(&gcc_output.stdout));
    println!("{}", String::from_utf8_lossy(&gcc_output.stderr));
  }

  for filename in [obj_filename].iter() {
    Command::new("rm")
      .arg(filename)
      .output()
      .expect(&format!("failed to run rm {}", filename));
  }
}

fn program_options() -> getopts::Options {
  let mut options = getopts::Options::new();
  options.optopt("s",
    "eval-style",
    "Specify whether to compile (if supported) or interpret the language. If not specified, the default is language-specific",
    "[compile|interpret]"
  );
  options.optflag("",
    "list-languages",
    "Show a list of all supported languages");
  options.optopt("l",
    "lang",
    "Start up REPL in a language",
    "LANGUAGE");
  options.optflag("h",
    "help",
    "Show help text");
  options.optflag("w",
    "webapp",
    "Start up web interpreter");
  options
}
```
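One detail worth calling out in `schala_main` above is the case-insensitive `--lang` lookup. The stand-alone snippet below reproduces just that selection logic so it can be run in isolation; the function name `initial_index` is ours, not the library's:

```rust
// Mirrors the language-selection fallback used by schala_main above.
fn initial_index(names: &[&str], requested: Option<&str>) -> usize {
  requested
    .and_then(|lang| names.iter().position(|x| x.to_lowercase() == lang.to_lowercase()))
    .unwrap_or(0)
}

fn main() {
  let names = ["Schala", "Maaru", "Robo", "Rukka"];
  assert_eq!(initial_index(&names, Some("maaru")), 1); // case-insensitive match
  assert_eq!(initial_index(&names, None), 0);          // no --lang: first language wins
  assert_eq!(initial_index(&names, Some("nope")), 0);  // unknown name falls back to 0
  println!("selection logic ok");
}
```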
schala-lib/src/webapp.rs — new file, 45 lines

```rust
use rocket;
use rocket::State;
use rocket::response::Content;
use rocket::response::NamedFile;
use rocket::http::ContentType;
use rocket_contrib::Json;
use language::{ProgrammingLanguageInterface, EvalOptions};
use WEBFILES;
use ::PLIGenerator;

#[get("/")]
fn index() -> Content<String> {
  let path = "static/index.html";
  let html_contents = String::from_utf8(WEBFILES.get(path).unwrap().into_owned()).unwrap();
  Content(ContentType::HTML, html_contents)
}

#[get("/bundle.js")]
fn js_bundle() -> Content<String> {
  let path = "static/bundle.js";
  let js_contents = String::from_utf8(WEBFILES.get(path).unwrap().into_owned()).unwrap();
  Content(ContentType::JavaScript, js_contents)
}

#[derive(Debug, Serialize, Deserialize)]
struct Input {
  source: String,
}

#[derive(Serialize, Deserialize)]
struct Output {
  text: String,
}

#[post("/input", format = "application/json", data = "<input>")]
fn interpreter_input(input: Json<Input>, generators: State<Vec<PLIGenerator>>) -> Json<Output> {
  let schala_gen = generators.get(0).unwrap();
  let mut schala: Box<ProgrammingLanguageInterface> = schala_gen();
  let code_output = schala.evaluate_in_repl(&input.source, &EvalOptions::default());
  Json(Output { text: code_output.to_string() })
}

pub fn web_main(language_generators: Vec<PLIGenerator>) {
  rocket::ignite().manage(language_generators).mount("/", routes![index, js_bundle, interpreter_input]).launch();
}
```
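For manual testing of the `/input` route above, something like the following hand-rolled client can be used. It is not part of the repository and assumes the web interpreter is running locally on Rocket's default port (8000):

```rust
// Minimal HTTP POST against the /input route, using only std.
use std::io::{Read, Write};
use std::net::TcpStream;

fn main() -> std::io::Result<()> {
  let body = r#"{"source": "1 + 2"}"#;
  let request = format!(
    "POST /input HTTP/1.1\r\nHost: localhost\r\nContent-Type: application/json\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}",
    body.len(),
    body
  );
  let mut stream = TcpStream::connect("127.0.0.1:8000")?;
  stream.write_all(request.as_bytes())?;
  let mut response = String::new();
  stream.read_to_string(&mut response)?;
  println!("{}", response); // expect a JSON body of the form {"text": "..."}
  Ok(())
}
```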
A two-line source file (name not shown in this capture) was deleted:

```diff
@@ -1,2 +0,0 @@
-
-1 + 2
```
source_files/schala/first.schala — new file, 10 lines

```
fn main() {
  const a = 10
  const b = 20
  a + b
}

print(main())

const xxx
```
Additions to a Schala example source file (name not shown in this capture):

```diff
@@ -96,3 +96,10 @@ fn main() {
 }
 }
 
+
+# lambdas
+#
+|x,y| { }() #is probably fine
+const a = |x: Type, y|: RetType { <statementblock> }
+const a: X -> Y -> Z = |x,y| { }
```
source_files/test.rukka — new file, 3 lines

```lisp
(display (+ 1 2))
(display "Hello")
```
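As a side note on how Rukka (added in `src/rukka_lang/mod.rs` near the end of this compare) represents programs like `test.rukka`: its reader produces nested `Cons` cells. The snippet below is a stand-alone mirror of a few of its `Sexp` variants, invented here purely to visualize the shape built for `(+ 1 2)`:

```rust
// Hypothetical, trimmed-down copy of Rukka's Sexp type for illustration.
#[derive(Debug)]
enum Sexp {
  SymbolAtom(String),
  NumberAtom(u64),
  Cons(Box<Sexp>, Box<Sexp>),
  Nil,
}

fn main() {
  use self::Sexp::*;
  // (+ 1 2)  ==>  Cons(+, Cons(1, Cons(2, Nil)))
  let expr = Cons(
    Box::new(SymbolAtom("+".to_string())),
    Box::new(Cons(
      Box::new(NumberAtom(1)),
      Box::new(Cons(Box::new(NumberAtom(2)), Box::new(Nil))),
    )),
  );
  println!("{:?}", expr);
}
```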
src/maaru_lang/compilation.rs (name inferred)

```diff
@@ -6,9 +6,9 @@ use self::llvm_sys::prelude::*;
 use self::llvm_sys::{LLVMIntPredicate};
 
 use maaru_lang::parser::{AST, Statement, Function, Prototype, Expression, BinOp};
-use language::LLVMCodeString;
+use schala_lib::LLVMCodeString;
 
-use llvm_wrap as LLVMWrap;
+use schala_lib::llvm_wrap as LLVMWrap;
 
 type VariableMap = HashMap<String, LLVMValueRef>;
 
```
src/maaru_lang/mod.rs (name inferred)

```diff
@@ -3,7 +3,18 @@ pub mod parser;
 pub mod eval;
 pub mod compilation;
 
-use language::{ProgrammingLanguageInterface, EvalOptions, ReplOutput, TraceArtifact, LLVMCodeString};
+use schala_lib::{ProgrammingLanguageInterface, EvalOptions, LanguageOutput, TraceArtifact, LLVMCodeString};
+
+#[derive(Debug)]
+pub struct TokenError {
+  pub msg: String,
+}
+
+impl TokenError {
+  pub fn new(msg: &str) -> TokenError {
+    TokenError { msg: msg.to_string() }
+  }
+}
 
 pub use self::eval::Evaluator as MaaruEvaluator;
 
@@ -27,8 +38,8 @@ impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
     format!("maaru")
   }
 
-  fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> ReplOutput {
-    let mut output = ReplOutput::default();
+  fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> LanguageOutput {
+    let mut output = LanguageOutput::default();
 
     let tokens = match tokenizer::tokenize(input) {
       Ok(tokens) => {
```
src/maaru_lang/tokenizer.rs (name inferred)

```diff
@@ -5,7 +5,7 @@ use std::str::Chars;
 use self::itertools::Itertools;
 use std::rc::Rc;
 
-use language::TokenError;
+use maaru_lang::TokenError;
 
 #[derive(Debug, Clone, PartialEq)]
 pub enum Token {
```
src/main.rs — 357 changes

```diff
@@ -1,364 +1,29 @@
 #![feature(advanced_slice_patterns, slice_patterns, box_patterns, box_syntax)]
 #![feature(plugin)]
-#![plugin(rocket_codegen)]
-extern crate getopts;
-extern crate linefeed;
 extern crate itertools;
 #[macro_use]
 extern crate lazy_static;
 #[macro_use]
 extern crate maplit;
-#[macro_use]
-extern crate serde_derive;
-extern crate serde_json;
-extern crate rocket;
-extern crate rocket_contrib;
-
-use std::path::Path;
-use std::fs::File;
-use std::io::{Read, Write};
-use std::process::exit;
-use std::default::Default;
-
 mod schala_lang;
 mod maaru_lang;
 mod robo_lang;
+mod rukka_lang;
 
-mod language;
-use language::{ProgrammingLanguageInterface, EvalOptions, LLVMCodeString};
+extern crate schala_lib;
+use schala_lib::{PLIGenerator, schala_main};
 
-mod webapp;
-mod llvm_wrap;
+extern { }
 
 fn main() {
-  let languages: Vec<Box<ProgrammingLanguageInterface>> =
-    vec![
-      Box::new(schala_lang::Schala::new()),
-      Box::new(maaru_lang::Maaru::new()),
-      Box::new(robo_lang::Robo::new()),
-    ];
+  let generators: Vec<PLIGenerator> = vec![
+    Box::new(|| { Box::new(schala_lang::Schala::new())}),
+    Box::new(|| { Box::new(schala_lang::autoparser::Schala::new())}),
+    Box::new(|| { Box::new(maaru_lang::Maaru::new())}),
+    Box::new(|| { Box::new(robo_lang::Robo::new())}),
+    Box::new(|| { Box::new(rukka_lang::Rukka::new())}),
+  ];
+  schala_main(generators);
 }
```

The roughly 330 further lines deleted from the old `src/main.rs` (the getopts option parsing, `run_noninteractive`, the `linefeed`-based `Repl` struct with its interpreter directives, `compilation_sequence`, and `program_options`) are not repeated here; near-identical code now lives in `schala-lib/src/lib.rs`, shown earlier in this compare, with `linefeed` replaced by `rustyline`.
src/robo_lang/mod.rs (name inferred)

```diff
@@ -1,6 +1,5 @@
 use itertools::Itertools;
-use language::{ProgrammingLanguageInterface, EvalOptions, ReplOutput, TokenError};
+use schala_lib::{ProgrammingLanguageInterface, EvalOptions, LanguageOutput};
 
 pub struct Robo {
 }
@@ -11,6 +10,17 @@ impl Robo {
 }
 }
 
+#[derive(Debug)]
+pub struct TokenError {
+  pub msg: String,
+}
+
+impl TokenError {
+  pub fn new(msg: &str) -> TokenError {
+    TokenError { msg: msg.to_string() }
+  }
+}
+
 #[allow(dead_code)]
 #[derive(Debug)]
 pub enum Token {
@@ -140,8 +150,8 @@ impl ProgrammingLanguageInterface for Robo {
     format!("robo")
   }
 
-  fn evaluate_in_repl(&mut self, input: &str, _eval_options: &EvalOptions) -> ReplOutput {
-    let mut output = ReplOutput::default();
+  fn evaluate_in_repl(&mut self, input: &str, _eval_options: &EvalOptions) -> LanguageOutput {
+    let mut output = LanguageOutput::default();
     let tokens = match tokenize(input) {
       Ok(tokens) => tokens,
       Err(e) => {
```
src/rukka_lang/mod.rs — new file, 432 lines (this capture ends partway through `impl Sexp`)

```rust
use itertools::Itertools;
use schala_lib::{ProgrammingLanguageInterface, EvalOptions, LanguageOutput};
use std::iter::Peekable;
use std::vec::IntoIter;
use std::str::Chars;
use std::collections::HashMap;

pub struct EvaluatorState {
  binding_stack: Vec<HashMap<String, Sexp>>
}

impl EvaluatorState {
  fn new() -> EvaluatorState {
    use self::Sexp::Primitive;
    use self::PrimitiveFn::*;
    let mut default_map = HashMap::new();
    default_map.insert(format!("+"), Primitive(Plus));
    default_map.insert(format!("-"), Primitive(Minus));
    default_map.insert(format!("*"), Primitive(Mult));
    default_map.insert(format!("/"), Primitive(Div));
    default_map.insert(format!("%"), Primitive(Mod));
    default_map.insert(format!(">"), Primitive(Greater));
    default_map.insert(format!("<"), Primitive(Less));
    default_map.insert(format!("<="), Primitive(LessThanOrEqual));
    default_map.insert(format!(">="), Primitive(GreaterThanOrEqual));
    default_map.insert(format!("display"), Primitive(Display));

    EvaluatorState {
      binding_stack: vec![default_map],
    }
  }
  fn set_var(&mut self, var: String, value: Sexp) {
    let binding = self.binding_stack.last_mut().unwrap();
    binding.insert(var, value);
  }
  fn get_var(&self, var: &str) -> Option<&Sexp> {
    for bindings in self.binding_stack.iter().rev() {
      match bindings.get(var) {
        Some(x) => return Some(x),
        None => (),
      }
    }
    None
  }

  fn push_env(&mut self) {
    self.binding_stack.push(HashMap::new());
  }
  fn pop_env(&mut self) {
    self.binding_stack.pop();
  }
}

pub struct Rukka {
  state: EvaluatorState
}

impl Rukka {
  pub fn new() -> Rukka { Rukka { state: EvaluatorState::new() } }
}

impl ProgrammingLanguageInterface for Rukka {
  fn get_language_name(&self) -> String {
    "Rukka".to_string()
  }

  fn get_source_file_suffix(&self) -> String {
    format!("rukka")
  }

  fn evaluate_in_repl(&mut self, input: &str, _eval_options: &EvalOptions) -> LanguageOutput {
    let mut output = LanguageOutput::default();
    let sexps = match read(input) {
      Err(err) => {
        output.add_output(format!("Error: {}", err));
        return output;
      },
      Ok(sexps) => sexps
    };

    let output_str: String = sexps.into_iter().enumerate().map(|(i, sexp)| {
      match self.state.eval(sexp) {
        Ok(result) => format!("{}: {}", i, result.print()),
        Err(err) => format!("{} Error: {}", i, err),
      }
    }).intersperse(format!("\n")).collect();
    output.add_output(output_str);
    output
  }
}

impl EvaluatorState {
  fn eval(&mut self, expr: Sexp) -> Result<Sexp, String> {
    use self::Sexp::*;
    Ok(match expr {
      SymbolAtom(ref sym) => match self.get_var(sym) {
        Some(ref sexp) => {
          let q: &Sexp = sexp; //WTF? if I delete this line, the copy doesn't work??
          q.clone() //TODO make this not involve a clone
        },
        None => return Err(format!("Variable {} not bound", sym)),
      },
      expr @ Primitive(_) => expr,
      expr @ FnLiteral { .. } => expr,
      expr @ StringAtom(_) => expr,
      expr @ NumberAtom(_) => expr,
      expr @ BoolAtom(_) => expr,
      Cons(box operator, box operands) => match operator {
        SymbolAtom(ref sym) if match &sym[..] {
          "quote" | "eq?" | "cons" | "car" | "cdr" | "atom?" | "define" | "lambda" | "if" | "cond" => true, _ => false
        } => self.eval_special_form(sym, operands)?,
        _ => {
          let evaled = self.eval(operator)?;
          self.apply(evaled, operands)?
        }
      },
      Nil => Nil,
    })
  }
  fn eval_special_form(&mut self, form: &str, operands: Sexp) -> Result<Sexp, String> {
    use self::Sexp::*;
    Ok(match form {
      "quote" => match operands {
        Cons(box quoted, box Nil) => quoted,
        _ => return Err(format!("Bad syntax in quote")),
      },
      "eq?" => match operands {//TODO make correct
        Cons(box lhs, box Cons(box rhs, _)) => BoolAtom(lhs == rhs),
        _ => BoolAtom(true),
      },
      "cons" => match operands {
        Cons(box cadr, box Cons(box caddr, box Nil)) => {
          let newl = self.eval(cadr)?;
          let newr = self.eval(caddr)?;
          Cons(Box::new(newl), Box::new(newr))
        },
        _ => return Err(format!("Bad arguments for cons")),
      },
      "car" => match operands {
        Cons(box car, _) => car,
        _ => return Err(format!("called car with a non-pair argument")),
      },
      "cdr" => match operands {
        Cons(_, box cdr) => cdr,
        _ => return Err(format!("called cdr with a non-pair argument")),
      },
      "atom?" => match operands {
        Cons(_, _) => BoolAtom(false),
        _ => BoolAtom(true),
      },
      "define" => match operands {
        Cons(box SymbolAtom(sym), box Cons(box expr, box Nil)) => {
          let evaluated = self.eval(expr)?;
          self.set_var(sym, evaluated);
          Nil
        },
        _ => return Err(format!("Bad assignment")),
      }
      "lambda" => match operands {
        Cons(box mut paramlist, box Cons(box formalexp, box Nil)) => {
          let mut formal_params = vec![];
          {
            let mut ptr = &paramlist;
            loop {
              match ptr {
                &Cons(ref arg, ref rest) => {
                  if let SymbolAtom(ref sym) = **arg {
                    formal_params.push(sym.clone());
                    ptr = rest;
                  } else {
                    return Err(format!("Bad lambda format"));
                  }
                },
                _ => break,
              }
            }
          }
          FnLiteral {
            formal_params,
            body: Box::new(formalexp)
          }
        },
        _ => return Err(format!("Bad lambda expression")),
      },
      "if" => match operands {
        Cons(box test, box body) => {
          let truth_value = test.truthy();
          match (truth_value, body) {
            (true, Cons(box consequent, _)) => consequent,
            (false, Cons(_, box Cons(box alternative, _))) => alternative,
            _ => return Err(format!("Bad if expression"))
          }
        },
        _ => return Err(format!("Bad if expression"))
      },
      s => return Err(format!("Non-existent special form {}; this should never happen", s)),
    })
  }

  fn apply(&mut self, function: Sexp, operands: Sexp) -> Result<Sexp, String> {
    use self::Sexp::*;
    match function {
      FnLiteral { formal_params, body } => {
        self.push_env();

        let mut cur = operands;
        for param in formal_params {
          match cur {
            Cons(box arg, box rest) => {
              cur = rest;
              self.set_var(param, arg);
            },
            _ => return Err(format!("Bad argument for function application")),
          }
        }
        let result = self.eval(*body);
        self.pop_env();
        result
      },
      Primitive(prim) => {
        let mut evaled_operands = Vec::new();
        let mut cur_operand = operands;
        loop {
          match cur_operand {
            Nil => break,
            Cons(box l, box rest) => {
              evaled_operands.push(self.eval(l)?);
              cur_operand = rest;
            },
            _ => return Err(format!("Bad operands list"))
          }
        }

        prim.apply(evaled_operands)
      }
      _ => return Err(format!("Bad type to apply")),
    }
  }
}

fn read(input: &str) -> Result<Vec<Sexp>, String> {
  let mut chars: Peekable<Chars> = input.chars().peekable();
  let mut tokens = tokenize(&mut chars).into_iter().peekable();
  let mut sexps = Vec::new();
  while let Some(_) = tokens.peek() {
    sexps.push(parse(&mut tokens)?);
  }
  Ok(sexps)
}

#[derive(Debug)]
enum Token {
  LParen,
  RParen,
  Quote,
  Word(String),
  StringLiteral(String),
  NumLiteral(u64),
}

//TODO make this notion of Eq more sophisticated
#[derive(Debug, PartialEq, Clone)]
enum Sexp {
  SymbolAtom(String),
  StringAtom(String),
  NumberAtom(u64),
  BoolAtom(bool),
  Cons(Box<Sexp>, Box<Sexp>),
  Nil,
  FnLiteral {
    formal_params: Vec<String>,
    body: Box<Sexp>
  },
  Primitive(PrimitiveFn)
}

#[derive(Debug, PartialEq, Clone)]
enum PrimitiveFn {
  Plus, Minus, Mult, Div, Mod, Greater, Less, GreaterThanOrEqual, LessThanOrEqual, Display
}

impl PrimitiveFn {
  fn apply(&self, evaled_operands: Vec<Sexp>) -> Result<Sexp, String> {
    use self::Sexp::*;
    use self::PrimitiveFn::*;
    let op = self.clone();
    Ok(match op {
      Display => {
        for arg in evaled_operands {
          print!("{}\n", arg.print());
        }
        Nil
      },
      Plus | Mult => {
        let mut result = match op { Plus => 0, Mult => 1, _ => unreachable!() };
        for arg in evaled_operands {
          if let NumberAtom(n) = arg {
            if let Plus = op {
              result += n;
            } else if let Mult = op {
              result *= n;
            }
          } else {
            return Err(format!("Bad operand: {:?}", arg));
          }
        }
        NumberAtom(result)
      },
      op => return Err(format!("Primitive op {:?} not implemented", op)),
    })
  }
}

impl Sexp {
  fn print(&self) -> String {
    use self::Sexp::*;
    match self {
      &BoolAtom(true) => format!("#t"),
      &BoolAtom(false) => format!("#f"),
      &SymbolAtom(ref sym) => format!("{}", sym),
      &StringAtom(ref s) => format!("\"{}\"", s),
      &NumberAtom(ref n) => format!("{}", n),
      &Cons(ref car, ref cdr) => format!("({} . {})", car.print(), cdr.print()),
      &Nil => format!("()"),
      &FnLiteral { ref formal_params, .. } => format!("<lambda {:?}>", formal_params),
      &Primitive(ref sym) => format!("<primitive \"{:?}\">", sym),
    }
  }

  fn truthy(&self) -> bool {
```

The capture of this compare view ends here, partway through the file; the remaining lines of the 432-line file are not shown.
use self::Sexp::*;
|
||||||
|
match self {
|
||||||
|
&BoolAtom(false) => false,
|
||||||
|
_ => true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tokenize(input: &mut Peekable<Chars>) -> Vec<Token> {
|
||||||
|
use self::Token::*;
|
||||||
|
let mut tokens = Vec::new();
|
||||||
|
loop {
|
||||||
|
match input.next() {
|
||||||
|
None => break,
|
||||||
|
Some('(') => tokens.push(LParen),
|
||||||
|
Some(')') => tokens.push(RParen),
|
||||||
|
Some('\'') => tokens.push(Quote),
|
||||||
|
Some(c) if c.is_whitespace() => continue,
|
||||||
|
Some(c) if c.is_numeric() => {
|
||||||
|
let tok: String = input.peeking_take_while(|next| next.is_numeric()).collect();
|
||||||
|
let n: u64 = format!("{}{}", c, tok).parse().unwrap();
|
||||||
|
tokens.push(NumLiteral(n));
|
||||||
|
},
|
||||||
|
Some('"') => {
|
||||||
|
let string: String = input.scan(false, |escape, cur_char| {
|
||||||
|
let seen_escape = *escape;
|
||||||
|
*escape = cur_char == '\\' && !seen_escape;
|
||||||
|
match (cur_char, seen_escape) {
|
||||||
|
('"', false) => None,
|
||||||
|
('\\', false) => Some(None),
|
||||||
|
(c, _) => Some(Some(c))
|
||||||
|
}
|
||||||
|
}).filter_map(|x| x).collect();
|
||||||
|
tokens.push(StringLiteral(string));
|
||||||
|
}
|
||||||
|
Some(c) => {
|
||||||
|
let sym: String = input.peeking_take_while(|next| {
|
||||||
|
match *next {
|
||||||
|
'(' | ')' => false,
|
||||||
|
c if c.is_whitespace() => false,
|
||||||
|
_ => true
|
||||||
|
}
|
||||||
|
}).collect();
|
||||||
|
tokens.push(Word(format!("{}{}", c, sym)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse(tokens: &mut Peekable<IntoIter<Token>>) -> Result<Sexp, String> {
|
||||||
|
use self::Token::*;
|
||||||
|
use self::Sexp::*;
|
||||||
|
match tokens.next() {
|
||||||
|
Some(Word(ref s)) if s == "#f" => Ok(BoolAtom(false)),
|
||||||
|
Some(Word(ref s)) if s == "#t" => Ok(BoolAtom(true)),
|
||||||
|
Some(Word(s)) => Ok(SymbolAtom(s)),
|
||||||
|
Some(StringLiteral(s)) => Ok(StringAtom(s)),
|
||||||
|
Some(LParen) => parse_sexp(tokens),
|
||||||
|
Some(RParen) => Err(format!("Unexpected ')'")),
|
||||||
|
Some(Quote) => {
|
||||||
|
let quoted = parse(tokens)?;
|
||||||
|
Ok(Cons(Box::new(SymbolAtom(format!("quote"))), Box::new(Cons(Box::new(quoted), Box::new(Nil)))))
|
||||||
|
},
|
||||||
|
Some(NumLiteral(n)) => Ok(NumberAtom(n)),
|
||||||
|
None => Err(format!("Unexpected end of input")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_sexp(tokens: &mut Peekable<IntoIter<Token>>) -> Result<Sexp, String> {
|
||||||
|
use self::Token::*;
|
||||||
|
use self::Sexp::*;
|
||||||
|
let mut cell = Nil;
|
||||||
|
{
|
||||||
|
let mut cell_ptr = &mut cell;
|
||||||
|
loop {
|
||||||
|
match tokens.peek() {
|
||||||
|
None => return Err(format!("Unexpected end of input")),
|
||||||
|
Some(&RParen) => {
|
||||||
|
tokens.next();
|
||||||
|
break;
|
||||||
|
},
|
||||||
|
_ => {
|
||||||
|
let current = parse(tokens)?;
|
||||||
|
let new_cdr = Cons(Box::new(current), Box::new(Nil));
|
||||||
|
match cell_ptr {
|
||||||
|
&mut Cons(_, ref mut cdr) => **cdr = new_cdr,
|
||||||
|
&mut Nil => *cell_ptr = new_cdr,
|
||||||
|
_ => unreachable!()
|
||||||
|
};
|
||||||
|
|
||||||
|
let old_ptr = cell_ptr;
|
||||||
|
let new_ptr: &mut Sexp = match old_ptr { &mut Cons(_, ref mut cdr) => cdr, _ => unreachable!() } as &mut Sexp;
|
||||||
|
cell_ptr = new_ptr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(cell)
|
||||||
|
}
|
||||||
|
|
133
src/schala_lang/autoparser.rs
Normal file
133
src/schala_lang/autoparser.rs
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
use schala_lib::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, LanguageOutput};
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
|
use schala_lang::{tokenizing, parsing};
|
||||||
|
use self::tokenizing::*;
|
||||||
|
use self::parsing::*;
|
||||||
|
|
||||||
|
use schala_lang::tokenizing::TokenType::*;
|
||||||
|
|
||||||
|
struct AutoParser {
|
||||||
|
tokens: Vec<Token>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/* BNF
|
||||||
|
* all terminals in this BNF refer to TokenType values
|
||||||
|
|
||||||
|
literal := Kw::True | Kw::False | StrLiteral | number_literal
|
||||||
|
number_literal := int_literal | float_literal
|
||||||
|
float_literal := digits float_continued
|
||||||
|
float_continued := ε | Period digits
|
||||||
|
int_literal := HexLiteral | nonhex_int
|
||||||
|
nonhex_int := BinNumberSigil+ digits
|
||||||
|
digits := (DigitGroup Underscore)+
|
||||||
|
*/
|
||||||
|
|
||||||
|
impl AutoParser {
|
||||||
|
fn new(tokens: Vec<Token>) -> AutoParser {
|
||||||
|
AutoParser { tokens: tokens.into_iter().rev().collect() }
|
||||||
|
}
|
||||||
|
fn peek(&mut self) -> TokenType {
|
||||||
|
self.tokens.last().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
|
||||||
|
}
|
||||||
|
fn next(&mut self) -> TokenType {
|
||||||
|
self.tokens.pop().map(|t| { t.token_type }).unwrap_or(TokenType::EOF)
|
||||||
|
}
|
||||||
|
fn parse(&mut self) -> (Result<AST, ParseError>, Vec<String>) {
|
||||||
|
let ast = self.program();
|
||||||
|
(ast, vec![])
|
||||||
|
}
|
||||||
|
fn program(&mut self) -> ParseResult<AST> {
|
||||||
|
let etype = self.literal()?;
|
||||||
|
Ok(AST(vec![Statement::ExpressionStatement(Expression(etype, None))]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! expand_match_var {
|
||||||
|
(($pat:pat => $e:expr)) => { $pat };
|
||||||
|
(nonterm ($pat:pat => $e:expr)) => { $pat };
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! expand_match_expr {
|
||||||
|
($self:ident, ($pat:pat => $e:expr)) => {
|
||||||
|
{ $self.next(); $e }
|
||||||
|
};
|
||||||
|
($self:ident, nonterm ($pat:pat => $e:expr)) => {
|
||||||
|
{ $self.next(); $e }
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! bnf_rule {
|
||||||
|
($self:ident, $type:ty, $rule:ident := $( $rule_clauses:tt )|*) => {
|
||||||
|
fn $rule(&mut $self) -> ParseResult<$type> {
|
||||||
|
Ok(match $self.peek() {
|
||||||
|
$(
|
||||||
|
expand_match_var!($rule_clauses) => expand_match_expr!($self, $rule_clauses),
|
||||||
|
)*
|
||||||
|
_ => return ParseError::new("Not found"),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AutoParser {
|
||||||
|
bnf_rule!(self, ExpressionType, literal :=
|
||||||
|
(Keyword(Kw::True) => ExpressionType::BoolLiteral(true)) |
|
||||||
|
(Keyword(Kw::False) => ExpressionType::BoolLiteral(false))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub struct Schala { }
|
||||||
|
|
||||||
|
impl Schala {
|
||||||
|
pub fn new() -> Schala {
|
||||||
|
Schala { }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ProgrammingLanguageInterface for Schala {
|
||||||
|
fn get_language_name(&self) -> String {
|
||||||
|
"Schala-autoparser".to_string()
|
||||||
|
}
|
||||||
|
fn get_source_file_suffix(&self) -> String {
|
||||||
|
format!("schala")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> LanguageOutput {
|
||||||
|
let mut output = LanguageOutput::default();
|
||||||
|
|
||||||
|
let tokens = tokenizing::tokenize(input);
|
||||||
|
if options.debug_tokens {
|
||||||
|
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
|
||||||
|
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", token_string)));
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
|
||||||
|
if token_errors.len() != 0 {
|
||||||
|
output.add_output(format!("Tokenization error: {:?}\n", token_errors));
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut parser = AutoParser::new(tokens);
|
||||||
|
|
||||||
|
let ast = match parser.parse() {
|
||||||
|
(Ok(ast), trace) => {
|
||||||
|
if options.debug_parse {
|
||||||
|
output.add_artifact(TraceArtifact::new_parse_trace(trace));
|
||||||
|
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
|
||||||
|
}
|
||||||
|
ast
|
||||||
|
},
|
||||||
|
(Err(err), trace) => {
|
||||||
|
output.add_artifact(TraceArtifact::new_parse_trace(trace));
|
||||||
|
output.add_output(format!("Parse error: {:?}\n", err.msg));
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
output.add_output(format!("{:?}", ast));
|
||||||
|
output
|
||||||
|
}
|
||||||
|
}
|
77
src/schala_lang/builtin.rs
Normal file
77
src/schala_lang/builtin.rs
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
use std::rc::Rc;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use schala_lang::typechecking::{Type, TypeResult, TConst};
|
||||||
|
use self::Type::*; use self::TConst::*;
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct BinOp {
|
||||||
|
sigil: Rc<String>
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BinOp {
|
||||||
|
pub fn from_sigil(sigil: &str) -> BinOp {
|
||||||
|
BinOp { sigil: Rc::new(sigil.to_string()) }
|
||||||
|
}
|
||||||
|
pub fn sigil(&self) -> &Rc<String> {
|
||||||
|
&self.sigil
|
||||||
|
}
|
||||||
|
pub fn get_type(&self) -> TypeResult<Type> {
|
||||||
|
let s = self.sigil.as_str();
|
||||||
|
BINOPS.get(s).map(|x| x.0.clone()).ok_or(format!("Binop {} not found", s))
|
||||||
|
}
|
||||||
|
pub fn min_precedence() -> i32 {
|
||||||
|
i32::min_value()
|
||||||
|
}
|
||||||
|
pub fn get_precedence(op: &str) -> i32 {
|
||||||
|
let default = 10_000_000;
|
||||||
|
BINOPS.get(op).map(|x| x.2.clone()).unwrap_or(default)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct PrefixOp {
|
||||||
|
sigil: Rc<String>
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrefixOp {
|
||||||
|
pub fn from_sigil(sigil: &str) -> PrefixOp {
|
||||||
|
PrefixOp { sigil: Rc::new(sigil.to_string()) }
|
||||||
|
}
|
||||||
|
pub fn sigil(&self) -> &Rc<String> {
|
||||||
|
&self.sigil
|
||||||
|
}
|
||||||
|
pub fn is_prefix(op: &str) -> bool {
|
||||||
|
PREFIX_OPS.get(op).is_some()
|
||||||
|
}
|
||||||
|
pub fn get_type(&self) -> TypeResult<Type> {
|
||||||
|
let s = self.sigil.as_str();
|
||||||
|
PREFIX_OPS.get(s).map(|x| x.0.clone()).ok_or(format!("Prefix op {} not found", s))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
lazy_static! {
|
||||||
|
static ref PREFIX_OPS: HashMap<&'static str, (Type, ())> =
|
||||||
|
hashmap! {
|
||||||
|
"+" => (Func(bx!(Const(Int)), bx!(Const(Int))), ()),
|
||||||
|
"-" => (Func(bx!(Const(Int)), bx!(Const(Int))), ()),
|
||||||
|
"!" => (Func(bx!(Const(Bool)), bx!(Const(Bool))), ()),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/* the second tuple member is a placeholder for when I want to make evaluation rules tied to the
|
||||||
|
* binop definition */
|
||||||
|
lazy_static! {
|
||||||
|
static ref BINOPS: HashMap<&'static str, (Type, (), i32)> =
|
||||||
|
hashmap! {
|
||||||
|
"+" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 10),
|
||||||
|
"-" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 10),
|
||||||
|
"*" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
|
||||||
|
"/" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Float))))), (), 20),
|
||||||
|
"//" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
|
||||||
|
"%" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
|
||||||
|
"++" => (Func(bx!(Const(StringT)), bx!(Func(bx!(Const(StringT)), bx!(Const(StringT))))), (), 30),
|
||||||
|
"^" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
|
||||||
|
"&" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
|
||||||
|
"|" => (Func(bx!(Const(Int)), bx!(Func(bx!(Const(Int)), bx!(Const(Int))))), (), 20),
|
||||||
|
};
|
||||||
|
}
|
@ -1,21 +1,105 @@
|
|||||||
use schala_lang::parsing::{AST, Statement, Declaration, Expression, ExpressionType, Operation};
|
use std::collections::HashMap;
|
||||||
|
use std::rc::Rc;
|
||||||
|
use std::fmt::Write;
|
||||||
|
|
||||||
pub struct ReplState {
|
use itertools::Itertools;
|
||||||
|
|
||||||
|
use schala_lang::parsing::{AST, Statement, Declaration, Expression, Variant, ExpressionType};
|
||||||
|
use schala_lang::builtin::{BinOp, PrefixOp};
|
||||||
|
|
||||||
|
pub struct State<'a> {
|
||||||
|
parent_frame: Option<&'a State<'a>>,
|
||||||
|
values: HashMap<Rc<String>, ValueEntry>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> State<'a> {
|
||||||
|
|
||||||
|
fn insert(&mut self, name: Rc<String>, value: ValueEntry) {
|
||||||
|
self.values.insert(name, value);
|
||||||
|
}
|
||||||
|
fn lookup(&self, name: &Rc<String>) -> Option<&ValueEntry> {
|
||||||
|
match (self.values.get(name), self.parent_frame) {
|
||||||
|
(None, None) => None,
|
||||||
|
(None, Some(parent)) => parent.lookup(name),
|
||||||
|
(Some(value), _) => Some(value),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
enum ValueEntry {
|
||||||
|
Binding {
|
||||||
|
val: FullyEvaluatedExpr,
|
||||||
|
},
|
||||||
|
Function {
|
||||||
|
param_names: Vec<Rc<String>>,
|
||||||
|
body: Vec<Statement>,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type EvalResult<T> = Result<T, String>;
|
type EvalResult<T> = Result<T, String>;
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
enum FullyEvaluatedExpr {
|
enum FullyEvaluatedExpr {
|
||||||
UnsignedInt(u64),
|
UnsignedInt(u64),
|
||||||
SignedInt(i64),
|
SignedInt(i64),
|
||||||
Float(f64),
|
Float(f64),
|
||||||
Str(String),
|
Str(String),
|
||||||
Bool(bool),
|
Bool(bool),
|
||||||
|
FuncLit(Rc<String>),
|
||||||
|
Custom {
|
||||||
|
string_rep: Rc<String>,
|
||||||
|
},
|
||||||
|
Tuple(Vec<FullyEvaluatedExpr>),
|
||||||
|
List(Vec<FullyEvaluatedExpr>)
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ReplState {
|
impl FullyEvaluatedExpr {
|
||||||
pub fn new() -> ReplState {
|
fn to_string(&self) -> String {
|
||||||
ReplState { }
|
use self::FullyEvaluatedExpr::*;
|
||||||
|
match self {
|
||||||
|
&UnsignedInt(ref n) => format!("{}", n),
|
||||||
|
&SignedInt(ref n) => format!("{}", n),
|
||||||
|
&Float(ref f) => format!("{}", f),
|
||||||
|
&Str(ref s) => format!("\"{}\"", s),
|
||||||
|
&Bool(ref b) => format!("{}", b),
|
||||||
|
&Custom { ref string_rep } => format!("{}", string_rep),
|
||||||
|
&Tuple(ref items) => {
|
||||||
|
let mut buf = String::new();
|
||||||
|
write!(buf, "(").unwrap();
|
||||||
|
for term in items.iter().map(|e| Some(e)).intersperse(None) {
|
||||||
|
match term {
|
||||||
|
Some(e) => write!(buf, "{}", e.to_string()).unwrap(),
|
||||||
|
None => write!(buf, ", ").unwrap(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
write!(buf, ")").unwrap();
|
||||||
|
buf
|
||||||
|
},
|
||||||
|
&FuncLit(ref name) => format!("<function {}>", name),
|
||||||
|
&List(ref items) => {
|
||||||
|
let mut buf = String::new();
|
||||||
|
write!(buf, "[").unwrap();
|
||||||
|
for term in items.iter().map(|e| Some(e)).intersperse(None) {
|
||||||
|
match term {
|
||||||
|
Some(e) => write!(buf, "{}", e.to_string()).unwrap(),
|
||||||
|
None => write!(buf, ", ").unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
write!(buf, "]").unwrap();
|
||||||
|
buf
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> State<'a> {
|
||||||
|
pub fn new() -> State<'a> {
|
||||||
|
State { parent_frame: None, values: HashMap::new() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new_with_parent(parent: &'a State<'a>) -> State<'a> {
|
||||||
|
State { parent_frame: Some(parent), values: HashMap::new() }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn evaluate(&mut self, ast: AST) -> Vec<String> {
|
pub fn evaluate(&mut self, ast: AST) -> Vec<String> {
|
||||||
@ -23,12 +107,12 @@ impl ReplState {
|
|||||||
for statement in ast.0 {
|
for statement in ast.0 {
|
||||||
match self.eval_statement(statement) {
|
match self.eval_statement(statement) {
|
||||||
Ok(output) => {
|
Ok(output) => {
|
||||||
if let Some(s) = output {
|
if let Some(fully_evaluated) = output {
|
||||||
acc.push(s);
|
acc.push(fully_evaluated.to_string());
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
Err(error) => {
|
Err(error) => {
|
||||||
acc.push(format!("Error: {}", error));
|
acc.push(format!("Eval error: {}", error));
|
||||||
return acc;
|
return acc;
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@ -37,29 +121,41 @@ impl ReplState {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ReplState {
|
impl<'a> State<'a> {
|
||||||
fn eval_statement(&mut self, statement: Statement) -> EvalResult<Option<String>> {
|
fn eval_statement(&mut self, statement: Statement) -> EvalResult<Option<FullyEvaluatedExpr>> {
|
||||||
use self::FullyEvaluatedExpr::*;
|
Ok(match statement {
|
||||||
match statement {
|
Statement::ExpressionStatement(expr) => Some(self.eval_expr(expr)?),
|
||||||
Statement::ExpressionStatement(expr) => {
|
Statement::Declaration(decl) => { self.eval_decl(decl)?; None }
|
||||||
self.eval_expr(expr).map( |eval| {
|
|
||||||
match eval {
|
|
||||||
UnsignedInt(n) => Some(format!("{}", n)),
|
|
||||||
SignedInt(n) => Some(format!("{}", n)),
|
|
||||||
Float(f) => Some(format!("{}", f)),
|
|
||||||
Str(s) => Some(format!("\"{}\"", s)),
|
|
||||||
Bool(b) => Some(format!("{}", b)),
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
},
|
|
||||||
Statement::Declaration(decl) => {
|
|
||||||
self.eval_decl(decl).map(|_| None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn eval_decl(&mut self, _decl: Declaration) -> EvalResult<()> {
|
fn eval_decl(&mut self, decl: Declaration) -> EvalResult<()> {
|
||||||
Err("Not implmemented".to_string())
|
use self::Declaration::*;
|
||||||
|
use self::Variant::*;
|
||||||
|
|
||||||
|
match decl {
|
||||||
|
FuncDecl(signature, statements) => {
|
||||||
|
let name = signature.name;
|
||||||
|
let param_names: Vec<Rc<String>> = signature.params.iter().map(|fp| fp.0.clone()).collect();
|
||||||
|
self.insert(name, ValueEntry::Function { body: statements.clone(), param_names });
|
||||||
|
},
|
||||||
|
TypeDecl(_name, body) => {
|
||||||
|
for variant in body.0.iter() {
|
||||||
|
match variant {
|
||||||
|
&UnitStruct(ref name) => self.insert(name.clone(),
|
||||||
|
ValueEntry::Binding { val: FullyEvaluatedExpr::Custom { string_rep: name.clone() } }),
|
||||||
|
&TupleStruct(ref _name, ref _args) => unimplemented!(),
|
||||||
|
&Record(ref _name, ref _fields) => unimplemented!(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Binding { name, expr, ..} => {
|
||||||
|
let val = self.eval_expr(expr)?;
|
||||||
|
self.insert(name.clone(), ValueEntry::Binding { val });
|
||||||
|
},
|
||||||
|
_ => return Err(format!("Declaration evaluation not yet implemented"))
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn eval_expr(&mut self, expr: Expression) -> EvalResult<FullyEvaluatedExpr> {
|
fn eval_expr(&mut self, expr: Expression) -> EvalResult<FullyEvaluatedExpr> {
|
||||||
@ -74,35 +170,147 @@ impl ReplState {
|
|||||||
BoolLiteral(b) => Ok(Bool(b)),
|
BoolLiteral(b) => Ok(Bool(b)),
|
||||||
PrefixExp(op, expr) => self.eval_prefix_exp(op, expr),
|
PrefixExp(op, expr) => self.eval_prefix_exp(op, expr),
|
||||||
BinExp(op, lhs, rhs) => self.eval_binexp(op, lhs, rhs),
|
BinExp(op, lhs, rhs) => self.eval_binexp(op, lhs, rhs),
|
||||||
_ => Err(format!("Unimplemented")),
|
Value(name) => self.eval_value(name),
|
||||||
|
TupleLiteral(expressions) => {
|
||||||
|
let mut evals = Vec::new();
|
||||||
|
for expr in expressions {
|
||||||
|
match self.eval_expr(expr) {
|
||||||
|
Ok(fully_evaluated) => evals.push(fully_evaluated),
|
||||||
|
error => return error,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(Tuple(evals))
|
||||||
|
}
|
||||||
|
Call { f, arguments } => {
|
||||||
|
let mut evaled_arguments = Vec::new();
|
||||||
|
for arg in arguments.into_iter() {
|
||||||
|
evaled_arguments.push(self.eval_expr(arg)?);
|
||||||
|
}
|
||||||
|
self.eval_application(*f, evaled_arguments)
|
||||||
|
},
|
||||||
|
Index { box indexee, indexers } => {
|
||||||
|
let evaled = self.eval_expr(indexee)?;
|
||||||
|
match evaled {
|
||||||
|
Tuple(mut exprs) => {
|
||||||
|
let len = indexers.len();
|
||||||
|
if len == 1 {
|
||||||
|
let idx = indexers.into_iter().nth(0).unwrap();
|
||||||
|
match self.eval_expr(idx)? {
|
||||||
|
UnsignedInt(n) if (n as usize) < exprs.len() => Ok(exprs.drain(n as usize..).next().unwrap()),
|
||||||
|
UnsignedInt(n) => Err(format!("Index {} out of range", n)),
|
||||||
|
other => Err(format!("{:?} is not an unsigned integer", other)),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Err(format!("Tuple index must be one integer"))
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_ => Err(format!("Bad index expression"))
|
||||||
|
}
|
||||||
|
},
|
||||||
|
ListLiteral(items) => Ok(List(items.into_iter().map(|item| self.eval_expr(item)).collect::<Result<Vec<_>,_>>()?)),
|
||||||
|
x => Err(format!("Unimplemented thing {:?}", x)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn eval_binexp(&mut self, op: Operation, lhs: Box<Expression>, rhs: Box<Expression>) -> EvalResult<FullyEvaluatedExpr> {
|
fn eval_application(&mut self, f: Expression, arguments: Vec<FullyEvaluatedExpr>) -> EvalResult<FullyEvaluatedExpr> {
|
||||||
|
use self::ExpressionType::*;
|
||||||
|
match f {
|
||||||
|
Expression(Value(ref identifier), _) if self.is_builtin(identifier) => self.eval_builtin(identifier, arguments),
|
||||||
|
Expression(Value(identifier), _) => {
|
||||||
|
match self.lookup(&identifier) {
|
||||||
|
Some(&ValueEntry::Function { ref body, ref param_names }) => {
|
||||||
|
if arguments.len() != param_names.len() {
|
||||||
|
return Err(format!("Wrong number of arguments for the function"));
|
||||||
|
}
|
||||||
|
let mut new_state = State::new_with_parent(self);
|
||||||
|
let sub_ast = body.clone();
|
||||||
|
for (param, val) in param_names.iter().zip(arguments.into_iter()) {
|
||||||
|
new_state.insert(param.clone(), ValueEntry::Binding { val });
|
||||||
|
}
|
||||||
|
let mut ret: Option<FullyEvaluatedExpr> = None;
|
||||||
|
for statement in sub_ast.into_iter() {
|
||||||
|
ret = new_state.eval_statement(statement)?;
|
||||||
|
}
|
||||||
|
Ok(ret.unwrap_or(FullyEvaluatedExpr::Custom { string_rep: Rc::new("()".to_string()) }))
|
||||||
|
},
|
||||||
|
_ => Err(format!("Function {} not found", identifier)),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
x => Err(format!("Trying to apply {:?} which is not a function", x)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn is_builtin(&self, name: &Rc<String>) -> bool {
|
||||||
|
match &name.as_ref()[..] {
|
||||||
|
"print" | "println" => true,
|
||||||
|
_ => false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn eval_builtin(&mut self, name: &Rc<String>, args: Vec<FullyEvaluatedExpr>) -> EvalResult<FullyEvaluatedExpr> {
|
||||||
|
use self::FullyEvaluatedExpr::*;
|
||||||
|
match &name.as_ref()[..] {
|
||||||
|
"print" => {
|
||||||
|
for arg in args {
|
||||||
|
print!("{}", arg.to_string());
|
||||||
|
}
|
||||||
|
Ok(Tuple(vec![]))
|
||||||
|
},
|
||||||
|
"println" => {
|
||||||
|
for arg in args {
|
||||||
|
println!("{}", arg.to_string());
|
||||||
|
}
|
||||||
|
Ok(Tuple(vec![]))
|
||||||
|
},
|
||||||
|
_ => unreachable!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn eval_value(&mut self, name: Rc<String>) -> EvalResult<FullyEvaluatedExpr> {
|
||||||
|
use self::ValueEntry::*;
|
||||||
|
match self.lookup(&name) {
|
||||||
|
None => return Err(format!("Value {} not found", *name)),
|
||||||
|
Some(lookup) => match lookup {
|
||||||
|
&Binding { ref val } => Ok(val.clone()),
|
||||||
|
&Function { .. } => Ok(FullyEvaluatedExpr::FuncLit(name.clone()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn eval_binexp(&mut self, op: BinOp, lhs: Box<Expression>, rhs: Box<Expression>) -> EvalResult<FullyEvaluatedExpr> {
|
||||||
use self::FullyEvaluatedExpr::*;
|
use self::FullyEvaluatedExpr::*;
|
||||||
let evaled_lhs = self.eval_expr(*lhs)?;
|
let evaled_lhs = self.eval_expr(*lhs)?;
|
||||||
let evaled_rhs = self.eval_expr(*rhs)?;
|
let evaled_rhs = self.eval_expr(*rhs)?;
|
||||||
let opstr: &str = &op.0;
|
let sigil = op.sigil();
|
||||||
Ok(match (opstr, evaled_lhs, evaled_rhs) {
|
//let sigil: &str = op.sigil().as_ref().as_str();
|
||||||
|
Ok(match (sigil.as_str(), evaled_lhs, evaled_rhs) {
|
||||||
("+", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l + r),
|
("+", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l + r),
|
||||||
|
("++", Str(s1), Str(s2)) => Str(format!("{}{}", s1, s2)),
|
||||||
("-", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l - r),
|
("-", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l - r),
|
||||||
("*", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l * r),
|
("*", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l * r),
|
||||||
("/", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l / r),
|
("/", UnsignedInt(l), UnsignedInt(r)) => Float((l as f64)/ (r as f64)),
|
||||||
|
("//", UnsignedInt(l), UnsignedInt(r)) => if r == 0 {
|
||||||
|
return Err(format!("Runtime error: divide by zero"));
|
||||||
|
} else {
|
||||||
|
UnsignedInt(l / r)
|
||||||
|
},
|
||||||
("%", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l % r),
|
("%", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l % r),
|
||||||
|
("^", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l ^ r),
|
||||||
|
("&", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l & r),
|
||||||
|
("|", UnsignedInt(l), UnsignedInt(r)) => UnsignedInt(l | r),
|
||||||
_ => return Err(format!("Runtime error: not yet implemented")),
|
_ => return Err(format!("Runtime error: not yet implemented")),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn eval_prefix_exp(&mut self, op: Operation, expr: Box<Expression>) -> EvalResult<FullyEvaluatedExpr> {
|
fn eval_prefix_exp(&mut self, op: PrefixOp, expr: Box<Expression>) -> EvalResult<FullyEvaluatedExpr> {
|
||||||
use self::FullyEvaluatedExpr::*;
|
use self::FullyEvaluatedExpr::*;
|
||||||
let evaled_expr = self.eval_expr(*expr)?;
|
let evaled_expr = self.eval_expr(*expr)?;
|
||||||
let opstr: &str = &op.0;
|
let sigil = op.sigil();
|
||||||
|
|
||||||
Ok(match (opstr, evaled_expr) {
|
Ok(match (sigil.as_str(), evaled_expr) {
|
||||||
("!", Bool(true)) => Bool(false),
|
("!", Bool(true)) => Bool(false),
|
||||||
("!", Bool(false)) => Bool(true),
|
("!", Bool(false)) => Bool(true),
|
||||||
("-", UnsignedInt(n)) => SignedInt(-1*(n as i64)),
|
("-", UnsignedInt(n)) => SignedInt(-1*(n as i64)),
|
||||||
("-", SignedInt(n)) => SignedInt(-1*(n as i64)),
|
("-", SignedInt(n)) => SignedInt(-1*(n as i64)),
|
||||||
|
("+", SignedInt(n)) => SignedInt(n),
|
||||||
|
("+", UnsignedInt(n)) => UnsignedInt(n),
|
||||||
_ => return Err(format!("Runtime error: not yet implemented")),
|
_ => return Err(format!("Runtime error: not yet implemented")),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1,21 +1,30 @@
|
|||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use language::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, ReplOutput};
|
use schala_lib::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, LanguageOutput};
|
||||||
|
|
||||||
|
macro_rules! bx {
|
||||||
|
($e:expr) => { Box::new($e) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod autoparser;
|
||||||
|
|
||||||
|
mod builtin;
|
||||||
|
|
||||||
|
mod tokenizing;
|
||||||
mod parsing;
|
mod parsing;
|
||||||
mod type_check;
|
mod typechecking;
|
||||||
mod eval;
|
mod eval;
|
||||||
|
|
||||||
use self::type_check::{TypeContext};
|
use self::typechecking::{TypeContext};
|
||||||
|
|
||||||
pub struct Schala {
|
pub struct Schala {
|
||||||
state: eval::ReplState,
|
state: eval::State<'static>,
|
||||||
type_context: TypeContext
|
type_context: TypeContext
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Schala {
|
impl Schala {
|
||||||
pub fn new() -> Schala {
|
pub fn new() -> Schala {
|
||||||
Schala {
|
Schala {
|
||||||
state: eval::ReplState::new(),
|
state: eval::State::new(),
|
||||||
type_context: TypeContext::new(),
|
type_context: TypeContext::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -30,13 +39,12 @@ impl ProgrammingLanguageInterface for Schala {
|
|||||||
format!("schala")
|
format!("schala")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> ReplOutput {
|
fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> LanguageOutput {
|
||||||
let mut output = ReplOutput::default();
|
let mut output = LanguageOutput::default();
|
||||||
let tokens = parsing::tokenize(input);
|
let tokens = tokenizing::tokenize(input);
|
||||||
if options.debug_tokens {
|
if options.debug_tokens {
|
||||||
let token_string = tokens.iter().map(|t| format!("{:?}<{}>", t.token_type, t.offset)).join(", ");
|
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
|
||||||
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", token_string)));
|
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", token_string)));
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
@ -62,34 +70,35 @@ impl ProgrammingLanguageInterface for Schala {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
self.type_context.add_symbols(&ast);
|
match self.type_context.add_top_level_types(&ast) {
|
||||||
|
Ok(()) => (),
|
||||||
|
Err(msg) => {
|
||||||
|
output.add_artifact(TraceArtifact::new("type_check", msg));
|
||||||
|
//return output
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
if options.debug_symbol_table {
|
if options.debug_symbol_table {
|
||||||
let text = self.type_context.debug_symbol_table();
|
let text = self.type_context.debug_symbol_table();
|
||||||
output.add_artifact(TraceArtifact::new("symbol_table", text));
|
output.add_artifact(TraceArtifact::new("symbol_table", text));
|
||||||
}
|
}
|
||||||
|
|
||||||
match self.type_context.type_check(&ast) {
|
match self.type_context.type_check_ast(&ast) {
|
||||||
Ok(ty) => {
|
Ok(ty) => {
|
||||||
output.add_artifact(TraceArtifact::new("type_check", format!("type: {:?}", ty)));
|
output.add_artifact(TraceArtifact::new("type_check", format!("{:?}", ty)));
|
||||||
},
|
},
|
||||||
Err(msg) => {
|
Err(msg) => {
|
||||||
output.add_artifact(TraceArtifact::new("type_check", msg));
|
output.add_artifact(TraceArtifact::new("type_check", msg));
|
||||||
|
/*
|
||||||
output.add_output(format!("Type error"));
|
output.add_output(format!("Type error"));
|
||||||
return output;
|
return output;
|
||||||
|
*/
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let evaluation_output = self.state.evaluate(ast);
|
let evaluation_outputs = self.state.evaluate(ast);
|
||||||
let mut acc = String::new();
|
let text_output: String = evaluation_outputs.into_iter().intersperse(format!("\n")).collect();
|
||||||
let mut iter = evaluation_output.iter().peekable();
|
output.add_output(text_output);
|
||||||
while let Some(s) = iter.next() {
|
|
||||||
acc.push_str(&s);
|
|
||||||
if let Some(_) = iter.peek() {
|
|
||||||
acc.push_str("\n");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
output.add_output(acc);
|
|
||||||
return output;
|
return output;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
File diff suppressed because it is too large
Load Diff
264
src/schala_lang/tokenizing.rs
Normal file
264
src/schala_lang/tokenizing.rs
Normal file
@ -0,0 +1,264 @@
|
|||||||
|
use itertools::Itertools;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::rc::Rc;
|
||||||
|
use std::iter::{Iterator, Peekable};
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub enum TokenType {
|
||||||
|
Newline, Semicolon,
|
||||||
|
|
||||||
|
LParen, RParen,
|
||||||
|
LSquareBracket, RSquareBracket,
|
||||||
|
LAngleBracket, RAngleBracket,
|
||||||
|
LCurlyBrace, RCurlyBrace,
|
||||||
|
Pipe,
|
||||||
|
|
||||||
|
Comma, Period, Colon, Underscore,
|
||||||
|
|
||||||
|
Operator(Rc<String>),
|
||||||
|
DigitGroup(Rc<String>), HexLiteral(Rc<String>), BinNumberSigil,
|
||||||
|
StrLiteral(Rc<String>),
|
||||||
|
Identifier(Rc<String>),
|
||||||
|
Keyword(Kw),
|
||||||
|
|
||||||
|
EOF,
|
||||||
|
|
||||||
|
Error(String),
|
||||||
|
}
|
||||||
|
use self::TokenType::*;
|
||||||
|
|
||||||
|
impl fmt::Display for TokenType {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
&Operator(ref s) => write!(f, "Operator({})", **s),
|
||||||
|
&DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
|
||||||
|
&HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
|
||||||
|
&StrLiteral(ref s) => write!(f, "StrLiteral({})", s),
|
||||||
|
&Identifier(ref s) => write!(f, "Identifier({})", s),
|
||||||
|
&Error(ref s) => write!(f, "Error({})", s),
|
||||||
|
other => write!(f, "{:?}", other),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||||
|
pub enum Kw {
|
||||||
|
If, Else,
|
||||||
|
Func,
|
||||||
|
For,
|
||||||
|
Match,
|
||||||
|
Var, Const, Let, In,
|
||||||
|
Return,
|
||||||
|
Alias, Type, SelfType, SelfIdent,
|
||||||
|
Trait, Impl,
|
||||||
|
True, False,
|
||||||
|
Module
|
||||||
|
}
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref KEYWORDS: HashMap<&'static str, Kw> =
|
||||||
|
hashmap! {
|
||||||
|
"if" => Kw::If,
|
||||||
|
"else" => Kw::Else,
|
||||||
|
"fn" => Kw::Func,
|
||||||
|
"for" => Kw::For,
|
||||||
|
"match" => Kw::Match,
|
||||||
|
"var" => Kw::Var,
|
||||||
|
"const" => Kw::Const,
|
||||||
|
"let" => Kw::Let,
|
||||||
|
"in" => Kw::In,
|
||||||
|
"return" => Kw::Return,
|
||||||
|
"alias" => Kw::Alias,
|
||||||
|
"type" => Kw::Type,
|
||||||
|
"Self" => Kw::SelfType,
|
||||||
|
"self" => Kw::SelfIdent,
|
||||||
|
"trait" => Kw::Trait,
|
||||||
|
"impl" => Kw::Impl,
|
||||||
|
"true" => Kw::True,
|
||||||
|
"false" => Kw::False,
|
||||||
|
"module" => Kw::Module,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct Token {
|
||||||
|
pub token_type: TokenType,
|
||||||
|
pub offset: (usize, usize),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Token {
|
||||||
|
pub fn get_error(&self) -> Option<&String> {
|
||||||
|
match self.token_type {
|
||||||
|
TokenType::Error(ref s) => Some(s),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn to_string_with_metadata(&self) -> String {
|
||||||
|
format!("{}(L:{},c:{})", self.token_type, self.offset.0, self.offset.1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const OPERATOR_CHARS: [char; 19] = ['!', '$', '%', '&', '*', '+', '-', '.', '/', ':', '<', '>', '=', '?', '@', '^', '|', '~', '`'];
|
||||||
|
fn is_operator(c: &char) -> bool {
|
||||||
|
OPERATOR_CHARS.iter().any(|x| x == c)
|
||||||
|
}
|
||||||
|
|
||||||
|
type CharIter<I: Iterator<Item=(usize,usize,char)>> = Peekable<I>;
|
||||||
|
|
||||||
|
pub fn tokenize(input: &str) -> Vec<Token> {
|
||||||
|
let mut tokens: Vec<Token> = Vec::new();
|
||||||
|
|
||||||
|
let mut input = input.lines().enumerate()
|
||||||
|
.flat_map(|(line_idx, ref line)| {
|
||||||
|
line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
|
||||||
|
}).peekable();
|
||||||
|
|
||||||
|
while let Some((line_idx, ch_idx, c)) = input.next() {
|
||||||
|
let cur_tok_type = match c {
|
||||||
|
'#' => {
|
||||||
|
if let Some(&(_, _, '{')) = input.peek() {
|
||||||
|
} else {
|
||||||
|
while let Some((_, _, c)) = input.next() {
|
||||||
|
if c == '\n' {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
},
|
||||||
|
c if c.is_whitespace() && c != '\n' => continue,
|
||||||
|
'\n' => Newline, ';' => Semicolon,
|
||||||
|
':' => Colon, ',' => Comma,
|
||||||
|
'(' => LParen, ')' => RParen,
|
||||||
|
'{' => LCurlyBrace, '}' => RCurlyBrace,
|
||||||
|
'[' => LSquareBracket, ']' => RSquareBracket,
|
||||||
|
'"' => handle_quote(&mut input),
|
||||||
|
c if c.is_digit(10) => handle_digit(c, &mut input),
|
||||||
|
c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input), //TODO I'll probably have to rewrite this if I care about types being uppercase, also type parameterization
|
||||||
|
c if is_operator(&c) => handle_operator(c, &mut input),
|
||||||
|
unknown => Error(format!("Unexpected character: {}", unknown)),
|
||||||
|
};
|
||||||
|
tokens.push(Token { token_type: cur_tok_type, offset: (line_idx, ch_idx) });
|
||||||
|
}
|
||||||
|
tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_digit<I: Iterator<Item=(usize,usize,char)>>(c: char, input: &mut CharIter<I>) -> TokenType {
|
||||||
|
if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
|
||||||
|
input.next();
|
||||||
|
let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
|
||||||
|
HexLiteral(Rc::new(rest))
|
||||||
|
} else if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'b' }) {
|
||||||
|
input.next();
|
||||||
|
BinNumberSigil
|
||||||
|
} else {
|
||||||
|
let mut buf = c.to_string();
|
||||||
|
buf.extend(input.peeking_take_while(|&(_, _, ref c)| c.is_digit(10)).map(|(_, _, c)| { c }));
|
||||||
|
DigitGroup(Rc::new(buf))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_quote<I: Iterator<Item=(usize,usize,char)>>(input: &mut CharIter<I>) -> TokenType {
|
||||||
|
let mut buf = String::new();
|
||||||
|
loop {
|
||||||
|
match input.next().map(|(_, _, c)| { c }) {
|
||||||
|
Some('"') => break,
|
||||||
|
Some('\\') => {
|
||||||
|
let next = input.peek().map(|&(_, _, c)| { c });
|
||||||
|
if next == Some('n') {
|
||||||
|
input.next();
|
||||||
|
buf.push('\n')
|
||||||
|
} else if next == Some('"') {
|
||||||
|
input.next();
|
||||||
|
buf.push('"');
|
||||||
|
} else if next == Some('t') {
|
||||||
|
input.next();
|
||||||
|
buf.push('\t');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Some(c) => buf.push(c),
|
||||||
|
None => return TokenType::Error(format!("Unclosed string")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TokenType::StrLiteral(Rc::new(buf))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_alphabetic<I: Iterator<Item=(usize,usize,char)>>(c: char, input: &mut CharIter<I>) -> TokenType {
|
||||||
|
let mut buf = String::new();
|
||||||
|
buf.push(c);
|
||||||
|
if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
|
||||||
|
return TokenType::Underscore
|
||||||
|
}
|
||||||
|
|
||||||
|
loop {
|
||||||
|
match input.peek().map(|&(_, _, c)| { c }) {
|
||||||
|
Some(c) if c.is_alphanumeric() => {
|
||||||
|
input.next();
|
||||||
|
buf.push(c);
|
||||||
|
},
|
||||||
|
_ => break,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match KEYWORDS.get(buf.as_str()) {
|
||||||
|
Some(kw) => TokenType::Keyword(*kw),
|
||||||
|
None => TokenType::Identifier(Rc::new(buf)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_operator<I: Iterator<Item=(usize,usize,char)>>(c: char, input: &mut CharIter<I>) -> TokenType {
|
||||||
|
match c {
|
||||||
|
'<' | '>' | '|' | '.' => {
|
||||||
|
let ref next = input.peek().map(|&(_, _, c)| { c });
|
||||||
|
if !next.map(|n| { is_operator(&n) }).unwrap_or(false) {
|
||||||
|
return match c {
|
||||||
|
'<' => LAngleBracket,
|
||||||
|
'>' => RAngleBracket,
|
||||||
|
'|' => Pipe,
|
||||||
|
'.' => Period,
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_ => (),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut buf = String::new();
|
||||||
|
buf.push(c);
|
||||||
|
loop {
|
||||||
|
match input.peek().map(|&(_, _, c)| { c }) {
|
||||||
|
Some(c) if is_operator(&c) => {
|
||||||
|
input.next();
|
||||||
|
buf.push(c);
|
||||||
|
},
|
||||||
|
_ => break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TokenType::Operator(Rc::new(buf))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod schala_tokenizer_tests {
|
||||||
|
use super::*;
|
||||||
|
use super::Kw::*;
|
||||||
|
|
||||||
|
macro_rules! digit { ($ident:expr) => { DigitGroup(Rc::new($ident.to_string())) } }
|
||||||
|
macro_rules! ident { ($ident:expr) => { Identifier(Rc::new($ident.to_string())) } }
|
||||||
|
macro_rules! op { ($ident:expr) => { Operator(Rc::new($ident.to_string())) } }
|
||||||
|
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tokens() {
|
||||||
|
let a = tokenize("let a: A<B> = c ++ d");
|
||||||
|
let token_types: Vec<TokenType> = a.into_iter().map(move |t| t.token_type).collect();
|
||||||
|
assert_eq!(token_types, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
|
||||||
|
LAngleBracket, ident!("B"), RAngleBracket, op!("="), ident!("c"), op!("++"), ident!("d")]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn underscores() {
|
||||||
|
let token_types: Vec<TokenType> = tokenize("4_8").into_iter().map(move |t| t.token_type).collect();
|
||||||
|
assert_eq!(token_types, vec![digit!("4"), Underscore, digit!("8")]);
|
||||||
|
}
|
||||||
|
}
|
@ -1,123 +1,8 @@
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use schala_lang::parsing::{AST, Statement, Declaration, Signature, Expression, ExpressionType, Operation, TypeName};
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
use schala_lang::parsing::{AST, Statement, Declaration, Signature, Expression, ExpressionType, Operation, Variant, TypeName, TypeSingletonName};
|
||||||
struct PathSpecifier(Rc<String>);
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
struct TypeContextEntry {
|
|
||||||
type_var: TypeVariable,
|
|
||||||
constant: bool
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct TypeContext {
|
|
||||||
symbol_table: HashMap<PathSpecifier, TypeContextEntry>,
|
|
||||||
existential_type_label_count: u64
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TypeContext {
|
|
||||||
pub fn new() -> TypeContext {
|
|
||||||
TypeContext {
|
|
||||||
symbol_table: HashMap::new(),
|
|
||||||
existential_type_label_count: 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn add_symbols(&mut self, ast: &AST) {
|
|
||||||
use self::Declaration::*;
|
|
||||||
|
|
||||||
for statement in ast.0.iter() {
|
|
||||||
match *statement {
|
|
||||||
Statement::ExpressionStatement(_) => (),
|
|
||||||
Statement::Declaration(ref decl) => {
|
|
||||||
match *decl {
|
|
||||||
FuncSig(_) => (),
|
|
||||||
Impl { .. } => (),
|
|
||||||
TypeDecl { .. } => (),
|
|
||||||
TypeAlias { .. } => (),
|
|
||||||
Binding {ref name, ref constant, ref expr} => {
|
|
||||||
let spec = PathSpecifier(name.clone());
|
|
||||||
let type_var = expr.1.as_ref()
|
|
||||||
.map(|ty| self.from_anno(ty))
|
|
||||||
.unwrap_or_else(|| { self.get_existential_type() });
|
|
||||||
let entry = TypeContextEntry { type_var, constant: *constant };
|
|
||||||
self.symbol_table.insert(spec, entry);
|
|
||||||
},
|
|
||||||
FuncDecl(ref signature, _) => {
|
|
||||||
let spec = PathSpecifier(signature.name.clone());
|
|
||||||
let type_var = self.from_signature(signature);
|
|
||||||
let entry = TypeContextEntry { type_var, constant: true };
|
|
||||||
self.symbol_table.insert(spec, entry);
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fn lookup(&mut self, binding: &Rc<String>) -> Option<TypeContextEntry> {
|
|
||||||
let key = PathSpecifier(binding.clone());
|
|
||||||
self.symbol_table.get(&key).map(|entry| entry.clone())
|
|
||||||
}
|
|
||||||
pub fn debug_symbol_table(&self) -> String {
|
|
||||||
format!("Symbol table:\n {:?}", self.symbol_table)
|
|
||||||
}
|
|
||||||
fn get_existential_type(&mut self) -> TypeVariable {
|
|
||||||
let ret = TypeVariable::Exist(self.existential_type_label_count);
|
|
||||||
self.existential_type_label_count += 1;
|
|
||||||
ret
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_anno(&mut self, anno: &TypeName) -> TypeVariable {
|
|
||||||
use self::TypeVariable::*;
|
|
||||||
use self::UVar::*;
|
|
||||||
|
|
||||||
match anno {
|
|
||||||
&TypeName::Singleton { ref name, .. } => {
|
|
||||||
match name.as_ref().as_ref() {
|
|
||||||
"Int" => Univ(Integer),
|
|
||||||
"Bool" => Univ(Boolean),
|
|
||||||
_ => self.get_existential_type()
|
|
||||||
}
|
|
||||||
},
|
|
||||||
_ => Univ(Bottom),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_signature(&mut self, sig: &Signature) -> TypeVariable {
|
|
||||||
use self::TypeVariable::Univ;
|
|
||||||
use self::UVar::{Unit, Function};
|
|
||||||
let return_type = sig.type_anno.as_ref().map(|anno| self.from_anno(&anno)).unwrap_or_else(|| { self.get_existential_type() });
|
|
||||||
if sig.params.len() == 0 {
|
|
||||||
Univ(Function(Box::new(Univ(Unit)), Box::new(return_type)))
|
|
||||||
} else {
|
|
||||||
let mut output_type = return_type;
|
|
||||||
for p in sig.params.iter() {
|
|
||||||
let p_type = p.1.as_ref().map(|anno| self.from_anno(anno)).unwrap_or_else(|| { self.get_existential_type() });
|
|
||||||
output_type = Univ(Function(Box::new(p_type), Box::new(output_type)));
|
|
||||||
}
|
|
||||||
output_type
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum TypeVariable {
|
|
||||||
Univ(UVar),
|
|
||||||
Exist(u64),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
|
||||||
pub enum UVar {
|
|
||||||
Integer,
|
|
||||||
Float,
|
|
||||||
Boolean,
|
|
||||||
Unit,
|
|
||||||
Function(Box<TypeVariable>, Box<TypeVariable>),
|
|
||||||
Bottom,
|
|
||||||
}
|
|
||||||
|
|
||||||
type TypeCheckResult = Result<TypeVariable, String>;
|
|
||||||
|
|
||||||
// from Niko's talk
|
// from Niko's talk
|
||||||
/* fn type_check(expression, expected_ty) -> Ty {
|
/* fn type_check(expression, expected_ty) -> Ty {
|
||||||
@ -132,13 +17,273 @@ type TypeCheckResult = Result<TypeVariable, String>;
|
|||||||
fn bare_type_check(exprssion, expected_type) -> Ty { ... }
|
fn bare_type_check(exprssion, expected_type) -> Ty { ... }
|
||||||
*/
|
*/
|
||||||
|
|
||||||
// from https://www.youtube.com/watch?v=il3gD7XMdmA
|
/* H-M ALGO NOTES
|
||||||
// typeInfer :: Expr a -> Matching (Type a)
|
from https://www.youtube.com/watch?v=il3gD7XMdmA
|
||||||
// unify :: Type a -> Type b -> Matching (Type c)
|
(also check out http://dev.stephendiehl.com/fun/006_hindley_milner.html)
|
||||||
|
|
||||||
|
typeInfer :: Expr a -> Matching (Type a)
|
||||||
|
unify :: Type a -> Type b -> Matching (Type c)
|
||||||
|
|
||||||
|
(Matching a) is a monad in which unification is done
|
||||||
|
|
||||||
|
ex:
|
||||||
|
|
||||||
|
typeInfer (If e1 e2 e3) = do
|
||||||
|
t1 <- typeInfer e1
|
||||||
|
t2 <- typeInfer e2
|
||||||
|
t3 <- typeInfer e3
|
||||||
|
_ <- unify t1 BoolType
|
||||||
|
unify t2 t3 -- b/c t2 and t3 have to be the same type
|
||||||
|
|
||||||
|
typeInfer (Const (ConstInt _)) = IntType -- same for other literals
|
||||||
|
|
||||||
|
--function application
|
||||||
|
typeInfer (Apply f x) = do
|
||||||
|
tf <- typeInfer f
|
||||||
|
tx <- typeInfer x
|
||||||
|
case tf of
|
||||||
|
FunctionType t1 t2 -> do
|
||||||
|
_ <- unify t1 tx
|
||||||
|
return t2
|
||||||
|
_ -> fail "Not a function"
|
||||||
|
|
||||||
|
--type annotation
|
||||||
|
typeInfer (Typed x t) = do
|
||||||
|
tx <- typeInfer x
|
||||||
|
unify tx t
|
||||||
|
|
||||||
|
--variable and let expressions - need to pass around a map of variable names to types here
|
||||||
|
typeInfer :: [ (Var, Type Var) ] -> Expr Var -> Matching (Type Var)
|
||||||
|
|
||||||
|
typeInfer ctx (Var x) = case (lookup x ctx) of
|
||||||
|
Just t -> return t
|
||||||
|
Nothing -> fail "Unknown variable"
|
||||||
|
|
||||||
|
--let x = e1 in e2
|
||||||
|
typeInfer ctx (Let x e1 e2) = do
|
||||||
|
t1 <- typeInfer ctx e1
|
||||||
|
typeInfer ((x, t1) :: ctx) e2
|
||||||
|
|
||||||
|
--lambdas are complicated (this represents ʎx.e)
|
||||||
|
typeInfer ctx (Lambda x e) = do
|
||||||
|
t1 <- allocExistentialVariable
|
||||||
|
t2 <- typeInfer ((x, t1) :: ctx) e
|
||||||
|
return $ FunctionType t1 t2 -- ie. t1 -> t2
|
||||||
|
|
||||||
|
|
||||||
|
--to solve the problem of map :: (a -> b) -> [a] -> [b]
|
||||||
|
when we use a variable whose type has universal tvars, convert those universal
|
||||||
|
tvars to existential ones
|
||||||
|
-and each distinct universal tvar needs to map to the same existential type
|
||||||
|
|
||||||
|
-so we change typeinfer:
|
||||||
|
typeInfer ctx (Var x) = do
|
||||||
|
case (lookup x ctx) of
|
||||||
|
Nothing -> ...
|
||||||
|
Just t -> do
|
||||||
|
let uvars = nub (toList t) -- nub removes duplicates, so this gets unique universally quantified variables
|
||||||
|
evars <- mapM (const allocExistentialVariable) uvars
|
||||||
|
let varMap = zip uvars evars
|
||||||
|
let vixVar varMap v = fromJust $ lookup v varMap
|
||||||
|
return (fmap (fixVar varMap) t)
|
||||||
|
|
||||||
|
--how do we define unify??
|
||||||
|
|
||||||
|
-recall, type signature is:
|
||||||
|
unify :: Type a -> Type b -> Matching (Type c)
|
||||||
|
unify BoolType BoolType = BoolType --easy, same for all constants
|
||||||
|
unify (FunctionType t1 t2) (FunctionType t3 t4) = do
|
||||||
|
t5 <- unify t1 t3
|
||||||
|
t6 <- unify t2 t4
|
||||||
|
return $ FunctionType t5 t6
|
||||||
|
unify (TVar a) (TVar b) = if a == b then TVar a else fail
|
||||||
|
--existential types can be assigned another type at most once
|
||||||
|
--some complicated stuff about hanlding existential types
|
||||||
|
--everything else is a type error
|
||||||
|
unify a b = fail
|
||||||
|
|
||||||
|
|
||||||
|
SKOLEMIZATION - how you prevent an unassigned existential type variable from leaking!
|
||||||
|
-before a type gets to global scope, replace all unassigned existential vars w/ new unique universal
|
||||||
|
type variables
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
pub enum Type {
  TVar(TypeVar),
  TConst(TypeConst),
  TFunc(Box<Type>, Box<Type>),
}

#[derive(Debug, PartialEq, Clone)]
pub enum TypeVar {
  Univ(Rc<String>),
  Exist(u64),
}
impl TypeVar {
  fn univ(label: &str) -> TypeVar {
    TypeVar::Univ(Rc::new(label.to_string()))
  }
}

#[derive(Debug, PartialEq, Clone)]
pub enum TypeConst {
  UserT(Rc<String>),
  Integer,
  Float,
  StringT,
  Boolean,
  Unit,
  Bottom,
}

type TypeCheckResult = Result<Type, String>;

#[derive(Debug, PartialEq, Eq, Hash)]
struct PathSpecifier(Rc<String>);

#[derive(Debug, PartialEq, Clone)]
struct TypeContextEntry {
  ty: Type,
  constant: bool
}

pub struct TypeContext {
  symbol_table: HashMap<PathSpecifier, TypeContextEntry>,
  evar_table: HashMap<u64, Type>,
  existential_type_label_count: u64
}

impl TypeContext {
  pub fn new() -> TypeContext {
    TypeContext {
      symbol_table: HashMap::new(),
      evar_table: HashMap::new(),
      existential_type_label_count: 0,
    }
  }
  pub fn add_symbols(&mut self, ast: &AST) {
    use self::Declaration::*;
    use self::Type::*;
    use self::TypeConst::*;

    for statement in ast.0.iter() {
      match *statement {
        Statement::ExpressionStatement(_) => (),
        Statement::Declaration(ref decl) => match *decl {
          FuncSig(_) => (),
          Impl { .. } => (),
          TypeDecl(ref type_constructor, ref body) => {
            for variant in body.0.iter() {
              let (spec, ty) = match variant {
                &Variant::UnitStruct(ref data_constructor) => {
                  let spec = PathSpecifier(data_constructor.clone());
                  let ty = TConst(UserT(type_constructor.name.clone()));
                  (spec, ty)
                },
                &Variant::TupleStruct(ref data_construcor, ref args) => {
                  //TODO fix
                  let arg = args.get(0).unwrap();
                  let type_arg = self.from_anno(arg);
                  let spec = PathSpecifier(data_construcor.clone());
                  let ty = TFunc(Box::new(type_arg), Box::new(TConst(UserT(type_constructor.name.clone()))));
                  (spec, ty)
                },
                &Variant::Record(_, _) => unimplemented!(),
              };
              let entry = TypeContextEntry { ty, constant: true };
              self.symbol_table.insert(spec, entry);
            }
          },
          TypeAlias { .. } => (),
          Binding { ref name, ref constant, ref expr } => {
            let spec = PathSpecifier(name.clone());
            let ty = expr.1.as_ref()
              .map(|ty| self.from_anno(ty))
              .unwrap_or_else(|| { self.alloc_existential_type() }); // this call to alloc_existential is OK b/c a binding only ever has one type, so if the annotation is absent, it's fine to just make one de novo
            let entry = TypeContextEntry { ty, constant: *constant };
            self.symbol_table.insert(spec, entry);
          },
          FuncDecl(ref signature, _) => {
            let spec = PathSpecifier(signature.name.clone());
            let ty = self.from_signature(signature);
            let entry = TypeContextEntry { ty, constant: true };
            self.symbol_table.insert(spec, entry);
          },
        }
      }
    }
  }
  fn lookup(&mut self, binding: &Rc<String>) -> Option<TypeContextEntry> {
    let key = PathSpecifier(binding.clone());
    self.symbol_table.get(&key).map(|entry| entry.clone())
  }
  pub fn debug_symbol_table(&self) -> String {
    format!("Symbol table:\n {:?}\nEvar table:\n{:?}", self.symbol_table, self.evar_table)
  }
  fn alloc_existential_type(&mut self) -> Type {
    let ret = Type::TVar(TypeVar::Exist(self.existential_type_label_count));
    self.existential_type_label_count += 1;
    ret
  }

  fn from_anno(&mut self, anno: &TypeName) -> Type {
    use self::Type::*;
    use self::TypeConst::*;

    match anno {
      &TypeName::Singleton(TypeSingletonName { ref name, .. }) => {
        match name.as_ref().as_ref() {
          "Int" => TConst(Integer),
          "Float" => TConst(Float),
          "Bool" => TConst(Boolean),
          "String" => TConst(StringT),
          s => TVar(TypeVar::Univ(Rc::new(format!("{}", s)))),
        }
      },
      &TypeName::Tuple(ref items) => {
        if items.len() == 1 {
          TConst(Unit)
        } else {
          TConst(Bottom)
        }
      }
    }
  }

  fn from_signature(&mut self, sig: &Signature) -> Type {
    use self::Type::*;
    use self::TypeConst::*;

    //TODO this won't work properly until you make sure that all (universal) type vars in the function have the same existential type var
    // actually this should never even put existential types into the symbol table at all

    //this will crash if more than 5 arg function is used
    let names = vec!["a", "b", "c", "d", "e", "f"];
    let mut idx = 0;

    let mut get_type = || { let q = TVar(TypeVar::Univ(Rc::new(format!("{}", names.get(idx).unwrap())))); idx += 1; q };

    let return_type = sig.type_anno.as_ref().map(|anno| self.from_anno(&anno)).unwrap_or_else(|| { get_type() });
    if sig.params.len() == 0 {
      TFunc(Box::new(TConst(Unit)), Box::new(return_type))
    } else {
      let mut output_type = return_type;
      for p in sig.params.iter() {
        let p_type = p.1.as_ref().map(|anno| self.from_anno(anno)).unwrap_or_else(|| { get_type() });
        output_type = TFunc(Box::new(p_type), Box::new(output_type));
      }
      output_type
    }
  }
  pub fn type_check(&mut self, ast: &AST) -> TypeCheckResult {
-   let mut last = TypeVariable::Univ(UVar::Unit);
+   use self::Type::*;
+   use self::TypeConst::*;
+
+   let mut last = TConst(Unit);
    for statement in ast.0.iter() {
      match statement {
        &Statement::Declaration(ref _decl) => {
@@ -151,61 +296,150 @@ impl TypeContext {
      }
    }
    Ok(last)
  }

  fn infer(&mut self, expr: &Expression) -> TypeCheckResult {
-   use self::ExpressionType::*;
-   use self::TypeVariable::*;
-
-   Ok(match (&expr.0, &expr.1) {
-     (ref _t, &Some(ref anno)) => {
-       //TODO make this better,
-       self.from_anno(anno)
-     },
-     (&IntLiteral(_), _) => Univ(UVar::Integer),
-     (&FloatLiteral(_), _) => Univ(UVar::Float),
-     (&BoolLiteral(_), _) => Univ(UVar::Boolean),
-     (&Variable(ref name), _) => self.lookup(name).map(|entry| entry.type_var)
-       .ok_or(format!("Couldn't find {}", name))?,
-     (&BinExp(ref op, box ref lhs, box ref rhs), _) => {
-       let _f_type = self.infer_op(op);
-       let _lhs_type = self.infer(&lhs);
-       let _rhs_type = self.infer(&rhs);
-       unimplemented!()
-     },
-     (&Call { ref f, ref arguments }, _) => {
-       let f_type = self.infer(&*f)?;
-       let arg_type = self.infer(arguments.get(0).unwrap())?; // TODO fix later
-       match f_type {
-         Univ(UVar::Function(box t1, box ret_type)) => {
-           let _ = self.unify(&t1, &arg_type)?;
-           ret_type
-         },
-         _ => return Err(format!("Type error"))
-       }
-     },
-     _ => Univ(UVar::Unit),
-   })
- }
+   match (&expr.0, &expr.1) {
+     (exprtype, &Some(ref anno)) => {
+       let tx = self.infer_no_anno(exprtype)?;
+       let ty = self.from_anno(anno);
+       self.unify(tx, ty)
+     },
+     (exprtype, &None) => self.infer_no_anno(exprtype),
+   }
+ }
+
+ fn infer_no_anno(&mut self, ex: &ExpressionType) -> TypeCheckResult {
+   use self::ExpressionType::*;
+   use self::Type::*;
+   use self::TypeConst::*;
+
+   Ok(match ex {
+     &IntLiteral(_) => TConst(Integer),
+     &FloatLiteral(_) => TConst(Float),
+     &StringLiteral(_) => TConst(StringT),
+     &BoolLiteral(_) => TConst(Boolean),
+     &Value(ref name, _) => {
+       self.lookup(name)
+         .map(|entry| entry.ty)
+         .ok_or(format!("Couldn't find {}", name))?
+     },
+     &BinExp(ref op, ref lhs, ref rhs) => {
+       let t_lhs = self.infer(lhs)?;
+       match self.infer_op(op)? {
+         TFunc(t1, t2) => {
+           let _ = self.unify(t_lhs, *t1)?;
+           let t_rhs = self.infer(rhs)?;
+           let x = *t2;
+           match x {
+             TFunc(t3, t4) => {
+               let _ = self.unify(t_rhs, *t3)?;
+               *t4
+             },
+             _ => return Err(format!("Not a function type either")),
+           }
+         },
+         _ => return Err(format!("Op {:?} is not a function type", op)),
+       }
+     },
+     &Call { ref f, ref arguments } => {
+       let tf = self.infer(f)?;
+       let targ = self.infer(arguments.get(0).unwrap())?;
+       match tf {
+         TFunc(box t1, box t2) => {
+           let _ = self.unify(t1, targ)?;
+           t2
+         },
+         _ => return Err(format!("Not a function!")),
+       }
+     },
+     _ => TConst(Bottom),
+   })
+ }

- fn infer_op(&mut self, _op: &Operation) -> TypeCheckResult {
-   use self::TypeVariable::*;
-   Ok(
-     Univ(UVar::Function(
-       Box::new(Univ(UVar::Integer)),
-       Box::new(Univ(UVar::Function(
-         Box::new(Univ(UVar::Integer)),
-         Box::new(Univ(UVar::Integer))
-       )))
-     ))
-   )
- }
+ fn infer_op(&mut self, op: &Operation) -> TypeCheckResult {
+   use self::Type::*;
+   use self::TypeConst::*;
+   macro_rules! binoptype {
+     ($lhs:expr, $rhs:expr, $out:expr) => { TFunc(Box::new($lhs), Box::new(TFunc(Box::new($rhs), Box::new($out)))) };
+   }
+
+   Ok(match (*op.0).as_ref() {
+     "+" => binoptype!(TConst(Integer), TConst(Integer), TConst(Integer)),
+     "++" => binoptype!(TConst(StringT), TConst(StringT), TConst(StringT)),
+     "-" => binoptype!(TConst(Integer), TConst(Integer), TConst(Integer)),
+     "*" => binoptype!(TConst(Integer), TConst(Integer), TConst(Integer)),
+     "/" => binoptype!(TConst(Integer), TConst(Integer), TConst(Integer)),
+     "%" => binoptype!(TConst(Integer), TConst(Integer), TConst(Integer)),
+     _ => TConst(Bottom)
+   })
+ }

- fn unify(&mut self, t1: &TypeVariable, t2: &TypeVariable) -> TypeCheckResult {
-   if t1 == t2 {
-     Ok(t1.clone())
-   } else {
-     Err(format!("Types {:?} and {:?} don't unify", t1, t2))
-   }
- }
+ fn unify(&mut self, t1: Type, t2: Type) -> TypeCheckResult {
+   use self::Type::*;
+   use self::TypeVar::*;
+
+   println!("Calling unify with `{:?}` and `{:?}`", t1, t2);
+
+   match (&t1, &t2) {
+     (&TConst(ref c1), &TConst(ref c2)) if c1 == c2 => Ok(TConst(c1.clone())),
+     (&TFunc(ref t1, ref t2), &TFunc(ref t3, ref t4)) => {
+       let t5 = self.unify(*t1.clone().clone(), *t3.clone().clone())?;
+       let t6 = self.unify(*t2.clone().clone(), *t4.clone().clone())?;
+       Ok(TFunc(Box::new(t5), Box::new(t6)))
+     },
+     (&TVar(Univ(ref a)), &TVar(Univ(ref b))) => {
+       if a == b {
+         Ok(TVar(Univ(a.clone())))
+       } else {
+         Err(format!("Couldn't unify universal types {} and {}", a, b))
+       }
+     },
+     //the interesting case!!
+     (&TVar(Exist(ref a)), ref t2) => {
+       let x = self.evar_table.get(a).map(|x| x.clone());
+       match x {
+         Some(ref t1) => self.unify(t1.clone().clone(), t2.clone().clone()),
+         None => {
+           self.evar_table.insert(*a, t2.clone().clone());
+           Ok(t2.clone().clone())
+         }
+       }
+     },
+     (ref t1, &TVar(Exist(ref a))) => {
+       let x = self.evar_table.get(a).map(|x| x.clone());
+       match x {
+         Some(ref t2) => self.unify(t2.clone().clone(), t1.clone().clone()),
+         None => {
+           self.evar_table.insert(*a, t1.clone().clone());
+           Ok(t1.clone().clone())
+         }
+       }
+     },
+     _ => Err(format!("Types {:?} and {:?} don't unify", t1, t2))
+   }
+ }
}
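The new `unify` above resolves existential type variables through `evar_table`: the first type an unsolved existential is unified against becomes its recorded solution, and every later use must agree with that record. A minimal standalone sketch of that table-driven idea (not code from this diff; it assumes a cut-down `Ty` with only `Int`, `Bool`, and `Exist`):

use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Int,
    Bool,
    Exist(u64), // an unsolved existential variable
}

struct Ctx {
    evar_table: HashMap<u64, Ty>,
}

impl Ctx {
    fn unify(&mut self, t1: Ty, t2: Ty) -> Result<Ty, String> {
        match (t1, t2) {
            // identical types unify with themselves
            (a, b) if a == b => Ok(a),
            // an unsolved existential takes on whatever it is unified with;
            // a solved one defers to its recorded solution
            (Ty::Exist(n), other) | (other, Ty::Exist(n)) => {
                match self.evar_table.get(&n).cloned() {
                    Some(solved) => self.unify(solved, other),
                    None => {
                        self.evar_table.insert(n, other.clone());
                        Ok(other)
                    }
                }
            }
            (a, b) => Err(format!("{:?} and {:?} don't unify", a, b)),
        }
    }
}

fn main() {
    let mut ctx = Ctx { evar_table: HashMap::new() };
    // the first concrete use of existential #0 pins it to Int...
    assert_eq!(ctx.unify(Ty::Exist(0), Ty::Int), Ok(Ty::Int));
    // ...so a later attempt to treat it as Bool is rejected
    assert!(ctx.unify(Ty::Exist(0), Ty::Bool).is_err());
    println!("evar table: {:?}", ctx.evar_table);
}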
#[cfg(test)]
mod tests {
  use super::{Type, TypeVar, TypeConst, TypeContext};
  use super::Type::*;
  use super::TypeConst::*;
  use schala_lang::parsing::{parse, tokenize};

  macro_rules! type_test {
    ($input:expr, $correct:expr) => {
      {
        let mut tc = TypeContext::new();
        let ast = parse(tokenize($input)).0.unwrap();
        tc.add_symbols(&ast);
        assert_eq!($correct, tc.type_check(&ast).unwrap())
      }
    }
  }

  #[test]
  fn basic_inference() {
    type_test!("30", TConst(Integer));
    type_test!("fn x(a: Int): Bool {}; x(1)", TConst(Boolean));
  }
}
src/schala_lang/typechecking.rs (new file, 254 lines)
@@ -0,0 +1,254 @@
use std::rc::Rc;
use std::collections::HashMap;
use std::char;
use std::fmt;
use std::fmt::Write;

use itertools::Itertools;

use schala_lang::parsing;

pub struct TypeContext {
  type_var_count: u64,
  bindings: HashMap<Rc<String>, Type>,
}

#[derive(Debug, PartialEq, Clone)]
pub enum Type {
  Const(TConst),
  Sum(Vec<Type>),
  Func(Box<Type>, Box<Type>),
  UVar(String),
  EVar(u64),
  Void
}

impl fmt::Display for Type {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    use self::Type::*;
    match self {
      &Const(ref c) => write!(f, "{:?}", c),
      &Sum(ref types) => {
        write!(f, "(")?;
        for item in types.iter().map(|ty| Some(ty)).intersperse(None) {
          match item {
            Some(ty) => write!(f, "{}", ty)?,
            None => write!(f, ",")?,
          };
        }
        write!(f, ")")
      },
      &Func(ref a, ref b) => write!(f, "{} -> {}", a, b),
      &UVar(ref s) => write!(f, "{}_u", s),
      &EVar(ref n) => write!(f, "{}_e", n),
      &Void => write!(f, "Void")
    }
  }
}

#[derive(Default)]
struct UVarGenerator {
  n: u32,
}
impl UVarGenerator {
  fn new() -> UVarGenerator {
    UVarGenerator::default()
  }
  fn next(&mut self) -> Type {
    //TODO handle this in the case where someone wants to make a function with more than 26 variables
    let s = format!("{}", unsafe { char::from_u32_unchecked(self.n + ('a' as u32)) });
    self.n += 1;
    Type::UVar(s)
  }
}

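The TODO in `next` refers to the counter being mapped straight onto 'a'..'z', so a 27th fresh variable would run past the alphabet. A standalone sketch (an illustration only, not code from this diff) of the usual fix, spreadsheet-style names a, b, ..., z, aa, ab, ...:

// Generate an unbounded stream of lowercase variable names.
fn uvar_name(mut n: u32) -> String {
    let mut s = String::new();
    loop {
        // prepend the least-significant "digit" in bijective base 26
        s.insert(0, (b'a' + (n % 26) as u8) as char);
        if n < 26 {
            break;
        }
        n = n / 26 - 1;
    }
    s
}

fn main() {
    assert_eq!(uvar_name(0), "a");
    assert_eq!(uvar_name(25), "z");
    assert_eq!(uvar_name(26), "aa");
    assert_eq!(uvar_name(27), "ab");
    assert_eq!(uvar_name(26 * 27), "aaa");
    println!("first few: {:?}", (0..30).map(uvar_name).collect::<Vec<_>>());
}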
#[derive(Debug, PartialEq, Clone)]
pub enum TConst {
  Unit,
  Int,
  Float,
  StringT,
  Bool,
  Custom(String),
}

impl parsing::TypeName {
  fn to_type(&self) -> TypeResult<Type> {
    use self::parsing::TypeSingletonName;
    use self::parsing::TypeName::*;
    use self::Type::*; use self::TConst::*;
    Ok(match self {
      &Tuple(_) => return Err(format!("Tuples not yet implemented")),
      &Singleton(ref name) => match name {
        &TypeSingletonName { ref name, .. } => match &name[..] {
          "Int" => Const(Int),
          "Float" => Const(Float),
          "Bool" => Const(Bool),
          "String" => Const(StringT),
          n => Const(Custom(n.to_string()))
        }
      }
    })
  }
}

pub type TypeResult<T> = Result<T, String>;

impl TypeContext {
  pub fn new() -> TypeContext {
    TypeContext { bindings: HashMap::new(), type_var_count: 0 }
  }
  pub fn fresh(&mut self) -> Type {
    let ret = self.type_var_count;
    self.type_var_count += 1;
    Type::EVar(ret)
  }
}

impl TypeContext {
  pub fn add_top_level_types(&mut self, ast: &parsing::AST) -> TypeResult<()> {
    use self::parsing::TypeName;
    use self::parsing::Declaration::*;
    use self::Type::*;
    for statement in ast.0.iter() {
      if let &self::parsing::Statement::Declaration(ref decl) = statement {
        match decl {
          &FuncSig(ref signature) | &FuncDecl(ref signature, _) => {
            let mut uvar_gen = UVarGenerator::new();
            let mut ty: Type = signature.type_anno.as_ref().map(|name: &TypeName| name.to_type()).unwrap_or_else(|| {Ok(uvar_gen.next())} )?;
            for &(_, ref type_name) in signature.params.iter().rev() {
              let arg_type = type_name.as_ref().map(|name| name.to_type()).unwrap_or_else(|| {Ok(uvar_gen.next())} )?;
              ty = Func(bx!(arg_type), bx!(ty));
            }
            self.bindings.insert(signature.name.clone(), ty);
          },
          _ => ()
        }
      }
    }
    Ok(())
  }
  pub fn debug_symbol_table(&self) -> String {
    let mut output = format!("Symbols\n");
    for (sym, ty) in &self.bindings {
      write!(output, "{} : {}\n", sym, ty).unwrap();
    }
    output
  }
}

impl TypeContext {
  pub fn type_check_ast(&mut self, ast: &parsing::AST) -> TypeResult<Type> {
    use self::Type::*; use self::TConst::*;
    let mut ret_type = Const(Unit);
    for statement in ast.0.iter() {
      ret_type = self.type_check_statement(statement)?;
    }
    Ok(ret_type)
  }
  fn type_check_statement(&mut self, statement: &parsing::Statement) -> TypeResult<Type> {
    use self::parsing::Statement::*;
    match statement {
      &ExpressionStatement(ref expr) => self.infer(expr),
      &Declaration(ref decl) => self.add_declaration(decl),
    }
  }
  fn add_declaration(&mut self, decl: &parsing::Declaration) -> TypeResult<Type> {
    use self::parsing::Declaration::*;
    use self::Type::*;
    match decl {
      &Binding { ref name, ref expr, .. } => {
        let ty = self.infer(expr)?;
        self.bindings.insert(name.clone(), ty);
      },
      _ => return Err(format!("other formats not done"))
    }
    Ok(Void)
  }
  fn infer(&mut self, expr: &parsing::Expression) -> TypeResult<Type> {
    use self::parsing::Expression;
    match expr {
      &Expression(ref e, Some(ref anno)) => {
        let anno_ty = anno.to_type()?;
        let ty = self.infer_exprtype(&e)?;
        self.unify(ty, anno_ty)
      },
      &Expression(ref e, None) => self.infer_exprtype(e)
    }
  }
  fn infer_exprtype(&mut self, expr: &parsing::ExpressionType) -> TypeResult<Type> {
    use self::parsing::ExpressionType::*;
    use self::Type::*; use self::TConst::*;
    match expr {
      &IntLiteral(_) => Ok(Const(Int)),
      &FloatLiteral(_) => Ok(Const(Float)),
      &StringLiteral(_) => Ok(Const(StringT)),
      &BoolLiteral(_) => Ok(Const(Bool)),
      &BinExp(ref op, ref lhs, ref rhs) => { /* remember there are both the haskell convention talk and the write you a haskell ways to do this! */
        match op.get_type()? {
          Func(box t1, box Func(box t2, box t3)) => {
            let lhs_ty = self.infer(lhs)?;
            let rhs_ty = self.infer(rhs)?;
            self.unify(t1, lhs_ty)?;
            self.unify(t2, rhs_ty)?;
            Ok(t3)
          },
          other => Err(format!("{:?} is not a binary function type", other))
        }
      },
      &PrefixExp(ref op, ref expr) => match op.get_type()? {
        Func(box t1, box t2) => {
          let expr_ty = self.infer(expr)?;
          self.unify(t1, expr_ty)?;
          Ok(t2)
        },
        other => Err(format!("{:?} is not a prefix op function type", other))
      },
      &Value(ref name) => {
        match self.bindings.get(name) {
          Some(ty) => Ok(ty.clone()),
          None => Err(format!("No binding found for variable: {}", name)),
        }
      },
      &Call { ref f, ref arguments } => {
        let mut tf = self.infer(f)?;
        for arg in arguments.iter() {
          match tf {
            Func(box t, box rest) => {
              let t_arg = self.infer(arg)?;
              self.unify(t, t_arg)?;
              tf = rest;
            },
            other => return Err(format!("Function call failed to unify; last type: {:?}", other)),
          }
        }
        Ok(tf)
      },
      &TupleLiteral(ref expressions) => {
        let mut types = vec![];
        for expr in expressions {
          types.push(self.infer(expr)?);
        }
        Ok(Sum(types))
      },
      /*
      Index {
        indexee: Box<Expression>,
        indexers: Vec<Expression>,
      },
      IfExpression(Box<Expression>, Vec<Statement>, Option<Vec<Statement>>),
      MatchExpression(Box<Expression>, Vec<MatchArm>),
      ForExpression
      */
      _ => Err(format!("Type not yet implemented"))
    }
  }
  fn unify(&mut self, t1: Type, t2: Type) -> TypeResult<Type> {
    use self::Type::*;// use self::TConst::*;
    match (t1, t2) {
      (Const(ref a), Const(ref b)) if a == b => Ok(Const(a.clone())),
      (a, b) => Err(format!("Types {:?} and {:?} don't unify", a, b))
    }
  }
}
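Both `add_top_level_types` and the `Call` arm of `infer_exprtype` treat an n-argument function as nested single-argument arrows: a signature is folded right-to-left into `Func` types, and a call peels one arrow per argument. A standalone sketch of that fold-then-peel pattern, using a simplified `Ty` rather than the module's real types:

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Int,
    Bool,
    Func(Box<Ty>, Box<Ty>),
}

// fn f(a: Int, b: Int): Bool  becomes  Int -> (Int -> Bool)
fn curried(params: &[Ty], ret: Ty) -> Ty {
    params.iter().rev().cloned().fold(ret, |acc, p| Ty::Func(Box::new(p), Box::new(acc)))
}

// Check a call by consuming one argument type per arrow.
fn apply(mut f: Ty, args: &[Ty]) -> Result<Ty, String> {
    for arg in args {
        match f {
            Ty::Func(param, rest) if *param == *arg => f = *rest,
            other => return Err(format!("can't apply {:?} to {:?}", other, arg)),
        }
    }
    Ok(f)
}

fn main() {
    let f = curried(&[Ty::Int, Ty::Int], Ty::Bool);
    assert_eq!(f, Ty::Func(Box::new(Ty::Int), Box::new(Ty::Func(Box::new(Ty::Int), Box::new(Ty::Bool)))));
    // full application yields the return type; a mismatched argument is rejected
    assert_eq!(apply(f.clone(), &[Ty::Int, Ty::Int]), Ok(Ty::Bool));
    assert!(apply(f, &[Ty::Bool]).is_err());
}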
@@ -1,36 +0,0 @@
-use rocket;
-use rocket::response::NamedFile;
-use rocket_contrib::Json;
-use schala_lang;
-use language::{ProgrammingLanguageInterface, EvalOptions};
-
-#[get("/")]
-fn index() -> Result<NamedFile, ()> {
-  NamedFile::open("static/index.html").map_err(|_| ())
-}
-
-#[get("/bundle.js")]
-fn js_bundle() -> Result<NamedFile, ()> {
-  NamedFile::open("static/bundle.js").map_err(|_| ())
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-struct Input {
-  source: String,
-}
-
-#[derive(Serialize, Deserialize)]
-struct Output {
-  text: String,
-}
-
-#[post("/input", format = "application/json", data = "<input>")]
-fn interpreter_input(input: Json<Input>) -> Json<Output> {
-  let mut schala = schala_lang::Schala::new();
-  let code_output = schala.evaluate_in_repl(&input.source, &EvalOptions::default());
-  Json(Output { text: code_output.to_string() })
-}
-
-pub fn web_main() {
-  rocket::ignite().mount("/", routes![index, js_bundle, interpreter_input]).launch();
-}
@@ -1,6 +1,6 @@
 const React = require("react");
 const ReactDOM = require("react-dom");
-const request = require("request");
+const superagent = require("superagent");

 const serverAddress = "http://localhost:8000";

@@ -20,13 +20,16 @@ class CodeArea extends React.Component {
     console.log("Event", this.state.value);
     const source = this.state.value;

-    const options = {
-      url: `${serverAddress}/input`,
-      json: true,
-      body: { source }
-    };
-    request.post(options, (error, response, body) => {
-      this.setState({lastOutput: body.text})
+    superagent.post(`${serverAddress}/input`)
+      .send({ source })
+      .set("accept", "json")
+      .end((error, response) => {
+        if (response) {
+          console.log("Resp", response);
+          this.setState({lastOutput: response.body.text})
+        } else {
+          console.error("Error: ", error);
+        }
     });
   }

@@ -11,7 +11,7 @@
   "browserify": "^14.4.0",
   "react": "^15.6.1",
   "react-dom": "^15.6.1",
-  "request": "^2.82.0",
+  "superagent": "^3.6.3",
   "uglify-js": "^3.1.1"
 },
 "babel": {
static/yarn.lock (302 lines changed)

Lockfile churn from swapping `request` for `superagent` in static/package.json; the visible hunks amount to the following.

Removed entries (request and its transitive dependencies): ajv 5.2.2, asn1 0.2.3, assert-plus 1.0.0, aws-sign2 0.7.0, aws4 1.6.0, bcrypt-pbkdf 1.0.1, boom 4.3.1, boom 5.2.0, caseless 0.12.0, co 4.6.0, cryptiles 3.1.2, dashdash 1.14.1, ecc-jsbn 0.1.1, extsprintf 1.3.0, fast-deep-equal 1.0.0, forever-agent 0.6.1, getpass 0.1.7, har-schema 2.0.0, har-validator 5.0.3, hawk 6.0.2, hoek 4.2.0, http-signature 1.2.0, is-typedarray 1.0.0, isstream 0.1.2, jsbn 0.1.1, json-schema 0.2.3, json-schema-traverse 0.3.1, json-stable-stringify@^1.0.1 1.0.1, json-stringify-safe 5.0.1, jsprim 1.4.1, oauth-sign 0.8.2, performance-now 2.1.0, request 2.82.0, sntp 2.0.2, sshpk 1.13.1, stringstream 0.0.5, tough-cookie 2.3.2, tunnel-agent 0.6.0, tweetnacl 0.14.5.

Added entries (superagent and its dependencies): component-emitter 1.2.1, cookiejar 2.1.1, debug@^3.1.0 3.1.0, formidable 1.1.1, methods 1.1.2, mime 1.4.1, superagent 3.6.3 (depending on component-emitter, cookiejar, debug, extend, form-data, formidable, methods, mime, qs, readable-stream).

Retargeted version specifiers on entries kept by both graphs: combined-stream (^1.0.5, ~1.0.5 -> ^1.0.5), core-util-is (1.0.2, ~1.0.0 -> ~1.0.0), extend (~3.0.1 -> ^3.0.0), form-data (~2.3.1 -> ^2.3.1), mime-types (^2.1.12, ~2.1.17 -> ^2.1.12), punycode (^1.3.2, ^1.4.1 -> ^1.3.2), qs (~6.5.1 -> ^6.5.1), readable-stream (adds ^2.0.5).

The view is cut off partway through the final hunk, @@ -1901,18 +1709,6 @@ util@0.10.3, util@~0.10.1:.
|
dependencies:
|
||||||
inherits "2.0.1"
|
inherits "2.0.1"
|
||||||
|
|
||||||
uuid@^3.1.0:
|
|
||||||
version "3.1.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04"
|
|
||||||
|
|
||||||
verror@1.10.0:
|
|
||||||
version "1.10.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400"
|
|
||||||
dependencies:
|
|
||||||
assert-plus "^1.0.0"
|
|
||||||
core-util-is "1.0.2"
|
|
||||||
extsprintf "^1.2.0"
|
|
||||||
|
|
||||||
vm-browserify@~0.0.1:
|
vm-browserify@~0.0.1:
|
||||||
version "0.0.4"
|
version "0.0.4"
|
||||||
resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-0.0.4.tgz#5d7ea45bbef9e4a6ff65f95438e0a87c357d5a73"
|
resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-0.0.4.tgz#5d7ea45bbef9e4a6ff65f95438e0a87c357d5a73"
|
||||||