More refactoring in main Schala driver
parent 8610bd7a87
commit 7e899246e9
@@ -23,7 +23,7 @@ use std::cell::RefCell;
 use std::rc::Rc;
 
 use itertools::Itertools;
-use schala_repl::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, UnfinishedComputation, FinishedComputation, ComputationRequest, ComputationResponse, GlobalOutputStats};
+use schala_repl::{ProgrammingLanguageInterface, EvalOptions, ComputationRequest, ComputationResponse, GlobalOutputStats};
 
 macro_rules! bx {
   ($e:expr) => { Box::new($e) }
@@ -42,15 +42,6 @@ mod builtin;
 mod reduced_ast;
 mod eval;
 
-//trace_macros!(true);
-/*
-#[derive(ProgrammingLanguageInterface)]
-#[LanguageName = "Schala"]
-#[SourceFileExtension = "schala"]
-#[PipelineSteps(load_source, tokenizing, parsing(compact,expanded,trace), symbol_table, typechecking, ast_reducing, eval)]
-#[DocMethod = "get_doc"]
-#[HandleCustomInterpreterDirectives = "handle_custom_interpreter_directives"]
-*/
 /// All bits of state necessary to parse and execute a Schala program are stored in this struct.
 /// `state` represents the execution state for the AST-walking interpreter, the other fields
 /// should be self-explanatory.
@@ -90,21 +81,23 @@ impl Schala {
   pub fn new() -> Schala {
     let prelude = include_str!("prelude.schala");
     let mut s = Schala::new_blank_env();
-    s.execute_pipeline(prelude, &EvalOptions::default());
+
+    let request = ComputationRequest { source: prelude.to_string(), debug_requests: vec![] };
+    s.run_computation(request);
     s
   }
 }
 
-fn load_source<'a>(input: &'a str, handle: &mut Schala, _comp: Option<&mut UnfinishedComputation>) -> Result<&'a str, String> {
+fn load_source<'a>(input: &'a str, handle: &mut Schala) -> Result<&'a str, String> {
   handle.source_reference.load_new_source(input);
   Ok(input)
 }
 
-fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
+fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<Vec<tokenizing::Token>, String> {
   let tokens = tokenizing::tokenize(input);
   comp.map(|comp| {
     let token_string = tokens.iter().map(|t| t.to_string_with_metadata()).join(", ");
-    comp.add_artifact(TraceArtifact::new("tokens", token_string));
+    comp.add_artifact(token_string);
   });
 
   let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
@@ -115,7 +108,7 @@ fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut UnfinishedCom
   }
 }
 
-fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
+fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
   use crate::parsing::Parser;
 
   let mut parser = match handle.active_parser.take() {
@@ -127,6 +120,7 @@ fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut
   let trace = parser.format_parse_trace();
 
   comp.map(|comp| {
+    /*
     //TODO need to control which of these debug stages get added
     let opt = comp.cur_debug_options.get(0).map(|s| s.clone());
     match opt {
@@ -136,6 +130,7 @@ fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut
       Some(ref s) if s == "trace" => comp.add_artifact(TraceArtifact::new_parse_trace(trace)),
       Some(ref x) => println!("Bad parsing debug option: {}", x),
     };
+    */
   });
   ast.map_err(|err| format_parse_error(err, handle))
 }
@@ -157,41 +152,40 @@ fn format_parse_error(error: parsing::ParseError, handle: &mut Schala) -> String
 "#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num)
 }
 
-fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
+fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
   let add = handle.symbol_table.borrow_mut().add_top_level_symbols(&input);
   match add {
     Ok(()) => {
-      let artifact = TraceArtifact::new("symbol_table", handle.symbol_table.borrow().debug_symbol_table());
-      comp.map(|comp| comp.add_artifact(artifact));
+      let debug = handle.symbol_table.borrow().debug_symbol_table();
+      comp.map(|comp| comp.add_artifact(debug));
       Ok(input)
     },
     Err(msg) => Err(msg)
   }
 }
 
-fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
+fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
   let result = handle.type_context.typecheck(&input);
 
   comp.map(|comp| {
-    let artifact = TraceArtifact::new("type", match result {
+    comp.add_artifact(match result {
       Ok(ty) => ty.to_string(),
       Err(err) => format!("Type error: {}", err.msg)
    });
-    comp.add_artifact(artifact);
  });
 
  Ok(input)
 }
 
-fn ast_reducing(input: ast::AST, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<reduced_ast::ReducedAST, String> {
+fn ast_reducing(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<reduced_ast::ReducedAST, String> {
   let ref symbol_table = handle.symbol_table.borrow();
   let output = input.reduce(symbol_table);
-  comp.map(|comp| comp.add_artifact(TraceArtifact::new("ast_reducing", format!("{:?}", output))));
+  comp.map(|comp| comp.add_artifact(format!("{:?}", output)));
   Ok(output)
 }
 
-fn eval(input: reduced_ast::ReducedAST, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<String, String> {
-  comp.map(|comp| comp.add_artifact(TraceArtifact::new("value_state", handle.state.debug_print())));
+fn eval(input: reduced_ast::ReducedAST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<String, String> {
+  comp.map(|comp| comp.add_artifact(handle.state.debug_print()));
   let evaluation_outputs = handle.state.evaluate(input, true);
   let text_output: Result<Vec<String>, String> = evaluation_outputs
     .into_iter()
@@ -221,6 +215,15 @@ impl SourceReference {
   }
 }
+
+struct PassDebugArtifact {
+  artifact: Option<String>
+}
+impl PassDebugArtifact {
+  fn add_artifact(&mut self, artifact: String) {
+    self.artifact = Some(artifact)
+  }
+}
 
 impl ProgrammingLanguageInterface for Schala {
   fn get_language_name(&self) -> String { format!("Schala") }
   fn get_source_file_suffix(&self) -> String { format!("schala") }
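Note: the snippet below is an illustrative, standalone sketch (not part of this commit) of the per-pass debug pattern introduced above. A pass receives an Option<&mut PassDebugArtifact> and records a single debug string only when the caller supplied a slot; the pass function and its body here are invented for the example.

// Standalone sketch of the PassDebugArtifact pattern from this diff.
// `fake_pass` is a hypothetical stand-in for a real compiler pass.
struct PassDebugArtifact {
  artifact: Option<String>
}
impl PassDebugArtifact {
  fn add_artifact(&mut self, artifact: String) {
    self.artifact = Some(artifact)
  }
}

fn fake_pass(input: &str, comp: Option<&mut PassDebugArtifact>) -> Result<String, String> {
  let output = input.to_uppercase();
  // Only do the debug-formatting work when the caller asked for it.
  if let Some(comp) = comp {
    comp.add_artifact(format!("pass saw {} bytes", input.len()));
  }
  Ok(output)
}

fn main() {
  let mut debug = PassDebugArtifact { artifact: None };
  let _ = fake_pass("hello", Some(&mut debug)); // slot gets filled
  let _ = fake_pass("hello", None);             // no debug work done
  println!("{:?}", debug.artifact);
}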
@@ -228,13 +231,20 @@ impl ProgrammingLanguageInterface for Schala {
   fn run_computation(&mut self, request: ComputationRequest) -> ComputationResponse {
     let ComputationRequest { source, debug_requests } = request;
 
-    load_source(&source, self, None);
-    let main_output: Result<String, String> = tokenizing(&source, self, None)
-      .and_then(|tokens| parsing(tokens, self, None))
-      .and_then(|ast| symbol_table(ast, self, None))
-      .and_then(|ast| typechecking(ast, self, None))
-      .and_then(|ast| ast_reducing(ast, self, None))
-      .and_then(|reduced_ast| eval(reduced_ast, self, None));
+    let mut token_debug_artifact = None;
+    let mut parsing_debug_artifact = None;
+    let mut symbol_debug_artifact = None;
+    let mut typechecking_debug_artifact = None;
+    let mut reducing_debug_artifact = None;
+    let mut eval_debug_artifact = None;
+
+    load_source(&source, self);
+    let main_output: Result<String, String> = tokenizing(&source, self, token_debug_artifact)
+      .and_then(|tokens| parsing(tokens, self, parsing_debug_artifact))
+      .and_then(|ast| symbol_table(ast, self, symbol_debug_artifact))
+      .and_then(|ast| typechecking(ast, self, typechecking_debug_artifact))
+      .and_then(|ast| ast_reducing(ast, self, reducing_debug_artifact))
+      .and_then(|reduced_ast| eval(reduced_ast, self, eval_debug_artifact));
 
     ComputationResponse {
       main_output,
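Note: the rewritten run_computation keeps the same Result::and_then pipeline shape as before: each stage returns Result<_, String> and the first Err short-circuits the remaining stages. A minimal standalone illustration of that shape follows; the stage names and toy logic are invented for the example.

// Minimal demonstration of the and_then pipeline style used in run_computation:
// each stage returns Result<_, String>, and the first Err skips the rest.
fn stage_a(input: &str) -> Result<Vec<char>, String> {
  Ok(input.chars().collect())
}

fn stage_b(tokens: Vec<char>) -> Result<usize, String> {
  if tokens.is_empty() { Err("empty input".to_string()) } else { Ok(tokens.len()) }
}

fn main() {
  let main_output: Result<String, String> = stage_a("1 + 2")
    .and_then(|tokens| stage_b(tokens))
    .and_then(|n| Ok(format!("{} tokens", n)));
  println!("{:?}", main_output); // Ok("5 tokens")
}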
@@ -189,6 +189,7 @@ pub trait ProgrammingLanguageInterface {
   }
 }
 
+//TODO source can probably be a &str
 pub struct ComputationRequest {
   pub source: String,
   pub debug_requests: Vec<DebugRequest>,
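Note: a hypothetical caller sketch (not part of this commit) showing how the request/response API touched here is driven. Only names visible in this diff are used; the import paths, the example source string, and any ComputationResponse fields hidden behind `..` are assumptions.

// Hypothetical caller sketch; assumes Schala, ComputationRequest and
// ComputationResponse are in scope (crate paths are not shown in this diff).
fn run_one(schala: &mut Schala, src: &str) {
  let request = ComputationRequest {
    source: src.to_string(),   // still a String for now (see TODO above)
    debug_requests: vec![],    // no per-pass debug output requested
  };
  let ComputationResponse { main_output, .. } = schala.run_computation(request);
  match main_output {
    Ok(output) => println!("=> {}", output),
    Err(e) => println!("error: {}", e),
  }
}

fn main() {
  let mut schala = Schala::new();
  run_one(&mut schala, "1 + 2");  // example source string is made up
}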