use stopwatch::Stopwatch; use std::time::Duration; use std::cell::RefCell; use std::rc::Rc; use std::collections::HashSet; use itertools::Itertools; use schala_repl::{ProgrammingLanguageInterface, ComputationRequest, ComputationResponse, LangMetaRequest, LangMetaResponse, GlobalOutputStats, DebugResponse, DebugAsk}; use crate::{ast, reduced_ast, tokenizing, parsing, parser, eval, typechecking, symbol_table, source_map}; pub type SymbolTableHandle = Rc>; pub type SourceMapHandle = Rc>; /// All the state necessary to parse and execute a Schala program are stored in this struct. /// `state` represents the execution state for the AST-walking interpreter, the other fields /// should be self-explanatory. pub struct Schala { source_reference: SourceReference, source_map: SourceMapHandle, state: eval::State<'static>, symbol_table: SymbolTableHandle, resolver: crate::scope_resolution::ScopeResolver<'static>, type_context: typechecking::TypeContext<'static>, active_parser: parsing::Parser, } impl Schala { fn handle_docs(&self, source: String) -> LangMetaResponse { LangMetaResponse::Docs { doc_string: format!("Schala item `{}` : <>", source) } } } impl Schala { /// Creates a new Schala environment *without* any prelude. fn new_blank_env() -> Schala { let source_map = Rc::new(RefCell::new(source_map::SourceMap::new())); let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new(source_map.clone()))); Schala { //TODO maybe these can be the same structure source_reference: SourceReference::new(), symbol_table: symbols.clone(), source_map: source_map.clone(), resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()), state: eval::State::new(), type_context: typechecking::TypeContext::new(), active_parser: parsing::Parser::new(source_map) } } /// Creates a new Schala environment with the standard prelude, which is defined as ordinary /// Schala code in the file `prelude.schala` pub fn new() -> Schala { let prelude = include_str!("prelude.schala"); let mut s = Schala::new_blank_env(); let request = ComputationRequest { source: prelude, debug_requests: HashSet::default() }; let response = s.run_computation(request); if let Err(msg) = response.main_output { panic!("Error in prelude, panicking: {}", msg); } s } fn handle_debug_immediate(&self, request: DebugAsk) -> DebugResponse { use DebugAsk::*; match request { Timing => DebugResponse { ask: Timing, value: format!("Invalid") }, ByStage { stage_name, token } => match &stage_name[..] { "symbol-table" => { let value = self.symbol_table.borrow().debug_symbol_table(); DebugResponse { ask: ByStage { stage_name: format!("symbol-table"), token }, value } }, s => { DebugResponse { ask: ByStage { stage_name: s.to_string(), token: None }, value: format!("Not-implemented") } } } } } } fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result, String> { let tokens = tokenizing::tokenize(input); comp.map(|comp| { let token_string = tokens.iter().map(|t| t.to_string_with_metadata()).join(", "); comp.add_artifact(token_string); }); let errors: Vec = tokens.iter().filter_map(|t| t.get_error()).collect(); if errors.len() == 0 { Ok(tokens) } else { Err(format!("{:?}", errors)) } } fn parsing(input: Vec, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result { use ParsingDebugType::*; let ref mut parser = handle.active_parser; parser.add_new_tokens(input); let ast = parser.parse(); comp.map(|comp| { let debug_format = comp.parsing.as_ref().unwrap_or(&CompactAST); let debug_info = match debug_format { CompactAST => match ast{ Ok(ref ast) => ast.compact_debug(), Err(_) => "Error - see output".to_string(), }, ExpandedAST => match ast{ Ok(ref ast) => ast.expanded_debug(), Err(_) => "Error - see output".to_string(), }, Trace => parser.format_parse_trace(), }; comp.add_artifact(debug_info); }); ast.map_err(|err| format_parse_error(err, &handle.source_reference)) } fn format_parse_error(error: parsing::ParseError, source_reference: &SourceReference) -> String { let line_num = error.token.location.line_num; let ch = error.token.location.char_num; let line_from_program = source_reference.get_line(line_num); let location_pointer = format!("{}^", " ".repeat(ch)); let line_num_digits = format!("{}", line_num).chars().count(); let space_padding = " ".repeat(line_num_digits); let production = match error.production_name { Some(n) => format!("\n(from production \"{}\")", n), None => "".to_string() }; format!(r#" {error_msg}{production} {space_padding} | {line_num} | {} {space_padding} | {} "#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num, production=production ) } fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result { let () = handle.symbol_table.borrow_mut().add_top_level_symbols(&input)?; comp.map(|comp| { let debug = handle.symbol_table.borrow().debug_symbol_table(); comp.add_artifact(debug); }); Ok(input) } fn scope_resolution(mut input: ast::AST, handle: &mut Schala, _com: Option<&mut PassDebugArtifact>) -> Result { let () = handle.resolver.resolve(&mut input)?; Ok(input) } fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result { let result = handle.type_context.typecheck(&input); comp.map(|comp| { comp.add_artifact(match result { Ok(ty) => ty.to_string(), Err(err) => format!("Type error: {}", err.msg) }); }); Ok(input) } fn ast_reducing(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result { let ref symbol_table = handle.symbol_table.borrow(); let output = reduced_ast::reduce(&input, symbol_table); comp.map(|comp| comp.add_artifact(format!("{:?}", output))); Ok(output) } fn eval(input: reduced_ast::ReducedAST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result { comp.map(|comp| comp.add_artifact(handle.state.debug_print())); let evaluation_outputs = handle.state.evaluate(input, true); let text_output: Result, String> = evaluation_outputs .into_iter() .collect(); let eval_output: Result = text_output .map(|v| { v.into_iter().intersperse(format!("\n")).collect() }); eval_output } /// Represents lines of source code struct SourceReference { lines: Option> } impl SourceReference { fn new() -> SourceReference { SourceReference { lines: None } } fn load_new_source(&mut self, source: &str) { //TODO this is a lot of heap allocations - maybe there's a way to make it more efficient? self.lines = Some(source.lines().map(|s| s.to_string()).collect()); } fn get_line(&self, line: usize) -> String { self.lines.as_ref().and_then(|x| x.get(line).map(|s| s.to_string())).unwrap_or(format!("NO LINE FOUND")) } } enum ParsingDebugType { CompactAST, ExpandedAST, Trace } #[derive(Default)] struct PassDebugArtifact { parsing: Option, artifacts: Vec } impl PassDebugArtifact { fn add_artifact(&mut self, artifact: String) { self.artifacts.push(artifact) } } fn stage_names() -> Vec<&'static str> { vec![ "tokenizing", "parsing", "symbol-table", "scope-resolution", "typechecking", "ast-reduction", "ast-walking-evaluation" ] } impl ProgrammingLanguageInterface for Schala { fn get_language_name(&self) -> String { format!("Schala") } fn get_source_file_suffix(&self) -> String { format!("schala") } fn run_computation(&mut self, request: ComputationRequest) -> ComputationResponse { struct PassToken<'a> { schala: &'a mut Schala, stage_durations: &'a mut Vec<(String, Duration)>, sw: &'a Stopwatch, debug_requests: &'a HashSet, debug_responses: &'a mut Vec, } fn output_wrapper(n: usize, func: F, input: Input, token: &mut PassToken) -> Result where F: Fn(Input, &mut Schala, Option<&mut PassDebugArtifact>) -> Result { let stage_names = stage_names(); let cur_stage_name = stage_names[n]; let ask = token.debug_requests.iter().find(|ask| ask.is_for_stage(cur_stage_name)); let parsing = match ask { Some(DebugAsk::ByStage { token, .. }) if cur_stage_name == "parsing" => Some( token.as_ref().map(|token| match &token[..] { "compact" => ParsingDebugType::CompactAST, "expanded" => ParsingDebugType::ExpandedAST, "trace" => ParsingDebugType::Trace, _ => ParsingDebugType::CompactAST, }).unwrap_or(ParsingDebugType::CompactAST) ), _ => None, }; let mut debug_artifact = ask.map(|_| PassDebugArtifact { parsing, ..Default::default() }); let output = func(input, token.schala, debug_artifact.as_mut()); //TODO I think this is not counting the time since the *previous* stage token.stage_durations.push((cur_stage_name.to_string(), token.sw.elapsed())); if let Some(artifact) = debug_artifact { for value in artifact.artifacts.into_iter() { let resp = DebugResponse { ask: ask.unwrap().clone(), value }; token.debug_responses.push(resp); } } output } let ComputationRequest { source, debug_requests } = request; self.source_reference.load_new_source(source); let sw = Stopwatch::start_new(); let mut stage_durations = Vec::new(); let mut debug_responses = Vec::new(); let mut tok = PassToken { schala: self, stage_durations: &mut stage_durations, sw: &sw, debug_requests: &debug_requests, debug_responses: &mut debug_responses }; let main_output: Result = Ok(source) .and_then(|source| output_wrapper(0, tokenizing, source, &mut tok)) .and_then(|tokens| output_wrapper(1, parsing, tokens, &mut tok)) .and_then(|ast| output_wrapper(2, symbol_table, ast, &mut tok)) .and_then(|ast| output_wrapper(3, scope_resolution, ast, &mut tok)) .and_then(|ast| output_wrapper(4, typechecking, ast, &mut tok)) .and_then(|ast| output_wrapper(5, ast_reducing, ast, &mut tok)) .and_then(|reduced_ast| output_wrapper(6, eval, reduced_ast, &mut tok)); let total_duration = sw.elapsed(); let global_output_stats = GlobalOutputStats { total_duration, stage_durations }; let main_output = parser::perform_parsing(source); ComputationResponse { main_output, global_output_stats, debug_responses, } } fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse { match request { LangMetaRequest::StageNames => LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()), LangMetaRequest::Docs { source } => self.handle_docs(source), LangMetaRequest::ImmediateDebug(debug_request) => LangMetaResponse::ImmediateDebug(self.handle_debug_immediate(debug_request)), LangMetaRequest::Custom { .. } => LangMetaResponse::Custom { kind: format!("not-implemented"), value: format!("") } } } }