Run rustfmt on schala.rs
parent 052a2feb23
commit 49a50deb04

@@ -1,185 +1,203 @@

schala.rs, as formatted by this commit:

use stopwatch::Stopwatch;

use crate::error::SchalaError;
use crate::{eval, parsing, reduced_ast, symbol_table, tokenizing, typechecking};
use schala_repl::{
    ComputationRequest, ComputationResponse, GlobalOutputStats, LangMetaRequest, LangMetaResponse,
    ProgrammingLanguageInterface,
};

/// All the state necessary to parse and execute a Schala program are stored in this struct.
pub struct Schala {
    /// Holds a reference to the original source code, parsed into line and character
    source_reference: SourceReference,
    /// Execution state for AST-walking interpreter
    state: eval::State<'static>,
    /// Keeps track of symbols and scopes
    symbol_table: symbol_table::SymbolTable,
    /// Contains information for type-checking
    type_context: typechecking::TypeContext<'static>,
    /// Schala Parser
    active_parser: parsing::Parser,
}

impl Schala {
    //TODO implement documentation for language items
    /*
    fn handle_docs(&self, source: String) -> LangMetaResponse {
        LangMetaResponse::Docs {
            doc_string: format!("Schala item `{}` : <<Schala-lang documentation not yet implemented>>", source)
        }
    }
    */
}

impl Schala {
    /// Creates a new Schala environment *without* any prelude.
    fn new_blank_env() -> Schala {
        Schala {
            source_reference: SourceReference::new(),
            symbol_table: symbol_table::SymbolTable::new(),
            state: eval::State::new(),
            type_context: typechecking::TypeContext::new(),
            active_parser: parsing::Parser::new(),
        }
    }

    /// Creates a new Schala environment with the standard prelude, which is defined as ordinary
    /// Schala code in the file `prelude.schala`
    #[allow(clippy::new_without_default)]
    pub fn new() -> Schala {
        let prelude = include_str!("../source-files/prelude.schala");
        let mut env = Schala::new_blank_env();

        let response = env.run_pipeline(prelude);
        if let Err(err) = response {
            panic!("Error in prelude, panicking: {}", err.display());
        }
        env
    }

    /// This is where the actual action of interpreting/compilation happens.
    /// Note: this should eventually use a query-based system for parallelization, cf.
    /// https://rustc-dev-guide.rust-lang.org/overview.html
    fn run_pipeline(&mut self, source: &str) -> Result<String, SchalaError> {
        // 1st stage - tokenization
        // TODO tokenize should return its own error type
        let tokens = tokenizing::tokenize(source);
        if let Some(err) = SchalaError::from_tokens(&tokens) {
            return Err(err);
        }

        //2nd stage - parsing
        self.active_parser.add_new_tokens(tokens);
        let ast = self
            .active_parser
            .parse()
            .map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;

        //Perform all symbol table work
        self.symbol_table
            .process_ast(&ast)
            .map_err(SchalaError::from_symbol_table)?;

        // Typechecking
        // TODO typechecking not working
        let _overall_type = self
            .type_context
            .typecheck(&ast)
            .map_err(SchalaError::from_type_error);

        // Reduce AST - TODO this doesn't produce an error yet, but probably should
        let reduced_ast = reduced_ast::reduce(&ast, &self.symbol_table);

        // Tree-walking evaluator. TODO fix this
        let evaluation_outputs = self.state.evaluate(reduced_ast, true);
        let text_output: Result<Vec<String>, String> = evaluation_outputs.into_iter().collect();

        let text_output: Result<Vec<String>, SchalaError> =
            text_output.map_err(|err| SchalaError::from_string(err, Stage::Evaluation));

        let eval_output: String =
            text_output.map(|v| Iterator::intersperse(v.into_iter(), "\n".to_owned()).collect())?;

        Ok(eval_output)
    }
}
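
// Aside (not part of this file): a minimal sketch of the final join step in
// `run_pipeline`. `Iterator::intersperse` is, at the time of writing, a
// nightly-only API (`iter_intersperse`); the hypothetical helper below shows a
// stable-Rust equivalent using `slice::join`, assuming only that the evaluator
// yields a Vec<String> of per-statement outputs as above.
fn join_outputs(outputs: Vec<String>) -> String {
    // Join the individual evaluation outputs with newlines, as the pipeline does.
    outputs.join("\n")
}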

/// Represents lines of source code
pub(crate) struct SourceReference {
    lines: Option<Vec<String>>,
}

impl SourceReference {
    fn new() -> SourceReference {
        SourceReference { lines: None }
    }

    fn load_new_source(&mut self, source: &str) {
        //TODO this is a lot of heap allocations - maybe there's a way to make it more efficient?
        self.lines = Some(source.lines().map(|s| s.to_string()).collect());
    }

    pub fn get_line(&self, line: usize) -> String {
        self.lines
            .as_ref()
            .and_then(|x| x.get(line).map(|s| s.to_string()))
            .unwrap_or_else(|| "NO LINE FOUND".to_string())
    }
}

#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
pub(crate) enum Stage {
    Tokenizing,
    Parsing,
    Symbols,
    ScopeResolution,
    Typechecking,
    AstReduction,
    Evaluation,
}

fn stage_names() -> Vec<&'static str> {
    vec![
        "tokenizing",
        "parsing",
        "symbol-table",
        "typechecking",
        "ast-reduction",
        "ast-walking-evaluation",
    ]
}

impl ProgrammingLanguageInterface for Schala {
    type Config = ();
    fn language_name() -> String {
        "Schala".to_owned()
    }

    fn source_file_suffix() -> String {
        "schala".to_owned()
    }

    fn run_computation(
        &mut self,
        request: ComputationRequest<Self::Config>,
    ) -> ComputationResponse {
        let ComputationRequest {
            source,
            debug_requests: _,
            config: _,
        } = request;
        self.source_reference.load_new_source(source);
        let sw = Stopwatch::start_new();

        let main_output = self
            .run_pipeline(source)
            .map_err(|schala_err| schala_err.display());

        let global_output_stats = GlobalOutputStats {
            total_duration: sw.elapsed(),
            stage_durations: vec![],
        };

        ComputationResponse {
            main_output,
            global_output_stats,
            debug_responses: vec![],
        }
    }

    fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
        match request {
            LangMetaRequest::StageNames => {
                LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect())
            }
            _ => LangMetaResponse::Custom {
                kind: "not-implemented".to_string(),
                value: "".to_string(),
            },
        }
    }
}
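
// Aside (hypothetical, not from this commit): a usage sketch showing how a REPL
// frontend might drive the `ProgrammingLanguageInterface` impl above. It relies
// only on items visible in this file: `Schala::new`, `request_meta`, and the
// `StageNames` variants of `LangMetaRequest`/`LangMetaResponse`; the function
// name `print_stage_names_sketch` is invented for illustration.
fn print_stage_names_sketch() {
    use schala_repl::{LangMetaRequest, LangMetaResponse};

    // `Schala::new` loads prelude.schala and panics if the prelude fails to run.
    let mut schala = Schala::new();
    if let LangMetaResponse::StageNames(names) = schala.request_meta(LangMetaRequest::StageNames) {
        println!("pipeline stages: {}", names.join(", "));
    }
}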