Compare commits


No commits in common. "ec55e2e8f0d880eaefffbc6705107775092ce831" and "63360e5617637e2983c303e91953311f6c178fec" have entirely different histories.

15 changed files with 105 additions and 100 deletions

View File

@@ -9,10 +9,9 @@ mod operators;
 pub use operators::*;
 pub use visitor::ASTVisitor;
 pub use walker::walk_ast;
-use crate::tokenizing::Location;

 /// An abstract identifier for an AST node
-#[derive(Debug, PartialEq, Eq, Hash, Clone, Default)]
+#[derive(Debug, PartialEq, Eq, Hash, Clone)]
 pub struct ItemId {
     idx: u32,
 }
@@ -58,8 +57,6 @@ pub struct AST {
 pub struct Statement {
     #[derivative(PartialEq="ignore")]
     pub id: ItemId,
-    #[derivative(PartialEq="ignore")]
-    pub location: Location,
     pub kind: StatementKind,
 }
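
The practical effect for code that consumes the AST: a statement's position is no longer a field on Statement, but is looked up by its ItemId in the SourceMap introduced later in this diff. A hypothetical helper (not part of this change) sketches the new access pattern:

    use crate::ast::Statement;
    use crate::schala::SourceMapHandle;
    use crate::source_map::Location;

    // Hypothetical convenience function; the diff itself just inlines this lookup.
    fn location_of(stmt: &Statement, map: &SourceMapHandle) -> Option<Location> {
        // before this change: Some(stmt.location)
        map.borrow().lookup(&stmt.id)
    }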

View File

@@ -22,7 +22,7 @@ impl ASTVisitor for Tester {
 #[test]
 fn foo() {
     let mut tester = Tester { count: 0, float_count: 0 };
-    let ast = quick_ast(r#"
+    let (ast, _) = quick_ast(r#"
 import gragh
 let a = 20 + 84

View File

@@ -1,6 +1,7 @@
 use crate::parsing::ParseError;
 use crate::schala::{SourceReference, Stage};
-use crate::tokenizing::{Token, TokenKind, Location};
+use crate::source_map::Location;
+use crate::tokenizing::{Token, TokenKind};
 use crate::typechecking::TypeError;

 pub struct SchalaError {
@@ -84,7 +85,7 @@ struct Error {
 fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
     let line_num = error.token.location.line_num;
     let ch = error.token.location.char_num;
-    let line_from_program = source_reference.get_line(line_num as usize);
+    let line_from_program = source_reference.get_line(line_num);
     let location_pointer = format!("{}^", " ".repeat(ch));
     let line_num_digits = format!("{}", line_num).chars().count();

View File

@@ -9,8 +9,9 @@ use crate::reduced_ast::reduce;
 use crate::eval::State;

 fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
-    let mut ast = crate::util::quick_ast(input);
-    let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
+    let (mut ast, source_map) = crate::util::quick_ast(input);
+    let source_map = Rc::new(RefCell::new(source_map));
+    let symbol_table = Rc::new(RefCell::new(SymbolTable::new(source_map)));
     symbol_table.borrow_mut().add_top_level_symbols(&ast).unwrap();
     {
         let mut scope_resolver = ScopeResolver::new(symbol_table.clone());

View File

@@ -30,6 +30,7 @@ mod scope_resolution;
 mod builtin;
 mod reduced_ast;
 mod eval;
+mod source_map;
 mod error;
 mod schala;

View File

@@ -166,9 +166,10 @@ use std::str::FromStr;
 use crate::tokenizing::*;
 use crate::tokenizing::Kw::*;
 use crate::tokenizing::TokenKind::*;
-use crate::tokenizing::Location;
+use crate::source_map::Location;
 use crate::ast::*;
+use crate::schala::SourceMapHandle;

 /// Represents a parsing error
 #[derive(Debug)]
@@ -201,6 +202,7 @@ pub struct Parser {
     parse_level: u32,
     restrictions: ParserRestrictions,
     id_store: ItemIdStore,
+    source_map: SourceMapHandle
 }
@@ -245,13 +247,14 @@ impl TokenHandler {
 impl Parser {
     /// Create a new parser initialized with some tokens.
-    pub fn new() -> Parser {
+    pub fn new(source_map: SourceMapHandle) -> Parser {
         Parser {
             token_handler: TokenHandler::new(vec![]),
             parse_record: vec![],
             parse_level: 0,
             restrictions: ParserRestrictions { no_struct_literal: false },
             id_store: ItemIdStore::new(),
+            source_map,
         }
     }
@@ -380,7 +383,8 @@ impl Parser {
         _ => self.expression().map(|expr| { StatementKind::Expression(expr) } ),
     }?;
     let id = self.id_store.fresh();
-    Ok(Statement { kind, id, location: tok.location })
+    self.source_map.borrow_mut().add_location(&id, tok.location);
+    Ok(Statement { kind, id })
 }
@@ -1036,12 +1040,11 @@ impl Parser {
 #[recursive_descent_method]
 fn expr_or_block(&mut self) -> ParseResult<Block> {
-    let tok = self.token_handler.peek();
-    match tok.get_kind() {
+    match self.token_handler.peek_kind() {
         LCurlyBrace => self.block(),
         _ => {
             let expr = self.expression()?;
-            let s = Statement { id: self.id_store.fresh(), location: tok.location, kind: StatementKind::Expression(expr) };
+            let s = Statement { id: self.id_store.fresh(), kind: StatementKind::Expression(expr) };
             Ok(vec![s])
         }
     }
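
Taken together, the parsing changes boil down to: each statement gets a fresh ItemId, the current token's location is written into the shared map, and the Statement itself no longer carries a Location. A simplified stand-in model of that flow (toy types, not the actual parser API):

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    type Id = u32;

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Loc { line_num: usize, char_num: usize }

    struct MiniParser {
        next_id: Id,
        source_map: Rc<RefCell<HashMap<Id, Loc>>>, // stand-in for SourceMapHandle
    }

    impl MiniParser {
        // Mirrors statement(): mint an id, record the token's location in the
        // shared side table, and hand back a statement-like value that only
        // carries the id.
        fn statement(&mut self, tok_loc: Loc) -> Id {
            let id = self.next_id;
            self.next_id += 1;
            self.source_map.borrow_mut().insert(id, tok_loc);
            id
        }
    }

    fn main() {
        let map = Rc::new(RefCell::new(HashMap::new()));
        let mut p = MiniParser { next_id: 0, source_map: map.clone() };
        let id = p.statement(Loc { line_num: 1, char_num: 0 });
        assert_eq!(map.borrow().get(&id), Some(&Loc { line_num: 1, char_num: 0 }));
    }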

View File

@@ -1,8 +1,8 @@
 #![cfg(test)]
+use std::cell::RefCell;
 use std::rc::Rc;
 use std::str::FromStr;
-use crate::tokenizing::Location;

 use super::{Parser, ParseResult, tokenize};
 use crate::ast::*;
 use super::Declaration::*;
@@ -14,8 +14,10 @@ use super::Variant::*;
 use super::ForBody::*;

 fn make_parser(input: &str) -> Parser {
+    let source_map = crate::source_map::SourceMap::new();
+    let source_map_handle = Rc::new(RefCell::new(source_map));
     let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
-    let mut parser = super::Parser::new();
+    let mut parser = super::Parser::new(source_map_handle);
     parser.add_new_tokens(tokens);
     parser
 }
@@ -25,15 +27,6 @@ fn parse(input: &str) -> ParseResult<AST> {
     parser.parse()
 }

-//TODO maybe can be const?
-fn make_statement(kind: StatementKind) -> Statement {
-    Statement {
-        location: Location::default(),
-        id: ItemId::default(),
-        kind,
-    }
-}

 macro_rules! parse_test {
     ($string:expr, $correct:expr) => {
         assert_eq!(parse($string).unwrap(), $correct)
@@ -68,19 +61,19 @@ macro_rules! tys {
 macro_rules! decl {
     ($expr_type:expr) => {
-        make_statement(StatementKind::Declaration($expr_type))
+        Statement { id: ItemIdStore::new_id(), kind: StatementKind::Declaration($expr_type) }
     };
 }

 macro_rules! import {
     ($import_spec:expr) => {
-        make_statement(StatementKind::Import($import_spec))
+        Statement { id: ItemIdStore::new_id(), kind: StatementKind::Import($import_spec) }
     }
 }

 macro_rules! module {
     ($module_spec:expr) => {
-        make_statement(StatementKind::Module($module_spec))
+        Statement { id: ItemIdStore::new_id(), kind: StatementKind::Module($module_spec) }
     }
 }
@@ -106,9 +99,10 @@ macro_rules! prefexp {
     ($op:expr, $lhs:expr) => { PrefixExp(PrefixOp::from_str($op).unwrap(), bx!(Expression::new(ItemIdStore::new_id(), $lhs).into())) }
 }

 macro_rules! exst {
-    ($expr_type:expr) => { make_statement(StatementKind::Expression(Expression::new(ItemIdStore::new_id(), $expr_type).into())) };
-    ($expr_type:expr, $type_anno:expr) => { make_statement(StatementKind::Expression(Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno).into())) };
-    ($op:expr, $lhs:expr, $rhs:expr) => { make_statement(StatementKind::Expression(ex!(binexp!($op, $lhs, $rhs)))) };
+    ($expr_type:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::new(ItemIdStore::new_id(), $expr_type).into()) } };
+    ($expr_type:expr, $type_anno:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno).into()) } };
+    ($op:expr, $lhs:expr, $rhs:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(ex!(binexp!($op, $lhs, $rhs))) } };
     (s $statement_text:expr) => {
         {
             let mut parser = make_parser($statement_text);

View File

@@ -6,10 +6,11 @@ use std::rc::Rc;
 use schala_repl::{ProgrammingLanguageInterface,
                   ComputationRequest, ComputationResponse,
                   LangMetaRequest, LangMetaResponse, GlobalOutputStats};
-use crate::{reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table};
+use crate::{reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table, source_map};
 use crate::error::SchalaError;

 pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
+pub type SourceMapHandle = Rc<RefCell<source_map::SourceMap>>;

 /// All the state necessary to parse and execute a Schala program are stored in this struct.
 /// `state` represents the execution state for the AST-walking interpreter, the other fields
@@ -17,6 +18,7 @@ pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
 #[allow(dead_code)]
 pub struct Schala {
     source_reference: SourceReference,
+    source_map: SourceMapHandle,
     state: eval::State<'static>,
     symbol_table: SymbolTableHandle,
     resolver: crate::scope_resolution::ScopeResolver<'static>,
@@ -38,15 +40,17 @@ impl Schala {
 impl Schala {
     /// Creates a new Schala environment *without* any prelude.
     fn new_blank_env() -> Schala {
-        let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
+        let source_map = Rc::new(RefCell::new(source_map::SourceMap::new()));
+        let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new(source_map.clone())));
         Schala {
             //TODO maybe these can be the same structure
             source_reference: SourceReference::new(),
             symbol_table: symbols.clone(),
+            source_map: source_map.clone(),
             resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
             state: eval::State::new(),
             type_context: typechecking::TypeContext::new(),
-            active_parser: parsing::Parser::new()
+            active_parser: parsing::Parser::new(source_map)
         }
     }

View File

@@ -0,0 +1,39 @@
+use std::collections::HashMap;
+use std::fmt;
+
+use crate::ast::ItemId;
+
+pub type LineNumber = usize;
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub struct Location {
+    pub line_num: LineNumber,
+    pub char_num: usize,
+}
+
+impl fmt::Display for Location {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{}:{}", self.line_num, self.char_num)
+    }
+}
+
+pub struct SourceMap {
+    map: HashMap<ItemId, Location>
+}
+
+impl SourceMap {
+    pub fn new() -> SourceMap {
+        SourceMap { map: HashMap::new() }
+    }
+
+    pub fn add_location(&mut self, id: &ItemId, loc: Location) {
+        self.map.insert(id.clone(), loc);
+    }
+
+    pub fn lookup(&self, id: &ItemId) -> Option<Location> {
+        match self.map.get(id) {
+            Some(loc) => Some(loc.clone()),
+            None => None
+        }
+    }
+}
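
A minimal usage sketch of this new module (hypothetical test inside the crate; it assumes ItemIdStore::new_id() hands out fresh ids, as the parser test macros elsewhere in this diff suggest):

    #[cfg(test)]
    mod source_map_sketch {
        use crate::ast::ItemIdStore;
        use crate::source_map::{Location, SourceMap};

        #[test]
        fn records_and_looks_up_locations() {
            let mut map = SourceMap::new();
            let id = ItemIdStore::new_id();
            let loc = Location { line_num: 3, char_num: 7 };
            map.add_location(&id, loc);
            assert_eq!(map.lookup(&id), Some(loc)); // keyed by AST node id
            assert_eq!(format!("{}", loc), "3:7");  // Display prints "line:char"
        }
    }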

View File

@@ -4,7 +4,8 @@ use std::rc::Rc;
 use std::fmt;
 use std::fmt::Write;

-use crate::tokenizing::LineNumber;
+use crate::schala::SourceMapHandle;
+use crate::source_map::{SourceMap, LineNumber};
 use crate::ast;
 use crate::ast::{ItemId, TypeBody, TypeSingletonName, Signature, Statement, StatementKind, ModuleSpecifier};
 use crate::typechecking::TypeName;
@@ -23,8 +24,6 @@ macro_rules! fqsn {
     };
 }

-mod source_map;
-use source_map::SourceMap;
 mod symbol_trie;
 use symbol_trie::SymbolTrie;
 mod test;
@@ -94,16 +93,16 @@ impl ScopeSegment {
 //cf. p. 150 or so of Language Implementation Patterns
 pub struct SymbolTable {
-    source_map: SourceMap,
+    source_map_handle: SourceMapHandle,
     symbol_path_to_symbol: HashMap<FullyQualifiedSymbolName, Symbol>,
     id_to_fqsn: HashMap<ItemId, FullyQualifiedSymbolName>,
     symbol_trie: SymbolTrie,
 }

 impl SymbolTable {
-    pub fn new() -> SymbolTable {
+    pub fn new(source_map_handle: SourceMapHandle) -> SymbolTable {
         SymbolTable {
-            source_map: SourceMap::new(),
+            source_map_handle,
             symbol_path_to_symbol: HashMap::new(),
             id_to_fqsn: HashMap::new(),
             symbol_trie: SymbolTrie::new()
@@ -199,17 +198,15 @@ impl SymbolTable {
         for statement in statements.iter() {
             match statement {
-                Statement { kind: StatementKind::Declaration(decl), id, location, } => {
-                    self.source_map.add_location(id, *location);
+                Statement { kind: StatementKind::Declaration(decl), id } => {
                     match decl {
                         FuncSig(ref signature) => {
-                            seen_identifiers.try_register(&signature.name, &id, &self.source_map)
+                            seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
                                 .map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
                             self.add_function_signature(signature, scope_name_stack)?
                         }
                         FuncDecl(ref signature, ref body) => {
-                            seen_identifiers.try_register(&signature.name, &id, &self.source_map)
+                            seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
                                 .map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
                             self.add_function_signature(signature, scope_name_stack)?;
                             scope_name_stack.push(ScopeSegment{
@@ -220,21 +217,20 @@ impl SymbolTable {
                             output?
                         },
                         TypeDecl { name, body, mutable } => {
-                            seen_identifiers.try_register(&name.name, &id, &self.source_map)
+                            seen_identifiers.try_register(&name.name, &id, &self.source_map_handle.borrow())
                                 .map_err(|line| format!("Duplicate type definition: {}. It's already defined at {}", name.name, line))?;
                             self.add_type_decl(name, body, mutable, scope_name_stack)?
                         },
                         Binding { name, .. } => {
-                            seen_identifiers.try_register(&name, &id, &self.source_map)
+                            seen_identifiers.try_register(&name, &id, &self.source_map_handle.borrow())
                                 .map_err(|line| format!("Duplicate variable definition: {}. It's already defined at {}", name, line))?;
                             self.add_new_symbol(name, scope_name_stack, SymbolSpec::Binding);
                         }
                         _ => ()
                     }
                 },
-                Statement { kind: StatementKind::Module(ModuleSpecifier { name, contents}), id, location } => {
-                    self.source_map.add_location(id, *location);
-                    seen_modules.try_register(&name, &id, &self.source_map)
+                Statement { kind: StatementKind::Module(ModuleSpecifier { name, contents}), id } => {
+                    seen_modules.try_register(&name, &id, &self.source_map_handle.borrow())
                         .map_err(|line| format!("Duplicate module definition: {}. It's already defined at {}", name, line))?;
                     scope_name_stack.push(ScopeSegment { name: name.clone() });
                     let output = self.add_symbols_from_scope(contents, scope_name_stack);
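
One detail worth calling out: try_register previously received &self.source_map (a plain &SourceMap), and the call sites above now pass &self.source_map_handle.borrow(). Assuming try_register still expects a &SourceMap, as the old call sites suggest, this works because Ref<SourceMap> dereferences to SourceMap. A standalone illustration of that coercion (toy names, not the real symbol-table code):

    use std::cell::RefCell;
    use std::rc::Rc;

    struct SourceMap;

    fn takes_map(_map: &SourceMap) {}

    fn demo(handle: Rc<RefCell<SourceMap>>) {
        // `handle.borrow()` yields a Ref<SourceMap>; taking a reference to it
        // deref-coerces to &SourceMap, so no explicit `&*` is needed.
        takes_map(&handle.borrow());
    }

    fn main() {
        demo(Rc::new(RefCell::new(SourceMap)));
    }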

View File

@ -1,27 +0,0 @@
use std::collections::HashMap;
use crate::ast::ItemId;
use crate::tokenizing::Location;
//TODO rename this type to make its purpose clearer
pub struct SourceMap {
map: HashMap<ItemId, Location>
}
impl SourceMap {
pub fn new() -> SourceMap {
SourceMap { map: HashMap::new() }
}
pub(crate) fn add_location(&mut self, id: &ItemId, loc: Location) {
self.map.insert(id.clone(), loc);
}
pub(crate) fn lookup(&self, id: &ItemId) -> Option<Location> {
match self.map.get(id) {
Some(loc) => Some(loc.clone()),
None => None
}
}
}

View File

@@ -1,10 +1,14 @@
 #![cfg(test)]
+use std::cell::RefCell;
+use std::rc::Rc;
 use super::*;
 use crate::util::quick_ast;

 fn add_symbols_from_source(src: &str) -> (SymbolTable, Result<(), String>) {
-    let ast = quick_ast(src);
-    let mut symbol_table = SymbolTable::new();
+    let (ast, source_map) = quick_ast(src);
+    let source_map = Rc::new(RefCell::new(source_map));
+    let mut symbol_table = SymbolTable::new(source_map);
     let result = symbol_table.add_top_level_symbols(&ast);
     (symbol_table, result)
 }

View File

@@ -1,20 +1,7 @@
 use itertools::Itertools;
 use std::{iter::{Iterator, Peekable}, convert::TryFrom, rc::Rc, fmt};
-use std::convert::TryInto;
-
-pub type LineNumber = u32;
-
-#[derive(Debug, Clone, Copy, PartialEq, Default)]
-pub struct Location {
-    pub(crate) line_num: LineNumber,
-    pub(crate) char_num: usize,
-}
-
-impl fmt::Display for Location {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}:{}", self.line_num, self.char_num)
-    }
-}
+use crate::source_map::Location;

 #[derive(Debug, PartialEq, Clone)]
 pub enum TokenKind {
@@ -108,7 +95,7 @@ impl TryFrom<&str> for Kw {
 #[derive(Debug, Clone, PartialEq)]
 pub struct Token {
     pub kind: TokenKind,
-    pub(crate) location: Location,
+    pub location: Location,
 }

 impl Token {
@@ -184,7 +171,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
             c if is_operator(&c) => handle_operator(c, &mut input),
             unknown => Error(format!("Unexpected character: {}", unknown)),
         };
-        let location = Location { line_num: line_num.try_into().unwrap(), char_num };
+        let location = Location { line_num, char_num };
         tokens.push(Token { kind: cur_tok_kind, location });
     }
     tokens

View File

@@ -464,7 +464,7 @@ mod typechecking_tests {
 macro_rules! assert_type_in_fresh_context {
     ($string:expr, $type:expr) => {
         let mut tc = TypeContext::new();
-        let ref ast = crate::util::quick_ast($string);
+        let (ref ast, _) = crate::util::quick_ast($string);
         let ty = tc.typecheck(ast).unwrap();
         assert_eq!(ty, $type)
     }

View File

@@ -48,13 +48,18 @@ impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {
 /// this is intended for use in tests, and does no error-handling whatsoever
 #[allow(dead_code)]
-pub fn quick_ast(input: &str) -> crate::ast::AST {
+pub fn quick_ast(input: &str) -> (crate::ast::AST, crate::source_map::SourceMap) {
+    use std::cell::RefCell;
+    use std::rc::Rc;
+
+    let source_map = crate::source_map::SourceMap::new();
+    let source_map_handle = Rc::new(RefCell::new(source_map));
     let tokens = crate::tokenizing::tokenize(input);
-    let mut parser = crate::parsing::Parser::new();
+    let mut parser = crate::parsing::Parser::new(source_map_handle.clone());
     parser.add_new_tokens(tokens);
     let output = parser.parse();
     std::mem::drop(parser);
-    output.unwrap()
+    (output.unwrap(), Rc::try_unwrap(source_map_handle).map_err(|_| ()).unwrap().into_inner())
 }

 #[allow(unused_macros)]
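
The tuple's second element is recovered with Rc::try_unwrap, which only succeeds because std::mem::drop(parser) has already released the parser's clone of the handle, leaving a single owner. A standalone illustration of that unwrap pattern (generic toy code, not tied to SourceMap):

    use std::cell::RefCell;
    use std::rc::Rc;

    fn unwrap_sole_owner<T>(handle: Rc<RefCell<T>>) -> T {
        // Rc::try_unwrap only succeeds when the strong count is 1, i.e. after
        // every clone of the handle (here: the parser's copy) has been dropped.
        Rc::try_unwrap(handle)
            .map_err(|_| ()) // the Err variant (Rc<RefCell<T>>) is not Debug for arbitrary T, so map it away
            .unwrap()
            .into_inner()
    }

    fn main() {
        let handle = Rc::new(RefCell::new(vec![1, 2, 3]));
        let clone = handle.clone();
        drop(clone); // mirrors std::mem::drop(parser)
        assert_eq!(unwrap_sole_owner(handle), vec![1, 2, 3]);
    }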