Fix interspersing of newlines in tokenizer infra
commit e9c538da49
parent 3295242115
@@ -82,7 +82,6 @@ impl ProgrammingLanguageInterface for Schala {
 }
 };

-
 //symbol table
 match self.type_context.add_top_level_types(&ast) {
 Ok(()) => (),
@@ -111,9 +111,11 @@ pub fn tokenize(input: &str) -> Vec<Token> {
 let mut tokens: Vec<Token> = Vec::new();

 let mut input = input.lines().enumerate()
+.intersperse((0, "\n"))
 .flat_map(|(line_idx, ref line)| {
 line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
-}).peekable();
+})
+.peekable();

 while let Some((line_idx, ch_idx, c)) = input.next() {
 let cur_tok_type = match c {
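Why the intersperse is needed, shown as a standalone sketch (illustrative only, not Schala's actual tokenizer; it assumes the itertools crate for intersperse, since Iterator::intersperse is unstable in std): str::lines() strips every line terminator, so after the flat_map the character stream contains no '\n' at all and the tokenizer can never see an end of line. Re-inserting a synthetic (0, "\n") item between the enumerated lines puts a newline character back into the stream.

    // Standalone sketch of the interspersing approach (assumes the itertools crate;
    // the names below are illustrative and not taken from Schala's tokenizer).
    use itertools::intersperse;

    fn main() {
        let input = "let a = 1\nlet b = 2";

        // lines() drops the '\n' terminators, so a synthetic (0, "\n") item is
        // interspersed between lines; flat_map then yields a '\n' char there.
        let chars: Vec<(usize, usize, char)> =
            intersperse(input.lines().enumerate(), (0, "\n"))
                .flat_map(|(line_idx, line)| {
                    line.chars()
                        .enumerate()
                        .map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
                })
                .collect();

        // The newline reappears in the stream (tagged with the dummy line index 0),
        // so a tokenizer looping over `chars` can emit a newline token for it.
        assert!(chars.contains(&(0, 0, '\n')));
        for (line_idx, ch_idx, ch) in chars {
            println!("line {} char {}: {:?}", line_idx, ch_idx, ch);
        }
    }

Without the interspersed item, the stream built from "let a = 1\nlet b = 2" contains no '\n' at all, so tokens from adjacent lines run together with nothing marking the line break.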
@@ -4,7 +4,7 @@ fn main() {
 a + b
 }

-foo
+//foo

 print(main())
