Token offsets
parent ab8e24a276
commit 9b3b5c5541
@@ -97,7 +97,7 @@ fn load_source<'a>(input: &'a str, handle: &mut Schala, _comp: Option<&mut UnfinishedComput
 fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
   let tokens = tokenizing::tokenize(input);
   comp.map(|comp| {
-    let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.kind, t.offset.0, t.offset.1)).join(", ");
+    let token_string = tokens.iter().map(|t| t.to_string_with_metadata()).join(", ");
     comp.add_artifact(TraceArtifact::new("tokens", token_string));
   });
 
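This first hunk replaces an ad-hoc format! call with the existing Token::to_string_with_metadata helper (updated further down in this diff), so the "tokens" debug artifact has a single source of truth for how a token renders. A minimal standalone sketch of the pattern; MiniToken and its string kind are hypothetical stand-ins, and the iterator .join comes from the itertools crate, which the surrounding file evidently uses:

    use itertools::Itertools; // provides .join(", ") on plain iterators

    #[derive(Debug, Clone)]
    struct MiniToken { kind: String, line_num: usize, char_num: usize }

    impl MiniToken {
        // One rendering helper, reused by every consumer that wants positions.
        fn to_string_with_metadata(&self) -> String {
            format!("{}(L:{},c:{})", self.kind, self.line_num, self.char_num)
        }
    }

    fn main() {
        let tokens = vec![
            MiniToken { kind: "Identifier".into(), line_num: 0, char_num: 0 },
            MiniToken { kind: "Operator".into(), line_num: 0, char_num: 2 },
        ];
        let token_string = tokens.iter().map(|t| t.to_string_with_metadata()).join(", ");
        println!("{}", token_string); // Identifier(L:0,c:0), Operator(L:0,c:2)
    }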
@@ -135,8 +135,8 @@ fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut
 }
 
 fn format_parse_error(error: parsing::ParseError, handle: &mut Schala) -> String {
-  let line_num = error.token.offset.0;
-  let ch = error.token.offset.1;
+  let line_num = error.token.line_num;
+  let ch = error.token.char_num;
   let line_from_program = handle.source_reference.get_line(line_num);
   let location_pointer = format!("{}^", " ".repeat(ch));
 
@@ -51,7 +51,7 @@ impl TokenHandler {
   fn new(tokens: Vec<Token>) -> TokenHandler {
     let end_of_file = match tokens.last() {
       None => (0, 0),
-      Some(s) => s.offset.clone(),
+      Some(t) => (t.line_num, t.char_num)
     };
     let tokens = tokens.into_iter().peekable();
     TokenHandler { tokens, end_of_file }
@@ -61,10 +61,10 @@ impl TokenHandler {
     self.tokens.peek().map(|ref t| { t.kind.clone() }).unwrap_or(TokenKind::EOF)
   }
   fn peek(&mut self) -> Token {
-    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, offset: self.end_of_file})
+    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
   }
   fn next(&mut self) -> Token {
-    self.tokens.next().unwrap_or(Token { kind: TokenKind::EOF, offset: self.end_of_file})
+    self.tokens.next().unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
   }
 }
 
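The TokenHandler hunks preserve a useful invariant: when the token stream runs dry, peek and next synthesize an EOF token at the recorded end-of-file position instead of returning an Option, so the parser never branches on None. A reduced but runnable sketch of that fallback (TokenKind::Word is a made-up kind for illustration):

    use std::iter::Peekable;
    use std::vec::IntoIter;

    #[derive(Debug, Clone)]
    enum TokenKind { Word(String), EOF }

    #[derive(Debug, Clone)]
    struct Token { kind: TokenKind, line_num: usize, char_num: usize }

    struct TokenHandler {
        tokens: Peekable<IntoIter<Token>>,
        end_of_file: (usize, usize), // position of the last real token
    }

    impl TokenHandler {
        fn new(tokens: Vec<Token>) -> TokenHandler {
            let end_of_file = match tokens.last() {
                None => (0, 0),
                Some(t) => (t.line_num, t.char_num),
            };
            TokenHandler { tokens: tokens.into_iter().peekable(), end_of_file }
        }
        // Past the end? Hand back a synthetic EOF token at end_of_file.
        fn next(&mut self) -> Token {
            self.tokens.next().unwrap_or(Token {
                kind: TokenKind::EOF,
                line_num: self.end_of_file.0,
                char_num: self.end_of_file.1,
            })
        }
    }

    fn main() {
        let mut h = TokenHandler::new(vec![
            Token { kind: TokenKind::Word("hi".into()), line_num: 3, char_num: 7 },
        ]);
        h.next();         // consumes the only real token
        let t = h.next(); // synthetic EOF at (3, 7)
        assert_eq!(t.line_num, 3);
        assert_eq!(t.char_num, 7);
    }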
@@ -88,12 +88,8 @@ lazy_static! {
 #[derive(Debug, Clone)]
 pub struct Token {
   pub kind: TokenKind,
-  pub offset: (usize, usize),
-}
-
-#[derive(Debug, Clone)]
-pub struct TokenMetadata {
-  pub offset: (usize, usize)
+  pub line_num: usize,
+  pub char_num: usize
 }
 
 impl Token {
@@ -104,7 +100,7 @@ impl Token {
     }
   }
   pub fn to_string_with_metadata(&self) -> String {
-    format!("{}(L:{},c:{})", self.kind, self.offset.0, self.offset.1)
+    format!("{}(L:{},c:{})", self.kind, self.line_num, self.char_num)
   }
 
   pub fn get_kind(&self) -> TokenKind {
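Splitting the positional (usize, usize) tuple into named line_num/char_num fields (and deleting the now-redundant TokenMetadata struct) makes construction sites self-documenting and unlocks Rust's field-init shorthand, which the tokenize hunks below rely on. A tiny illustration; the Word kind is hypothetical:

    #[derive(Debug, Clone)]
    enum TokenKind { Word(String) }

    #[derive(Debug, Clone)]
    struct Token { kind: TokenKind, line_num: usize, char_num: usize }

    fn main() {
        let (line_num, char_num) = (2, 5);
        let kind = TokenKind::Word("let".into());
        // Field-init shorthand: locals share the field names, so no `field: value`
        // repetition, and swapping line and column now means getting a *name* wrong,
        // not merely an order.
        let t = Token { kind, line_num, char_num };
        println!("{:?}", t);
    }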
@@ -129,7 +125,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
     })
     .peekable();
 
-  while let Some((line_idx, ch_idx, c)) = input.next() {
+  while let Some((line_num, char_num, c)) = input.next() {
     let cur_tok_kind = match c {
       '/' => match input.peek().map(|t| t.2) {
         Some('/') => {
@@ -172,7 +168,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
       c if is_operator(&c) => handle_operator(c, &mut input),
       unknown => Error(format!("Unexpected character: {}", unknown)),
     };
-    tokens.push(Token { kind: cur_tok_kind, offset: (line_idx, ch_idx) });
+    tokens.push(Token { kind: cur_tok_kind, line_num, char_num });
   }
   tokens
 }
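Renaming the loop bindings from line_idx/ch_idx to line_num/char_num is what lets the final push use the shorthand Token { kind: cur_tok_kind, line_num, char_num }. The tokenizer consumes a flattened iterator of (line, column, character) triples; a guess at how such an iterator is assembled from source text (Schala's actual construction may differ):

    fn main() {
        let source = "let x\n= 9";
        // Flatten the lines into (line_num, char_num, c) triples, mirroring the
        // iterator the tokenize loop consumes.
        let mut input = source
            .lines()
            .enumerate()
            .flat_map(|(line_num, line)| {
                line.chars()
                    .enumerate()
                    .map(move |(char_num, c)| (line_num, char_num, c))
            })
            .peekable(); // peekable, so lookahead like input.peek().map(|t| t.2) works

        while let Some((line_num, char_num, c)) = input.next() {
            println!("L:{},c:{} -> {:?}", line_num, char_num, c);
        }
    }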