Include line count in token debug

parent 48e7c0be03
commit 1056be12e7
@@ -42,7 +42,7 @@ impl ProgrammingLanguageInterface for Schala {
     let mut output = ReplOutput::default();
     let tokens = tokenizing::tokenize(input);
     if options.debug_tokens {
-      let token_string = tokens.iter().map(|t| format!("{:?}<{}>", t.token_type, t.offset)).join(", ");
+      let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
       output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", token_string)));
     }
 
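A standalone sketch, not part of the commit, of what the new per-token trace entries look like. The TokenType variants and offsets below are made up for illustration, and a plain Vec join stands in for whatever join the project calls on the iterator:

    #[derive(Debug)]
    enum TokenType { Identifier(String), LParen }

    #[derive(Debug)]
    struct Token { token_type: TokenType, offset: (usize, usize) }

    fn main() {
        let tokens = vec![
            Token { token_type: TokenType::Identifier("x".into()), offset: (3, 14) },
            Token { token_type: TokenType::LParen, offset: (3, 15) },
        ];
        // Each token renders as Debug<L:line,C:column>, e.g. Identifier("x")<L:3,C:14>.
        let token_string = tokens.iter()
            .map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1))
            .collect::<Vec<_>>()
            .join(", ");
        println!("{}", token_string); // Identifier("x")<L:3,C:14>, LParen<L:3,C:15>
    }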
@@ -135,7 +135,7 @@ impl Parser {
     self.tokens.peek().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
   }
   fn peek_with_token_offset(&mut self) -> Token {
-    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { token_type: TokenType::EOF, offset: 0})
+    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { token_type: TokenType::EOF, offset: (0,0)})
   }
   fn next(&mut self) -> TokenType {
     self.tokens.next().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
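A minimal self-contained sketch of the EOF fallback after this change: peeking past the end of the token stream synthesizes an EOF token whose offset is now the tuple (0, 0). The types are trimmed-down stand-ins for the real ones:

    #[derive(Debug, Clone)]
    enum TokenType { EOF }

    #[derive(Debug, Clone)]
    struct Token { token_type: TokenType, offset: (usize, usize) }

    fn main() {
        // An exhausted token stream, as the parser would see at end of input.
        let mut tokens = Vec::<Token>::new().into_iter().peekable();
        let peeked = tokens.peek().map(|t: &Token| t.clone())
            .unwrap_or(Token { token_type: TokenType::EOF, offset: (0, 0) });
        assert_eq!(peeked.offset, (0, 0));
        println!("{:?}", peeked);
    }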
@@ -70,7 +70,7 @@ lazy_static! {
 #[derive(Debug, Clone)]
 pub struct Token {
   pub token_type: TokenType,
-  pub offset: usize,
+  pub offset: (usize, usize),
 }
 
 impl Token {
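With the widened field, line/column accessors are one-liners over the tuple. The helpers below are purely illustrative and not part of the commit; the (line, column) reading of the tuple is taken from the <L:..,C:..> debug format above, and the stand-in struct elides token_type to stay self-contained:

    #[derive(Debug, Clone)]
    struct Token { offset: (usize, usize) } // token_type elided for brevity

    impl Token {
        fn line(&self) -> usize { self.offset.0 }
        fn column(&self) -> usize { self.offset.1 }
    }

    fn main() {
        let t = Token { offset: (2, 7) };
        println!("L:{} C:{}", t.line(), t.column()); // L:2 C:7
    }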
@@ -104,7 +104,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
 
   //let mut input: CharIter = input.chars().enumerate().peekable();
 
-  while let Some((_, idx, c)) = input.next() {
+  while let Some((line_idx, ch_idx, c)) = input.next() {
     let cur_tok_type = match c {
       '#' => {
         if let Some(&(_, _, '{')) = input.peek() {
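The loop now binds both elements of the position, so the input iterator has to carry a line index alongside the character index. One way such a (line, column, char) stream could be built; this is a guess at the shape the commit implies, not the project's actual iterator, and lines() drops the newline characters themselves:

    fn main() {
        let source = "let a = 1\nlet b = 2";
        // Pair every character with its (line, column) position, both zero-based.
        let mut input = source.lines().enumerate()
            .flat_map(|(line_idx, line)| {
                line.chars().enumerate().map(move |(ch_idx, c)| (line_idx, ch_idx, c))
            })
            .peekable();
        while let Some((line_idx, ch_idx, c)) = input.next() {
            println!("L:{} C:{} {:?}", line_idx, ch_idx, c);
        }
    }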
@@ -129,7 +129,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
       c if is_operator(&c) => handle_operator(c, &mut input),
       unknown => Error(format!("Unexpected character: {}", unknown)),
     };
-    tokens.push(Token { token_type: cur_tok_type, offset: idx });
+    tokens.push(Token { token_type: cur_tok_type, offset: (line_idx, ch_idx) });
   }
   tokens
 }