Add number tokenization

This commit is contained in:
greg 2015-07-19 16:53:37 -07:00
parent 8e3774ffca
commit 4f17d5a0dc

View File

@ -15,7 +15,7 @@ enum Token {
LParen, LParen,
RParen, RParen,
Comma, Comma,
NumLiteral(i32), NumLiteral(f64),
StrLiteral(String), StrLiteral(String),
Identifier(String) Identifier(String)
/* Keyword(Keyword) */ //implement in future /* Keyword(Keyword) */ //implement in future
@ -29,7 +29,7 @@ enum ASTNode {
fn repl() { fn repl() {
let mut stdin = io::stdin(); let stdin = io::stdin();
let mut stdout = io::stdout(); let mut stdout = io::stdout();
let mut buf = String::with_capacity(20); let mut buf = String::with_capacity(20);
loop { loop {
@ -37,7 +37,7 @@ fn repl() {
stdout.flush().ok(); stdout.flush().ok();
let line = stdin.lock().read_line(&mut buf); let line = stdin.lock().read_line(&mut buf);
match line { match line {
Ok(n) => { Ok(_) => {
let tokens = tokenize(&buf); let tokens = tokenize(&buf);
buf.clear(); buf.clear();
println!("Tokens: {:?}", tokens); println!("Tokens: {:?}", tokens);
@ -60,6 +60,17 @@ fn tokenize(input: &str) -> Vec<Token> {
let mut tokens = Vec::new(); let mut tokens = Vec::new();
let mut iterator = input.chars().peekable(); let mut iterator = input.chars().peekable();
fn ends_identifier(c: char) -> bool {
match c {
c if char::is_whitespace(c) => true,
',' => true,
';' => true,
'(' => true,
')' => true,
_ => false
}
}
while let Some(c) = iterator.next() { while let Some(c) = iterator.next() {
if char::is_whitespace(c) { if char::is_whitespace(c) {
continue; continue;
@ -93,12 +104,16 @@ fn tokenize(input: &str) -> Vec<Token> {
buffer.push(c); buffer.push(c);
while let Some(x) = iterator.peek().cloned() { while let Some(x) = iterator.peek().cloned() {
if !char::is_alphanumeric(x) { if ends_identifier(x) {
break; break;
} }
buffer.push(iterator.next().unwrap()); buffer.push(iterator.next().unwrap());
} }
tokens.push(Token::Identifier(buffer));
match buffer.parse::<f64>() {
Ok(f) => tokens.push(Token::NumLiteral(f)),
_ => tokens.push(Token::Identifier(buffer))
}
} }
} }
tokens.push(Token::EOF); tokens.push(Token::EOF);