From 4f17d5a0dc797bd3d7cd5ac0785973faa2337d5a Mon Sep 17 00:00:00 2001 From: greg Date: Sun, 19 Jul 2015 16:53:37 -0700 Subject: [PATCH] Add number tokenizing --- src/main.rs | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/src/main.rs b/src/main.rs index 19a73cf..90f36c4 100644 --- a/src/main.rs +++ b/src/main.rs @@ -15,7 +15,7 @@ enum Token { LParen, RParen, Comma, - NumLiteral(i32), + NumLiteral(f64), StrLiteral(String), Identifier(String) /* Keyword(Keyword) */ //implement in future @@ -29,7 +29,7 @@ enum ASTNode { fn repl() { - let mut stdin = io::stdin(); + let stdin = io::stdin(); let mut stdout = io::stdout(); let mut buf = String::with_capacity(20); loop { @@ -37,7 +37,7 @@ fn repl() { stdout.flush().ok(); let line = stdin.lock().read_line(&mut buf); match line { - Ok(n) => { + Ok(_) => { let tokens = tokenize(&buf); buf.clear(); println!("Tokens: {:?}", tokens); @@ -60,6 +60,17 @@ fn tokenize(input: &str) -> Vec<Token> { let mut tokens = Vec::new(); let mut iterator = input.chars().peekable(); + fn ends_identifier(c: char) -> bool { + match c { + c if char::is_whitespace(c) => true, + ',' => true, + ';' => true, + '(' => true, + ')' => true, + _ => false + } + } + while let Some(c) = iterator.next() { if char::is_whitespace(c) { continue; @@ -93,12 +104,16 @@ fn tokenize(input: &str) -> Vec<Token> { buffer.push(c); while let Some(x) = iterator.peek().cloned() { - if !char::is_alphanumeric(x) { + if ends_identifier(x) { break; } buffer.push(iterator.next().unwrap()); } - tokens.push(Token::Identifier(buffer)); + + match buffer.parse::() {
 + Ok(f) => tokens.push(Token::NumLiteral(f)), + _ => tokens.push(Token::Identifier(buffer)) } } } tokens.push(Token::EOF);