Fix tokenization bug

Author: greg
Date:   2016-01-16 10:40:17 -08:00
Parent: c4ab1ed105
Commit: 032d01c9f5


@@ -119,7 +119,7 @@ pub fn tokenize(input: &str) -> Option<Vec<Token>> {
             let mut buffer = String::with_capacity(20);
             buffer.push(c);
             loop {
-                if iter.peek().map_or(false, |x| ends_identifier(x)) {
+                if iter.peek().map_or(true, |x| ends_identifier(x)) {
                     break;
                 } else {
                     buffer.push(iter.next().unwrap());
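
The one-character change above is the whole fix: with map_or(false, ...), an identifier that runs to the very end of the input never triggers the break, because peek() returns None and the closure is never consulted; the loop then falls through to iter.next().unwrap() on an exhausted iterator and panics. map_or(true, ...) treats end of input as terminating the identifier. Below is a minimal, self-contained sketch of the fixed loop; the scan_identifier wrapper, the main() driver, and the body of ends_identifier are illustrative assumptions, not code from this repository.

use std::iter::Peekable;
use std::str::Chars;

// Hypothetical predicate for illustration: any character that is not part of
// an identifier (alphanumeric or '_') ends it.
fn ends_identifier(c: &char) -> bool {
    !(c.is_alphanumeric() || *c == '_')
}

// Sketch of the fixed scanning loop from tokenize().
fn scan_identifier(first: char, iter: &mut Peekable<Chars<'_>>) -> String {
    let mut buffer = String::with_capacity(20);
    buffer.push(first);
    loop {
        // map_or(true, ...): if peek() is None (input exhausted), break here
        // instead of reaching iter.next().unwrap(), which would panic.
        if iter.peek().map_or(true, |x| ends_identifier(x)) {
            break;
        } else {
            buffer.push(iter.next().unwrap());
        }
    }
    buffer
}

fn main() {
    let mut iter = "foo".chars().peekable();
    let first = iter.next().unwrap();
    // With the old map_or(false, ...) this call would panic once the iterator
    // ran out; with map_or(true, ...) it returns "foo".
    assert_eq!(scan_identifier(first, &mut iter), "foo");
}

Making the decision at peek() rather than next() is what lets the loop stop without consuming the character that ends the identifier, so the default for the empty case has to be "stop" as well.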