Convert tokenizer to large match statement

In the hopes of making it shorter
This commit is contained in:
greg 2017-01-01 18:18:55 -08:00
parent 9b62efc830
commit e5ee072b00

View File

@@ -60,9 +60,7 @@ pub fn tokenize(input: &str) -> TokenizeResult {
let mut iter = input.chars().peekable(); let mut iter = input.chars().peekable();
while let Some(c) = iter.next() { while let Some(c) = iter.next() {
if char::is_whitespace(c) && c != '\n' { if c == '#' {
continue;
} else if c == '#' {
while let Some(c) = iter.next() { while let Some(c) = iter.next() {
if c == '\n' { if c == '\n' {
break; break;
@@ -70,19 +68,15 @@ pub fn tokenize(input: &str) -> TokenizeResult {
} }
} }
let cur_tok = if c == '\n' { let cur_tok = match c {
Newline c if char::is_whitespace(c) && c != '\n' => continue,
} else if c == ';' { '\n' => Newline,
Semicolon ';' => Semicolon,
} else if c == '(' { '(' => LParen,
LParen ')' => RParen,
} else if c == ')' { ':' => Colon,
RParen ',' => Comma,
} else if c == ':' { '"' => {
Colon
} else if c == ',' {
Comma
} else if c == '"' {
let mut buffer = String::with_capacity(20); let mut buffer = String::with_capacity(20);
loop { loop {
// TODO handle string escapes, interpolation // TODO handle string escapes, interpolation
@@ -93,7 +87,23 @@ pub fn tokenize(input: &str) -> TokenizeResult {
} }
} }
StrLiteral(buffer) StrLiteral(buffer)
} else if c == '.' && !iter.peek().map_or(false, |x| is_digit(x)) { }
c if !char::is_alphanumeric(c) => {
let mut buffer = String::with_capacity(20);
buffer.push(c);
loop {
if iter.peek().map_or(false,
|x| !char::is_alphanumeric(*x) && !char::is_whitespace(*x)) {
let n = iter.next().unwrap();
buffer.push(n);
} else {
break;
}
}
Operator(Op { repr: buffer })
}
c => {
if c == '.' && !iter.peek().map_or(false, |x| is_digit(x)) {
Period Period
} else if is_digit(&c) || c == '.' { } else if is_digit(&c) || c == '.' {
let mut buffer = String::with_capacity(20); let mut buffer = String::with_capacity(20);
@@ -110,19 +120,6 @@ pub fn tokenize(input: &str) -> TokenizeResult {
Ok(f) => NumLiteral(f), Ok(f) => NumLiteral(f),
Err(_) => return Err(TokenizeError::new("Failed to pase digit")), Err(_) => return Err(TokenizeError::new("Failed to pase digit")),
} }
} else if !char::is_alphanumeric(c) {
let mut buffer = String::with_capacity(20);
buffer.push(c);
loop {
if iter.peek().map_or(false,
|x| !char::is_alphanumeric(*x) && !char::is_whitespace(*x)) {
let n = iter.next().unwrap();
buffer.push(n);
} else {
break;
}
}
Operator(Op { repr: buffer })
} else { } else {
let mut buffer = String::with_capacity(20); let mut buffer = String::with_capacity(20);
buffer.push(c); buffer.push(c);
@@ -145,6 +142,8 @@ pub fn tokenize(input: &str) -> TokenizeResult {
"null" => Keyword(Kw::Null), "null" => Keyword(Kw::Null),
b => Identifier(b.to_string()), b => Identifier(b.to_string()),
} }
}
}
}; };
tokens.push(cur_tok); tokens.push(cur_tok);