From 743311d18a01314e62022a196b998495395106c0 Mon Sep 17 00:00:00 2001
From: greg
Date: Wed, 28 Dec 2016 22:52:23 -0800
Subject: [PATCH] Convert Tokenize to Result

---
 src/main.rs      | 11 +++++++----
 src/tokenizer.rs | 23 ++++++++++++++++++-----
 2 files changed, 25 insertions(+), 9 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index c4d794c..c11d307 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -45,8 +45,11 @@ fn run_noninteractive(filename: &str, compile: bool) {
     source_file.read_to_string(&mut buffer).unwrap();
 
     let tokens = match tokenize(&buffer) {
-        Some(t) => t,
-        None => { println!("Tokenization error"); return; }
+        Ok(t) => t,
+        Err(e) => {
+            println!("Tokenization error: {}", e.msg);
+            return;
+        }
     };
 
     let ast = match parse(&tokens, &[]) {
@@ -106,8 +109,8 @@ fn repl_handler(input: &str, state: &mut InterpreterState) -> String {
     let mut result = String::new();
 
     let tokens = match tokenize(input) {
-        None => return format!("Tokenization error"),
-        Some(t) => t
+        Err(e) => return format!("Tokenization error: {}", e.msg),
+        Ok(t) => t
     };
 
     if state.show_tokens {
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 9f1a896..0addacb 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -31,6 +31,19 @@ pub enum Kw {
     Null,
 }
 
+pub type TokenizeResult = Result<Vec<Token>, TokenizeError>;
+
+#[derive(Debug)]
+pub struct TokenizeError {
+    pub msg: String,
+}
+
+impl TokenizeError {
+    fn new(msg: &str) -> TokenizeError {
+        TokenizeError { msg: msg.to_string() }
+    }
+}
+
 fn is_digit(c: &char) -> bool {
     c.is_digit(10)
 }
@@ -48,7 +61,7 @@ fn ends_identifier(c: &char) -> bool {
         c == ':'
 }
 
-pub fn tokenize(input: &str) -> Option<Vec<Token>> {
+pub fn tokenize(input: &str) -> TokenizeResult {
     use self::Token::*;
     let mut tokens = Vec::new();
     let mut iter = input.chars().peekable();
@@ -82,7 +95,7 @@ pub fn tokenize(input: &str) -> Option<Vec<Token>> {
                 match iter.next() {
                     Some(x) if x == '"' => break,
                     Some(x) => buffer.push(x),
-                    None => return None,
+                    None => return Err(TokenizeError::new("Unclosed quote")),
                 }
             }
             StrLiteral(buffer)
@@ -101,7 +114,7 @@ pub fn tokenize(input: &str) -> Option<Vec<Token>> {
             }
             match buffer.parse::<f64>() {
                 Ok(f) => NumLiteral(f),
-                Err(_) => return None
+                Err(_) => return Err(TokenizeError::new("Failed to parse digit")),
             }
         } else if !char::is_alphanumeric(c) {
             let mut buffer = String::with_capacity(20);
@@ -142,7 +155,7 @@ pub fn tokenize(input: &str) -> Option<Vec<Token>> {
         tokens.push(cur_tok);
     }
 
-    Some(tokens)
+    Ok(tokens)
 }
 #[cfg(test)]
 mod tests {
@@ -173,7 +186,7 @@ mod tests {
         tokentest!("2.3*49.2",
                    "[NumLiteral(2.3), Operator(Op { repr: \"*\" }), NumLiteral(49.2)]");
 
-        assert_eq!(tokenize("2.4.5"), None);
+        assert!(tokenize("2.4.5").is_err());
     }
 
     #[test]
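
Note (not part of the patch): a minimal sketch of a possible follow-up, assuming it would live in src/tokenizer.rs next to the TokenizeError struct added above. Giving the error a std::fmt::Display impl would let call sites format it directly instead of reaching into the public msg field.

    // Sketch only: a Display impl for the TokenizeError defined above, so
    // callers can write `println!("Tokenization error: {}", e)` rather than
    // accessing `e.msg` by hand.
    use std::fmt;

    impl fmt::Display for TokenizeError {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            // Delegate to the stored message.
            write!(f, "{}", self.msg)
        }
    }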