Added Keyword lexical class
This commit is contained in:
parent
25f5188d8c
commit
bfa16fd6fb
@ -1,7 +1,7 @@
|
||||
use std::slice::Iter;
|
||||
use std::iter::Peekable;
|
||||
|
||||
use tokenizer::{Token};
|
||||
use tokenizer::{Token, Kw};
|
||||
use tokenizer::Token::*;
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -39,6 +39,7 @@ fn expect_token(tok: Token, tokens: &mut Tokens) -> bool {
|
||||
(NumLiteral(_), NumLiteral(_)) => true,
|
||||
(StrLiteral(_), StrLiteral(_)) => true,
|
||||
(Identifier(ref i1), Identifier(ref i2)) => i1 == i2,
|
||||
(Keyword(k1), Keyword(k2)) => k1 == k2,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
@ -60,7 +61,7 @@ pub fn parse(input: Vec<Token>) -> ParseResult {
|
||||
|
||||
|
||||
fn let_expression(input: &mut Tokens) -> ParseResult {
|
||||
expect!(Identifier("let".to_string()), input);
|
||||
expect!(Keyword(Kw::Let), input);
|
||||
if let Some(&Identifier(ref name)) = input.next() {
|
||||
if let Some(&Identifier(ref s)) = input.next() {
|
||||
if s == "=" {
|
||||
|
@ -7,8 +7,21 @@ pub enum Token {
|
||||
Comma,
|
||||
NumLiteral(f64),
|
||||
StrLiteral(String),
|
||||
Identifier(String)
|
||||
/* Keyword(Keyword) */ //implement in future
|
||||
Identifier(String),
|
||||
Keyword(Kw)
|
||||
}
|
||||
|
||||
/// Reserved words recognized by the tokenizer.
///
/// All variants are fieldless, so `Copy`, `Eq`, and `Hash` are derived in
/// addition to the original `Debug, Clone, PartialEq`: keywords can then be
/// compared and passed by value with no clones, and used as map keys.
/// This is strictly backward compatible — it only adds capabilities.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Kw {
    If,
    Then,
    Else,
    While,
    Do,
    End,
    Let,
    Fn,
    Null,
}
|
||||
|
||||
pub fn tokenize(input: &str) -> Vec<Token> {
|
||||
@ -67,10 +80,18 @@ pub fn tokenize(input: &str) -> Vec<Token> {
|
||||
|
||||
match buffer.parse::<f64>() {
|
||||
Ok(f) => tokens.push(Token::NumLiteral(f)),
|
||||
_ => tokens.push(Token::Identifier(buffer))
|
||||
_ => tokens.push(handle_identifier(buffer))
|
||||
}
|
||||
}
|
||||
}
|
||||
tokens.push(Token::EOF);
|
||||
tokens
|
||||
}
|
||||
|
||||
fn handle_identifier(identifier: String) -> Token {
|
||||
if identifier == "let" {
|
||||
return Token::Keyword(Kw::Let);
|
||||
}
|
||||
|
||||
return Token::Identifier(identifier);
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user