From 1f527f79490764a30edd835a52d95359fa7db88f Mon Sep 17 00:00:00 2001
From: greg
Date: Fri, 16 Nov 2018 23:17:34 -0800
Subject: [PATCH] Rename TokenType -> TokenKind

---
 schala-lang/language/src/builtin.rs    | 10 ++---
 schala-lang/language/src/lib.rs        |  2 +-
 schala-lang/language/src/parsing.rs    | 20 ++++-----
 schala-lang/language/src/tokenizing.rs | 62 +++++++++++++-------------
 4 files changed, 47 insertions(+), 47 deletions(-)

diff --git a/schala-lang/language/src/builtin.rs b/schala-lang/language/src/builtin.rs
index f2df113..b0a7e96 100644
--- a/schala-lang/language/src/builtin.rs
+++ b/schala-lang/language/src/builtin.rs
@@ -2,7 +2,7 @@ use std::rc::Rc;
 use std::collections::HashMap;
 use std::fmt;
 
-use tokenizing::TokenType;
+use tokenizing::TokenKind;
 
 use self::BuiltinTypeSpecifier::*;
 use self::BuiltinTConst::*;
@@ -40,8 +40,8 @@ impl BinOp {
   pub fn sigil(&self) -> &Rc<String> {
     &self.sigil
   }
-  pub fn from_sigil_token(tok: &TokenType) -> Option<BinOp> {
-    use self::TokenType::*;
+  pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
+    use self::TokenKind::*;
     let s = match tok {
       Operator(op) => op,
       Period => ".",
@@ -62,8 +62,8 @@ impl BinOp {
   pub fn min_precedence() -> i32 {
     i32::min_value()
   }
-  pub fn get_precedence_from_token(op: &TokenType) -> Option<i32> {
-    use self::TokenType::*;
+  pub fn get_precedence_from_token(op: &TokenKind) -> Option<i32> {
+    use self::TokenKind::*;
     let s = match op {
       Operator(op) => op,
       Period => ".",
diff --git a/schala-lang/language/src/lib.rs b/schala-lang/language/src/lib.rs
index 5258dc2..f66027a 100644
--- a/schala-lang/language/src/lib.rs
+++ b/schala-lang/language/src/lib.rs
@@ -91,7 +91,7 @@ impl Schala {
 fn tokenizing(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<Token>, String> {
   let tokens = tokenizing::tokenize(input);
   comp.map(|comp| {
-    let token_string = tokens.iter().map(|t| format!("{:?}<L:{},c:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
+    let token_string = tokens.iter().map(|t| format!("{:?}<L:{},c:{}>", t.kind, t.offset.0, t.offset.1)).join(", ");
     comp.add_artifact(TraceArtifact::new("tokens", token_string));
   });
 
diff --git a/schala-lang/language/src/parsing.rs b/schala-lang/language/src/parsing.rs
index d28c72c..5b210e9 100644
--- a/schala-lang/language/src/parsing.rs
+++ b/schala-lang/language/src/parsing.rs
@@ -4,7 +4,7 @@ use std::vec::IntoIter;
 
 use tokenizing::*;
 use tokenizing::Kw::*;
-use tokenizing::TokenType::*;
+use tokenizing::TokenKind::*;
 
 use source_map::{SourceMap, SourceData};
 use ast::*;
@@ -56,14 +56,14 @@ impl TokenHandler {
     TokenHandler { tokens }
   }
 
-  fn peek(&mut self) -> TokenType {
-    self.tokens.peek().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
+  fn peek(&mut self) -> TokenKind {
+    self.tokens.peek().map(|ref t| { t.kind.clone() }).unwrap_or(TokenKind::EOF)
   }
   fn peek_with_token_offset(&mut self) -> Token {
-    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { token_type: TokenType::EOF, offset: (0,0)})
+    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, offset: (0,0)})
   }
-  fn next(&mut self) -> TokenType {
-    self.tokens.next().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
+  fn next(&mut self) -> TokenKind {
+    self.tokens.next().map(|ref t| { t.kind.clone() }).unwrap_or(TokenKind::EOF)
   }
 }
@@ -77,17 +77,17 @@ impl Parser {
     }
   }
 
-  fn peek(&mut self) -> TokenType {
+  fn peek(&mut self) -> TokenKind {
     self.token_handler.peek()
   }
   fn peek_with_token_offset(&mut self) -> Token {
     self.token_handler.peek_with_token_offset()
   }
-  fn next(&mut self) -> TokenType {
+  fn next(&mut self) -> TokenKind {
     self.token_handler.next()
   }
-  fn next_mapped(&mut self) -> SourceMap<TokenType> {
+  fn next_mapped(&mut self) -> SourceMap<TokenKind> {
     let tt = self.next();
     SourceMap {
       node: tt,
@@ -1046,7 +1046,7 @@ impl Parser {
   fn float_literal(&mut self) -> ParseResult<Expression> {
     use self::ExpressionType::*;
     let mut digits = self.digits()?;
-    if let TokenType::Period = self.peek() {
+    if let TokenKind::Period = self.peek() {
       self.next();
       digits.push_str(".");
       digits.push_str(&self.digits()?);
diff --git a/schala-lang/language/src/tokenizing.rs b/schala-lang/language/src/tokenizing.rs
index 12b20c0..879ebb2 100644
--- a/schala-lang/language/src/tokenizing.rs
+++ b/schala-lang/language/src/tokenizing.rs
@@ -5,7 +5,7 @@ use std::iter::{Iterator, Peekable};
 use std::fmt;
 
 #[derive(Debug, PartialEq, Clone)]
-pub enum TokenType {
+pub enum TokenKind {
   Newline, Semicolon,
 
   LParen, RParen,
@@ -27,9 +27,9 @@ pub enum TokenType {
   Error(String),
 }
 
-use self::TokenType::*;
+use self::TokenKind::*;
 
-impl fmt::Display for TokenType {
+impl fmt::Display for TokenKind {
   fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     match self {
       &Operator(ref s) => write!(f, "Operator({})", **s),
@@ -87,7 +87,7 @@ lazy_static! {
 
 #[derive(Debug, Clone)]
 pub struct Token {
-  pub token_type: TokenType,
+  pub kind: TokenKind,
   pub offset: (usize, usize),
 }
@@ -98,17 +98,17 @@ pub struct TokenMetadata {
 }
 
 impl Token {
   pub fn get_error(&self) -> Option<String> {
-    match self.token_type {
-      TokenType::Error(ref s) => Some(s.clone()),
+    match self.kind {
+      TokenKind::Error(ref s) => Some(s.clone()),
       _ => None,
     }
   }
   pub fn to_string_with_metadata(&self) -> String {
-    format!("{}(L:{},c:{})", self.token_type, self.offset.0, self.offset.1)
+    format!("{}(L:{},c:{})", self.kind, self.offset.0, self.offset.1)
   }
-  pub fn get_token_type(&self) -> TokenType {
-    self.token_type.clone()
+  pub fn get_kind(&self) -> TokenKind {
+    self.kind.clone()
   }
 }
@@ -130,7 +130,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
     .peekable();
 
   while let Some((line_idx, ch_idx, c)) = input.next() {
-    let cur_tok_type = match c {
+    let cur_tok_kind = match c {
       '/' => match input.peek().map(|t| t.2) {
         Some('/') => {
           while let Some((_, _, c)) = input.next() {
@@ -172,12 +172,12 @@ pub fn tokenize(input: &str) -> Vec<Token> {
       c if is_operator(&c) => handle_operator(c, &mut input),
       unknown => Error(format!("Unexpected character: {}", unknown)),
     };
-    tokens.push(Token { token_type: cur_tok_type, offset: (line_idx, ch_idx) });
+    tokens.push(Token { kind: cur_tok_kind, offset: (line_idx, ch_idx) });
   }
   tokens
 }
 
-fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenType {
+fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenKind {
   if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
     input.next();
     let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
@@ -192,7 +192,7 @@ fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenType {
   }
 }
 
-fn handle_quote(input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenType {
+fn handle_quote(input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenKind {
   let mut buf = String::new();
   loop {
     match input.next().map(|(_, _, c)| { c }) {
@@ -211,17 +211,17 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenType {
         }
       },
       Some(c) => buf.push(c),
-      None => return TokenType::Error(format!("Unclosed string")),
+      None => return TokenKind::Error(format!("Unclosed string")),
     }
   }
-  TokenType::StrLiteral(Rc::new(buf))
+  TokenKind::StrLiteral(Rc::new(buf))
 }
 
-fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenType {
+fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenKind {
   let mut buf = String::new();
   buf.push(c);
   if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
-    return TokenType::Underscore
+    return TokenKind::Underscore
   }
 
   loop {
@@ -235,12 +235,12 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenType {
   }
 
   match KEYWORDS.get(buf.as_str()) {
-    Some(kw) => TokenType::Keyword(*kw),
-    None => TokenType::Identifier(Rc::new(buf)),
+    Some(kw) => TokenKind::Keyword(*kw),
+    None => TokenKind::Identifier(Rc::new(buf)),
   }
 }
 
-fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenType {
+fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenKind {
   match c {
     '<' | '>' | '|' | '.' => {
       let ref next = input.peek().map(|&(_, _, c)| { c });
@@ -285,7 +285,7 @@ fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=(usize, usize, char)>>) -> TokenType {
       }
     }
   }
-  TokenType::Operator(Rc::new(buf))
+  TokenKind::Operator(Rc::new(buf))
 }
 
 #[cfg(test)]
@@ -300,29 +300,29 @@ mod schala_tokenizer_tests {
   #[test]
   fn tokens() {
     let a = tokenize("let a: A<B> = c ++ d");
-    let token_types: Vec<TokenType> = a.into_iter().map(move |t| t.token_type).collect();
-    assert_eq!(token_types, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
+    let token_kinds: Vec<TokenKind> = a.into_iter().map(move |t| t.kind).collect();
+    assert_eq!(token_kinds, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
       LAngleBracket, ident!("B"), RAngleBracket, op!("="), ident!("c"), op!("++"), ident!("d")]);
   }
 
   #[test]
   fn underscores() {
-    let token_types: Vec<TokenType> = tokenize("4_8").into_iter().map(move |t| t.token_type).collect();
-    assert_eq!(token_types, vec![digit!("4"), Underscore, digit!("8")]);
+    let token_kinds: Vec<TokenKind> = tokenize("4_8").into_iter().map(move |t| t.kind).collect();
+    assert_eq!(token_kinds, vec![digit!("4"), Underscore, digit!("8")]);
 
-    let token_types2: Vec<TokenType> = tokenize("aba_yo").into_iter().map(move |t| t.token_type).collect();
-    assert_eq!(token_types2, vec![ident!("aba_yo")]);
+    let token_kinds2: Vec<TokenKind> = tokenize("aba_yo").into_iter().map(move |t| t.kind).collect();
+    assert_eq!(token_kinds2, vec![ident!("aba_yo")]);
   }
 
   #[test]
   fn comments() {
-    let token_types: Vec<TokenType> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.token_type).collect();
-    assert_eq!(token_types, vec![digit!("1"), op!("+"), digit!("2")]);
+    let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.kind).collect();
+    assert_eq!(token_kinds, vec![digit!("1"), op!("+"), digit!("2")]);
   }
 
   #[test]
   fn backtick_operators() {
-    let token_types: Vec<TokenType> = tokenize("1 `plus` 2").into_iter().map(move |t| t.token_type).collect();
-    assert_eq!(token_types, vec![digit!("1"), op!("plus"), digit!("2")]);
+    let token_kinds: Vec<TokenKind> = tokenize("1 `plus` 2").into_iter().map(move |t| t.kind).collect();
+    assert_eq!(token_kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
   }
 }
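Downstream of this patch, call sites change only where they name the type or the field. A minimal sketch of the renamed surface (illustrative only, not part of the patch: first_error is a hypothetical helper, while tokenize, Token::kind, and TokenKind::Error are the items from tokenizing.rs above):

use tokenizing::{tokenize, TokenKind};

// Scan a source string and report the first tokenizer error, if any.
// Offsets are the raw (line, column) indices the tokenizer records.
fn first_error(src: &str) -> Option<String> {
  tokenize(src).into_iter().find_map(|tok| match tok.kind {
    // Callers now match on `tok.kind` rather than the old `tok.token_type`;
    // the enum variants themselves are unchanged by the rename.
    TokenKind::Error(ref s) => Some(format!("{} (L:{},c:{})", s, tok.offset.0, tok.offset.1)),
    _ => None,
  })
}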