Rename TokenType -> TokenKind

This commit is contained in:
greg 2018-11-16 23:17:34 -08:00
parent 8680c4faf6
commit 1f527f7949
4 changed files with 47 additions and 47 deletions

View File

@@ -2,7 +2,7 @@ use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use tokenizing::TokenType;
use tokenizing::TokenKind;
use self::BuiltinTypeSpecifier::*;
use self::BuiltinTConst::*;
@@ -40,8 +40,8 @@ impl BinOp {
pub fn sigil(&self) -> &Rc<String> {
&self.sigil
}
pub fn from_sigil_token(tok: &TokenType) -> Option<BinOp> {
use self::TokenType::*;
pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
use self::TokenKind::*;
let s = match tok {
Operator(op) => op,
Period => ".",
@@ -62,8 +62,8 @@ impl BinOp {
pub fn min_precedence() -> i32 {
i32::min_value()
}
pub fn get_precedence_from_token(op: &TokenType) -> Option<i32> {
use self::TokenType::*;
pub fn get_precedence_from_token(op: &TokenKind) -> Option<i32> {
use self::TokenKind::*;
let s = match op {
Operator(op) => op,
Period => ".",

View File

@@ -91,7 +91,7 @@ impl Schala {
fn tokenizing(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
let tokens = tokenizing::tokenize(input);
comp.map(|comp| {
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.kind, t.offset.0, t.offset.1)).join(", ");
comp.add_artifact(TraceArtifact::new("tokens", token_string));
});

View File

@@ -4,7 +4,7 @@ use std::vec::IntoIter;
use tokenizing::*;
use tokenizing::Kw::*;
use tokenizing::TokenType::*;
use tokenizing::TokenKind::*;
use source_map::{SourceMap, SourceData};
use ast::*;
@@ -56,14 +56,14 @@ impl TokenHandler {
TokenHandler { tokens }
}
fn peek(&mut self) -> TokenType {
self.tokens.peek().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
fn peek(&mut self) -> TokenKind {
self.tokens.peek().map(|ref t| { t.kind.clone() }).unwrap_or(TokenKind::EOF)
}
fn peek_with_token_offset(&mut self) -> Token {
self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { token_type: TokenType::EOF, offset: (0,0)})
self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, offset: (0,0)})
}
fn next(&mut self) -> TokenType {
self.tokens.next().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
fn next(&mut self) -> TokenKind {
self.tokens.next().map(|ref t| { t.kind.clone() }).unwrap_or(TokenKind::EOF)
}
}
@@ -77,17 +77,17 @@ impl Parser {
}
}
fn peek(&mut self) -> TokenType {
fn peek(&mut self) -> TokenKind {
self.token_handler.peek()
}
fn peek_with_token_offset(&mut self) -> Token {
self.token_handler.peek_with_token_offset()
}
fn next(&mut self) -> TokenType {
fn next(&mut self) -> TokenKind {
self.token_handler.next()
}
fn next_mapped(&mut self) -> SourceMap<TokenType> {
fn next_mapped(&mut self) -> SourceMap<TokenKind> {
let tt = self.next();
SourceMap {
node: tt,
@@ -1046,7 +1046,7 @@ impl Parser {
fn float_literal(&mut self) -> ParseResult<Expression> {
use self::ExpressionType::*;
let mut digits = self.digits()?;
if let TokenType::Period = self.peek() {
if let Period = self.peek() {
self.next();
digits.push_str(".");
digits.push_str(&self.digits()?);

View File

@@ -5,7 +5,7 @@ use std::iter::{Iterator, Peekable};
use std::fmt;
#[derive(Debug, PartialEq, Clone)]
pub enum TokenType {
pub enum TokenKind {
Newline, Semicolon,
LParen, RParen,
@@ -27,9 +27,9 @@ pub enum TokenType {
Error(String),
}
use self::TokenType::*;
use self::TokenKind::*;
impl fmt::Display for TokenType {
impl fmt::Display for TokenKind {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Operator(ref s) => write!(f, "Operator({})", **s),
@@ -87,7 +87,7 @@ lazy_static! {
#[derive(Debug, Clone)]
pub struct Token {
pub token_type: TokenType,
pub kind: TokenKind,
pub offset: (usize, usize),
}
@@ -98,17 +98,17 @@ pub struct TokenMetadata {
impl Token {
pub fn get_error(&self) -> Option<String> {
match self.token_type {
TokenType::Error(ref s) => Some(s.clone()),
match self.kind {
TokenKind::Error(ref s) => Some(s.clone()),
_ => None,
}
}
pub fn to_string_with_metadata(&self) -> String {
format!("{}(L:{},c:{})", self.token_type, self.offset.0, self.offset.1)
format!("{}(L:{},c:{})", self.kind, self.offset.0, self.offset.1)
}
pub fn get_token_type(&self) -> TokenType {
self.token_type.clone()
pub fn get_kind(&self) -> TokenKind {
self.kind.clone()
}
}
@@ -130,7 +130,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
.peekable();
while let Some((line_idx, ch_idx, c)) = input.next() {
let cur_tok_type = match c {
let cur_tok_kind = match c {
'/' => match input.peek().map(|t| t.2) {
Some('/') => {
while let Some((_, _, c)) = input.next() {
@ -172,12 +172,12 @@ pub fn tokenize(input: &str) -> Vec<Token> {
c if is_operator(&c) => handle_operator(c, &mut input),
unknown => Error(format!("Unexpected character: {}", unknown)),
};
tokens.push(Token { token_type: cur_tok_type, offset: (line_idx, ch_idx) });
tokens.push(Token { kind: cur_tok_kind, offset: (line_idx, ch_idx) });
}
tokens
}
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
if c == '0' && input.peek().map_or(false, |&(_, _, c)| { c == 'x' }) {
input.next();
let rest: String = input.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_').map(|(_, _, c)| { c }).collect();
@@ -192,7 +192,7 @@ fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) ->
}
}
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
let mut buf = String::new();
loop {
match input.next().map(|(_, _, c)| { c }) {
@@ -211,17 +211,17 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType
}
},
Some(c) => buf.push(c),
None => return TokenType::Error(format!("Unclosed string")),
None => return TokenKind::Error(format!("Unclosed string")),
}
}
TokenType::StrLiteral(Rc::new(buf))
TokenKind::StrLiteral(Rc::new(buf))
}
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
let mut buf = String::new();
buf.push(c);
if c == '_' && input.peek().map(|&(_, _, c)| { !c.is_alphabetic() }).unwrap_or(true) {
return TokenType::Underscore
return TokenKind::Underscore
}
loop {
@@ -235,12 +235,12 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>
}
match KEYWORDS.get(buf.as_str()) {
Some(kw) => TokenType::Keyword(*kw),
None => TokenType::Identifier(Rc::new(buf)),
Some(kw) => TokenKind::Keyword(*kw),
None => TokenKind::Identifier(Rc::new(buf)),
}
}
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenType {
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
match c {
'<' | '>' | '|' | '.' => {
let ref next = input.peek().map(|&(_, _, c)| { c });
@@ -285,7 +285,7 @@ fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>)
}
}
}
TokenType::Operator(Rc::new(buf))
TokenKind::Operator(Rc::new(buf))
}
#[cfg(test)]
@@ -300,29 +300,29 @@ mod schala_tokenizer_tests {
#[test]
fn tokens() {
let a = tokenize("let a: A<B> = c ++ d");
let token_types: Vec<TokenType> = a.into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
let token_kinds: Vec<TokenKind> = a.into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
LAngleBracket, ident!("B"), RAngleBracket, op!("="), ident!("c"), op!("++"), ident!("d")]);
}
#[test]
fn underscores() {
let token_types: Vec<TokenType> = tokenize("4_8").into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![digit!("4"), Underscore, digit!("8")]);
let token_kinds: Vec<TokenKind> = tokenize("4_8").into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![digit!("4"), Underscore, digit!("8")]);
let token_types2: Vec<TokenType> = tokenize("aba_yo").into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types2, vec![ident!("aba_yo")]);
let token_kinds2: Vec<TokenKind> = tokenize("aba_yo").into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds2, vec![ident!("aba_yo")]);
}
#[test]
fn comments() {
let token_types: Vec<TokenType> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![digit!("1"), op!("+"), digit!("2")]);
let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![digit!("1"), op!("+"), digit!("2")]);
}
#[test]
fn backtick_operators() {
let token_types: Vec<TokenType> = tokenize("1 `plus` 2").into_iter().map(move |t| t.token_type).collect();
assert_eq!(token_types, vec![digit!("1"), op!("plus"), digit!("2")]);
let token_kinds: Vec<TokenKind> = tokenize("1 `plus` 2").into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
}
}