TokenClass -> TokenKind
'Kind' seems to be the Rust convention, or close to it. See std::io::ErrorKind and the compiler internals.
commit e8a4a82e4d
parent 512d3f67a8
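
For context on the convention the message cites, here is a minimal sketch (not part of this commit) of how std::io names an error's category ErrorKind and exposes it via kind(); the TokenClass -> TokenKind rename below follows the same pattern. The describe helper and its messages are illustrative assumptions, not repository code.

use std::io::{Error, ErrorKind};

// Illustrative helper: demonstrates the std "Kind" naming pattern only.
fn describe(err: &Error) -> &'static str {
  match err.kind() {                        // kind() yields an ErrorKind
    ErrorKind::NotFound         => "not found",
    ErrorKind::PermissionDenied => "permission denied",
    _                           => "other i/o error",
  }
}

fn main() {
  let err = Error::new(ErrorKind::NotFound, "no such justfile");
  assert_eq!(describe(&err), "not found");
}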

notes (3 changed lines)
@@ -9,8 +9,6 @@ notes
 although maybe only after a '--': j build -- a=hello
 - parse lines into {{fragments}} and allow argument substitution
 - change error messages to underline problem token
-- try clippy
-- use "kind" instead of class
 - should i use // comments, since that's what's used in rust?
 - allow calling recipes in a justfile in a different directory:
 - just ../foo # ../justfile:foo
@@ -66,3 +64,4 @@ later:
 and dir with --directory/-d, so i can do:
 alias .j='just -j ~/.justfile -d ~'
 - run recipes asyncronously
+- lint with clippy once it runs on stable

src/lib.rs (20 changed lines)
@@ -259,7 +259,7 @@ enum ErrorKind<'a> {
   AssignmentUnimplemented,
   UnknownDependency{recipe: &'a str, unknown: &'a str},
   UnknownStartOfToken,
-  UnexpectedToken{expected: Vec<TokenClass>, found: TokenClass},
+  UnexpectedToken{expected: Vec<TokenKind>, found: TokenKind},
   InternalError{message: String},
 }
 
@@ -476,7 +476,7 @@ struct Token<'a> {
   text: &'a str,
   prefix: &'a str,
   lexeme: &'a str,
-  class: TokenClass,
+  class: TokenKind,
 }
 
 impl<'a> Token<'a> {
@@ -493,7 +493,7 @@ impl<'a> Token<'a> {
 }
 
 #[derive(Debug, PartialEq, Clone, Copy)]
-enum TokenClass {
+enum TokenKind {
   Name,
   Colon,
   Equals,
@@ -505,7 +505,7 @@ enum TokenClass {
   Eof,
 }
 
-impl Display for TokenClass {
+impl Display for TokenKind {
   fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
     try!(write!(f, "{}", match *self {
       Name => "name",
@@ -522,7 +522,7 @@ impl Display for TokenClass {
   }
 }
 
-use TokenClass::*;
+use TokenKind::*;
 
 fn token(pattern: &str) -> Regex {
   let mut s = String::new();
@@ -714,11 +714,11 @@ struct Parser<'a> {
 }
 
 impl<'a> Parser<'a> {
-  fn peek(&mut self, class: TokenClass) -> bool {
+  fn peek(&mut self, class: TokenKind) -> bool {
     self.tokens.peek().unwrap().class == class
   }
 
-  fn accept(&mut self, class: TokenClass) -> Option<Token<'a>> {
+  fn accept(&mut self, class: TokenKind) -> Option<Token<'a>> {
     if self.peek(class) {
       self.tokens.next()
     } else {
@@ -726,11 +726,11 @@ impl<'a> Parser<'a> {
     }
   }
 
-  fn accepted(&mut self, class: TokenClass) -> bool {
+  fn accepted(&mut self, class: TokenKind) -> bool {
    self.accept(class).is_some()
   }
 
-  fn expect(&mut self, class: TokenClass) -> Option<Token<'a>> {
+  fn expect(&mut self, class: TokenKind) -> Option<Token<'a>> {
     if self.peek(class) {
       self.tokens.next();
       None
@@ -828,7 +828,7 @@ impl<'a> Parser<'a> {
     })
   }
 
-  fn unexpected_token(&self, found: &Token<'a>, expected: &[TokenClass]) -> Error<'a> {
+  fn unexpected_token(&self, found: &Token<'a>, expected: &[TokenKind]) -> Error<'a> {
     found.error(ErrorKind::UnexpectedToken {
       expected: expected.to_vec(),
       found: found.class,
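
The Parser helpers touched above (peek, accept, accepted, expect) form the token-consumption interface that the rename threads through. Below is a self-contained, simplified sketch of that pattern, assuming unit-variant tokens and a tiny hand-built token stream; the names and details are illustrative rather than copied from the real source (for instance, the real peek unwraps, relying on a guaranteed trailing Eof token, while this sketch tolerates an empty stream).

use std::iter::Peekable;
use std::vec::IntoIter;

#[derive(Debug, PartialEq, Clone, Copy)]
enum TokenKind { Name, Colon, Eol, Eof }

#[derive(Debug, Clone, Copy)]
struct Token { kind: TokenKind }

struct Parser { tokens: Peekable<IntoIter<Token>> }

impl Parser {
  // True if the next token has the given kind.
  fn peek(&mut self, kind: TokenKind) -> bool {
    self.tokens.peek().map(|t| t.kind == kind).unwrap_or(false)
  }

  // Consume and return the next token only if it has the given kind.
  fn accept(&mut self, kind: TokenKind) -> Option<Token> {
    if self.peek(kind) { self.tokens.next() } else { None }
  }

  // Like accept, but only reports whether a token was consumed.
  fn accepted(&mut self, kind: TokenKind) -> bool {
    self.accept(kind).is_some()
  }
}

fn main() {
  let tokens = vec![
    Token { kind: TokenKind::Name },
    Token { kind: TokenKind::Colon },
    Token { kind: TokenKind::Eol },
    Token { kind: TokenKind::Eof },
  ];
  let mut p = Parser { tokens: tokens.into_iter().peekable() };
  assert!(p.accepted(TokenKind::Name));   // recipe name
  assert!(p.accepted(TokenKind::Colon));  // ':'
  assert!(p.accepted(TokenKind::Eol));    // end of line
  assert!(p.peek(TokenKind::Eof));        // nothing left but Eof
}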

src/tests.rs (20 changed lines)
@@ -2,7 +2,7 @@ extern crate tempdir;
 
 use super::{Token, Error, ErrorKind, Justfile};
 
-use super::TokenClass::*;
+use super::TokenKind::*;
 
 fn tokenize_success(text: &str, expected_summary: &str) {
   let tokens = super::tokenize(text).unwrap();
@@ -32,15 +32,15 @@ fn tokenize_error(text: &str, expected: Error) {
 fn token_summary(tokens: &[Token]) -> String {
   tokens.iter().map(|t| {
     match t.class {
-      super::TokenClass::Line{..} => "*",
-      super::TokenClass::Name => "N",
-      super::TokenClass::Colon => ":",
-      super::TokenClass::Equals => "=",
-      super::TokenClass::Comment{..} => "#",
-      super::TokenClass::Indent{..} => ">",
-      super::TokenClass::Dedent => "<",
-      super::TokenClass::Eol => "$",
-      super::TokenClass::Eof => ".",
+      super::TokenKind::Line{..} => "*",
+      super::TokenKind::Name => "N",
+      super::TokenKind::Colon => ":",
+      super::TokenKind::Equals => "=",
+      super::TokenKind::Comment{..} => "#",
+      super::TokenKind::Indent{..} => ">",
+      super::TokenKind::Dedent => "<",
+      super::TokenKind::Eol => "$",
+      super::TokenKind::Eof => ".",
     }
   }).collect::<Vec<_>>().join("")
 }
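
token_summary above condenses a token stream into one character per token for test assertions. As a standalone restatement of that mapping, a hedged sketch follows; the unit variants are simplified (the real Line, Comment, and Indent variants carry data), and the token stream in main is hypothetical rather than taken from the actual test suite.

#[derive(Debug, PartialEq, Clone, Copy)]
enum TokenKind { Line, Name, Colon, Equals, Comment, Indent, Dedent, Eol, Eof }

// One character per token kind, mirroring the table in token_summary above.
fn summary_char(kind: TokenKind) -> &'static str {
  match kind {
    TokenKind::Line    => "*",
    TokenKind::Name    => "N",
    TokenKind::Colon   => ":",
    TokenKind::Equals  => "=",
    TokenKind::Comment => "#",
    TokenKind::Indent  => ">",
    TokenKind::Dedent  => "<",
    TokenKind::Eol     => "$",
    TokenKind::Eof     => ".",
  }
}

fn token_summary(kinds: &[TokenKind]) -> String {
  kinds.iter().map(|&k| summary_char(k)).collect::<Vec<_>>().join("")
}

fn main() {
  use TokenKind::*;
  // Hypothetical stream: a recipe header followed by one indented line.
  let kinds = [Name, Colon, Eol, Indent, Line, Dedent, Eof];
  assert_eq!(token_summary(&kinds), "N:$>*<.");
}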