Equals should be a token type
parent c332747c3e
commit 32fe7430a4
TODO.md | 2
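The diff below drops the old pattern of matching a generic Operator token behind a string guard (`Operator(ref c) if **c == "="`) in favor of a dedicated `Equals` token kind. A minimal sketch of the idea, using simplified stand-in types and an invented helper name rather than the project's exact definitions:

use std::rc::Rc;

#[derive(Debug)]
enum TokenKind {
    Equals,               // new: a bare '=' gets its own kind
    Operator(Rc<String>), // other operators stay string-backed
}

// Illustrative only: consumers can now match the variant directly.
fn is_assignment(kind: &TokenKind) -> bool {
    match kind {
        TokenKind::Equals => true,                      // direct match, no string guard
        TokenKind::Operator(op) if **op == "=" => true, // the old style, shown for contrast
        _ => false,
    }
}

fn main() {
    assert!(is_assignment(&TokenKind::Equals));
    assert!(!is_assignment(&TokenKind::Operator(Rc::new("+".to_string()))));
}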
@@ -65,6 +65,8 @@ ex.

## Playing around with conditional syntax ideas

-

- if/match playground

simple if
@@ -25,6 +25,7 @@ impl BinOp {
      Slash => "/",
      LAngleBracket => "<",
      RAngleBracket => ">",
+     Equals => "=",
      _ => return None
    };
    Some(BinOp::from_sigil(s))
@@ -47,6 +48,7 @@ impl BinOp {
      Slash => "/",
      LAngleBracket => "<",
      RAngleBracket => ">",
+     Equals => "=",
      _ => return None
    };
    let default = 10_000_000;
@@ -532,8 +532,10 @@ mod eval_tests {
  fn test_basic_eval() {
    test_in_fresh_env!("1 + 2", "3");
+   test_in_fresh_env!("let mut a = 1; a = 2", "Unit");
    /*
+   test_in_fresh_env!("let mut a = 1; a = 2; a", "2");
    test_in_fresh_env!(r#"("a", 1 + 2)"#, r#"("a", 3)"#);
    */
  }

  #[test]
@@ -2,7 +2,7 @@
//! This module is where the recursive-descent parsing methods live.
//!
//!
//! # Schala EBNF Grammar
//! This document is the authoritative grammar of Schala, represented in something approximating
//! Extended Backus-Naur form. Terminal productions are in "double quotes", or UPPERCASE
//! if they represent a class of tokens rather than an specific string, or are otherwise
@@ -366,7 +366,7 @@ impl Parser {
      false
    };
    let name = self.type_singleton_name()?;
-   expect!(self, Operator(ref c) if **c == "=");
+   expect!(self, Equals);
    let body = self.type_body()?;
    Ok(Declaration::TypeDecl { name, body, mutable})
  }
@@ -376,7 +376,7 @@ impl Parser {
  fn type_alias(&mut self) -> ParseResult<Declaration> {
    expect!(self, Keyword(Alias));
    let alias = self.identifier()?;
-   expect!(self, Operator(ref c) if **c == "=");
+   expect!(self, Equals);
    let original = self.identifier()?;
    Ok(Declaration::TypeAlias(alias, original))
  }
@@ -471,7 +471,9 @@ impl Parser {
      Colon => Some(self.type_anno()?),
      _ => None
    };
-   let default = None;
+   let default = match self.token_handler.peek_kind() {
+     _ => None
+   };
    Ok(FormalParam { name, anno, default })
  }
@@ -492,7 +494,7 @@ impl Parser {
      None
    };

-   expect!(self, Operator(ref o) if **o == "=");
+   expect!(self, Equals);
    let expr = self.expression()?.into();

    Ok(Declaration::Binding { name, constant, type_anno, expr })
@@ -647,7 +649,7 @@ impl Parser {
      },
      Identifier(s) => {
        match self.token_handler.peek_kind_n(1) {
-         Operator(ref op) if **op == "=" => {
+         Equals => {
            self.token_handler.next();
            self.token_handler.next();
            let expr = self.expression()?;
@@ -15,7 +15,7 @@ pub enum TokenKind {
  Pipe, Backslash,

  Comma, Period, Colon, Underscore,
- Slash,
+ Slash, Equals,

  Operator(Rc<String>),
  DigitGroup(Rc<String>), HexLiteral(Rc<String>), BinNumberSigil,
@@ -118,7 +118,7 @@ type CharData = (usize, usize, char);
pub fn tokenize(input: &str) -> Vec<Token> {
  let mut tokens: Vec<Token> = Vec::new();

  let mut input = input.lines().enumerate()
    .intersperse((0, "\n"))
    .flat_map(|(line_idx, ref line)| {
      line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
@@ -238,7 +238,7 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>

fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
  match c {
-   '<' | '>' | '|' | '.' => {
+   '<' | '>' | '|' | '.' | '=' => {
      let ref next = input.peek().map(|&(_, _, c)| { c });
      if !next.map(|n| { is_operator(&n) }).unwrap_or(false) {
        return match c {
@@ -246,6 +246,7 @@ fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>)
        '>' => RAngleBracket,
        '|' => Pipe,
        '.' => Period,
+       '=' => Equals,
        _ => unreachable!(),
      }
    }
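For reference, a hedged sketch of the single- versus multi-character decision that handle_operator makes for '=' after this change: a lone '=' yields the new Equals kind, while '=' followed by another operator character still falls through to the generic Operator path. The operator character set and helper names below are assumptions for illustration, not the project's code.

use std::iter::Peekable;

#[derive(Debug, PartialEq)]
enum Kind { Equals, Operator(String) }

// Assumed operator alphabet, for illustration only.
fn is_operator_char(c: char) -> bool {
    "+-*/<>=|.&%^".contains(c)
}

// `rest` is the character stream *after* the leading '=' has been consumed.
fn lex_after_equals(rest: &mut Peekable<impl Iterator<Item = char>>) -> Kind {
    match rest.peek() {
        Some(&c) if is_operator_char(c) => {
            // '=' begins a longer sigil such as "==" or "=>": keep accumulating it.
            let mut sigil = String::from("=");
            while let Some(&c) = rest.peek() {
                if is_operator_char(c) { sigil.push(c); rest.next(); } else { break; }
            }
            Kind::Operator(sigil)
        }
        // A bare '=' now gets its own token kind instead of Operator("=").
        _ => Kind::Equals,
    }
}

fn main() {
    assert_eq!(lex_after_equals(&mut " x".chars().peekable()), Kind::Equals);
    assert_eq!(lex_after_equals(&mut "= y".chars().peekable()), Kind::Operator("==".into()));
}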
@@ -298,7 +299,7 @@ mod schala_tokenizer_tests {
    let a = tokenize("let a: A<B> = c ++ d");
    let token_kinds: Vec<TokenKind> = a.into_iter().map(move |t| t.kind).collect();
    assert_eq!(token_kinds, vec![Keyword(Let), ident!("a"), Colon, ident!("A"),
-     LAngleBracket, ident!("B"), RAngleBracket, op!("="), ident!("c"), op!("++"), ident!("d")]);
+     LAngleBracket, ident!("B"), RAngleBracket, Equals, ident!("c"), op!("++"), ident!("d")]);
  }

  #[test]