Clippy for parsing

Greg Shuflin 2021-10-19 21:53:30 -07:00
parent 91a7abf4cd
commit c9c65b050c
2 changed files with 30 additions and 34 deletions
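Taken together, the hunks below are mechanical cleanups of the kind Clippy suggests: map-then-clone calls become .cloned(), closures that only forward to a constructor are replaced by the constructor itself, if let Some(_) becomes .is_some(), a needless format! wrapper is dropped, a single-character push_str becomes push, the import-path loop is rewritten as a while let, and the parser tests shed a series of redundant .clone() and .into() calls.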

View File

@@ -230,15 +230,15 @@ impl TokenHandler {
         self.peek_n(n).kind
     }
     fn peek(&mut self) -> Token {
-        self.tokens.get(self.idx).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
+        self.tokens.get(self.idx).cloned().unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
     }
     /// calling peek_n(0) is the same thing as peek()
     fn peek_n(&mut self, n: usize) -> Token {
-        self.tokens.get(self.idx + n).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
+        self.tokens.get(self.idx + n).cloned().unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
     }
     fn next(&mut self) -> Token {
         self.idx += 1;
-        self.tokens.get(self.idx - 1).map(|t: &Token| { t.clone() }).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
+        self.tokens.get(self.idx - 1).cloned().unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
     }
 }
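The three changed lines above are the pattern clippy's map_clone lint flags: calling .map(|t| t.clone()) on an Option<&T> when .cloned() expresses the same thing. A minimal standalone sketch, with a stand-in Token type rather than the parser's real one:

    #[derive(Clone)]
    struct Token(u32);

    fn get_or_default(tokens: &[Token], idx: usize) -> Token {
        // Before: tokens.get(idx).map(|t: &Token| t.clone()).unwrap_or(Token(0))
        // Option::cloned folds the map-and-clone into a single call.
        tokens.get(idx).cloned().unwrap_or(Token(0))
    }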
@@ -371,11 +371,11 @@ impl Parser {
         let kind = match tok.get_kind() {
             Keyword(Type) => self.type_declaration().map(|decl| { StatementKind::Declaration(decl) }),
             Keyword(Func)=> self.func_declaration().map(|func| { StatementKind::Declaration(func) }),
-            Keyword(Let) => self.binding_declaration().map(|decl| StatementKind::Declaration(decl)),
-            Keyword(Interface) => self.interface_declaration().map(|decl| StatementKind::Declaration(decl)),
-            Keyword(Impl) => self.impl_declaration().map(|decl| StatementKind::Declaration(decl)),
-            Keyword(Import) => self.import_declaration().map(|spec| StatementKind::Import(spec)),
-            Keyword(Module) => self.module_declaration().map(|spec| StatementKind::Module(spec)),
+            Keyword(Let) => self.binding_declaration().map(StatementKind::Declaration),
+            Keyword(Interface) => self.interface_declaration().map(StatementKind::Declaration),
+            Keyword(Impl) => self.impl_declaration().map(StatementKind::Declaration),
+            Keyword(Import) => self.import_declaration().map(StatementKind::Import),
+            Keyword(Module) => self.module_declaration().map(StatementKind::Module),
             _ => self.expression().map(|expr| { StatementKind::Expression(expr) } ),
         }?;
         let id = self.id_store.fresh();
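These match arms are clippy's redundant_closure pattern: a closure such as |decl| StatementKind::Declaration(decl) only forwards its argument to a constructor, so the constructor itself can be passed to map; the same rewrite shows up again further down as .map(Box::new). A small sketch with a hypothetical enum:

    enum Stmt {
        Decl(String),
    }

    fn to_stmt(name: Option<String>) -> Option<Stmt> {
        // The tuple-variant constructor is already a fn(String) -> Stmt,
        // so .map(|n| Stmt::Decl(n)) can simply be .map(Stmt::Decl).
        name.map(Stmt::Decl)
    }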
@@ -472,7 +472,7 @@ impl Parser {
         expect!(self, Keyword(Func));
         let (name, operator) = match self.token_handler.peek_kind() {
             Operator(s) => {
-                let name = s.clone();
+                let name = s;
                 self.token_handler.next();
                 (name, true)
             },
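Dropping the .clone() works because the Operator(s) arm already binds the operator text by value, so s can simply be moved; the same needless-clone cleanup recurs below for keyword-argument names, string patterns, and string literals. Illustratively, with made-up types:

    fn operator_name(kind: Option<String>) -> (String, bool) {
        match kind {
            // s is bound by value, so it can be moved out directly;
            // s.clone() here would allocate a copy only to discard the original.
            Some(s) => (s, true),
            None => (String::from("anonymous"), false),
        }
    }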
@@ -582,7 +582,7 @@ impl Parser {
             Colon => Some(self.type_anno()?),
             _ => None
         };
-        if let Some(_) = expr_body.type_anno {
+        if expr_body.type_anno.is_some() {
             return ParseError::new_with_token("Bad parse state encountered", self.token_handler.peek());
         }
         expr_body.type_anno = type_anno;
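if let Some(_) = x only asks whether a value is present, which Option::is_some states directly; this is clippy's redundant_pattern_matching lint. For instance:

    fn has_anno(type_anno: &Option<String>) -> bool {
        // Before: if let Some(_) = type_anno { true } else { false }
        type_anno.is_some()
    }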
@@ -619,7 +619,7 @@ impl Parser {
     fn precedence_expr(&mut self, precedence: i32) -> ParseResult<Expression> {
         let record = ParseRecord {
             production_name: "precedence_expr".to_string(),
-            next_token: format!("{}", self.token_handler.peek().to_string_with_metadata()),
+            next_token: self.token_handler.peek().to_string_with_metadata(),
             level: self.parse_level,
         };
         self.parse_level += 1;
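format!("{}", x) where x is already a String makes an extra pass through the formatter and a fresh allocation just to reproduce the same text; clippy's useless_format lint suggests using the value as-is. Roughly (Record here is illustrative, not the parser's ParseRecord):

    struct Record {
        next_token: String,
    }

    fn make_record(text: String) -> Record {
        Record {
            // Before: next_token: format!("{}", text)
            // The value already has the right type, so it can be used directly.
            next_token: text,
        }
    }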
@@ -691,7 +691,7 @@ impl Parser {
                 self.token_handler.next();
                 self.token_handler.next();
                 let expr = self.expression()?;
-                InvocationArgument::Keyword { name: s.clone(), expr }
+                InvocationArgument::Keyword { name: s, expr }
             },
             _ => {
                 let expr = self.expression()?;
@@ -1023,7 +1023,7 @@ impl Parser {
                 let pat = self.pattern()?;
                 (name, pat)
             },
-            _ => (name.clone(), Pattern::Literal(PatternLiteral::StringPattern(name.clone())))
+            _ => (name.clone(), Pattern::Literal(PatternLiteral::StringPattern(name)))
         })
     }
@@ -1054,7 +1054,7 @@ impl Parser {
             self.restrictions.no_struct_literal = true;
             let x = self.while_cond();
             self.restrictions.no_struct_literal = false;
-            x?.map(|expr| Box::new(expr))
+            x?.map(Box::new)
         };
         let body = self.block()?;
         Ok(Expression::new(self.id_store.fresh(), WhileExpression {condition, body}))
@@ -1140,7 +1140,7 @@ impl Parser {
             StrLiteral {s, ..} => {
                 self.token_handler.next();
                 let id = self.id_store.fresh();
-                Ok(Expression::new(id, StringLiteral(s.clone())))
+                Ok(Expression::new(id, StringLiteral(s)))
             }
             e => ParseError::new_with_token(format!("Expected a literal expression, got {:?}", e), tok),
         }
@@ -1180,7 +1180,7 @@ impl Parser {
         let mut digits = self.digits()?;
         if let Period = self.token_handler.peek_kind() {
             self.token_handler.next();
-            digits.push_str(".");
+            digits.push('.');
             digits.push_str(&self.digits()?);
             match digits.parse::<f64>() {
                 Ok(f) => Ok(Expression::new(self.id_store.fresh(), FloatLiteral(f))),
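Appending a one-character string with push_str(".") goes through a &str when a plain char will do; this is what clippy's single_char_add_str lint (formerly single_char_push_str) catches. A self-contained sketch:

    fn float_text(whole: &str, frac: &str) -> String {
        let mut digits = String::from(whole);
        // Before: digits.push_str(".");
        digits.push('.');
        digits.push_str(frac);
        digits // float_text("3", "14") yields "3.14"
    }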
@@ -1211,20 +1211,16 @@ impl Parser {
     #[recursive_descent_method]
     fn import_declaration(&mut self) -> ParseResult<ImportSpecifier> {
         expect!(self, Keyword(Import));
-        let mut path_components = vec![];
-        path_components.push(self.identifier()?);
-        loop {
-            match (self.token_handler.peek_kind(), self.token_handler.peek_kind_n(1)) {
-                (Colon, Colon) => {
-                    self.token_handler.next();
-                    self.token_handler.next();
-                    if let Identifier(_) = self.token_handler.peek_kind() {
-                        path_components.push(self.identifier()?);
-                    } else {
-                        break;
-                    }
-                },
-                _ => break,
-            }
+        let mut path_components = vec![self.identifier()?];
+        while let (Colon, Colon) = (self.token_handler.peek_kind(), self.token_handler.peek_kind_n(1)) {
+            self.token_handler.next();
+            self.token_handler.next();
+            if let Identifier(_) = self.token_handler.peek_kind() {
+                path_components.push(self.identifier()?);
+            } else {
+                break;
+            }
         }
         let imported_names = match self.token_handler.peek_kind() {
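This hunk folds two cleanups together: the vector is now built with its first element instead of pushing into an empty vec![] (clippy's vec_init_then_push), and the loop { match ... _ => break } shape collapses into a while let (clippy's while_let_loop). A schematic version over a plain string slice, not the parser's token handler:

    fn parse_path(input: &[&str]) -> Vec<String> {
        let mut tokens = input.iter().copied().peekable();
        // vec_init_then_push: seed the Vec with its first element rather than
        // pushing into an empty vec![].
        let mut path = vec![tokens.next().unwrap_or("").to_string()];
        // while_let_loop: the old loop { match ... { pattern => ..., _ => break } }
        // is just a while let on the one interesting pattern.
        while let Some("::") = tokens.peek().copied() {
            tokens.next();
            match tokens.next() {
                Some(seg) => path.push(seg.to_string()),
                None => break,
            }
        }
        path // e.g. parse_path(&["foo", "::", "bar"]) yields ["foo", "bar"]
    }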

View File

@@ -170,12 +170,12 @@ fn parsing_tuples() {
     parse_test_wrap_ast!("()", exst!(TupleLiteral(vec![])));
     parse_test_wrap_ast!("(\"hella\", 34)", exst!(
         TupleLiteral(
-            vec![ex!(s r#""hella""#).into(), ex!(s "34").into()]
+            vec![ex!(s r#""hella""#), ex!(s "34")]
         )
     ));
     parse_test_wrap_ast!("((1+2), \"slough\")", exst!(TupleLiteral(vec![
-        ex!(binexp!("+", NatLiteral(1), NatLiteral(2))).into(),
-        ex!(StringLiteral(rc!(slough))).into(),
+        ex!(binexp!("+", NatLiteral(1), NatLiteral(2))),
+        ex!(StringLiteral(rc!(slough))),
     ])))
 }
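The test-side hunks in this file, here and below, all remove .into() calls on values that already have the target type, which is the pattern clippy's useless_conversion lint reports. Schematically:

    fn args_of(name: String) -> Vec<String> {
        // Before: vec![name.into()]
        // name is already a String, so the .into() is an identity conversion.
        vec![name]
    }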
@@ -255,7 +255,7 @@ fn parsing_functions() {
     parse_test_wrap_ast!("oi()", exst!(Call { f: bx!(ex!(val!("oi"))), arguments: vec![] }));
     parse_test_wrap_ast!("oi(a, 2 + 2)", exst!(Call
         { f: bx!(ex!(val!("oi"))),
-          arguments: vec![inv!(ex!(val!("a"))), inv!(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))).into()]
+          arguments: vec![inv!(ex!(val!("a"))), inv!(ex!(binexp!("+", NatLiteral(2), NatLiteral(2))))]
         }));
     parse_error!("a(b,,c)");
@@ -541,7 +541,7 @@ fn parsing_lambdas() {
             type_anno: None,
             body: vec![exst!(s "y")] }
         )),
-        arguments: vec![inv!(ex!(NatLiteral(1))).into()] })
+        arguments: vec![inv!(ex!(NatLiteral(1)))] })
     };
     parse_test_wrap_ast! {
@@ -589,7 +589,7 @@ fn more_advanced_lambdas() {
         exst! {
             Call {
                 f: bx!(ex!(Call { f: bx!(ex!(val!("wahoo"))), arguments: vec![] })),
-                arguments: vec![inv!(ex!(NatLiteral(3))).into()],
+                arguments: vec![inv!(ex!(NatLiteral(3)))],
             }
         }
     ]