Replace regex-based lexer with character-at-a-time lexer (#406)
commit 596ea34460 (parent e615ea0389)
Cargo.lock (1 line changed, generated)

@@ -206,7 +206,6 @@ dependencies = [
  "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "target 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
Cargo.toml

@@ -21,7 +21,6 @@ itertools = "0.8.0"
 lazy_static = "1.0.0"
 libc = "0.2.21"
 log = "0.4.4"
-regex = "1.0.0"
 target = "1.0.0"
 tempdir = "0.3.5"
 unicode-width = "0.1.3"
README.adoc (80 lines changed)

@@ -710,40 +710,6 @@ When a script with a shebang is executed, the system supplies the path to the sc
 
 With the above shebang, `just` will change its working directory to the location of the script. If you'd rather leave the working directory unchanged, use `#!/usr/bin/env just --working-directory . --justfile`.
 
-== Frequently Asked Questions
-
-=== What are the idiosyncrasies of make that just avoids?
-
-Make has some behaviors which are either confusing, complicated, or make it unsuitable for use as a general command runner.
-
-One example is that sometimes make won't run the commands in a recipe. For example, if you have a file called `test` and the following makefile that runs it:
-
-```make
-test:
-    ./test
-```
-
-Make will actually refuse to run it:
-
-```sh
-$ make test
-make: `test' is up to date.
-```
-
-Make sees the recipe `test` and assumes that it produces a file called `test`. It then sees that this file exists and thus assumes that the recipe doesn't need to be run.
-
-To be fair, this behavior is desirable when using make as a build system, but not when using it as a command runner.
-
-Some other examples include having to understand the difference between `=` and `:=` assignment, the confusing error messages that can be produced if you mess up your makefile, having to use `$$` to write recipes that use environment variables, and incompatibilites between different flavors of make.
-
-=== What's the relationship between just and cargo build scripts?
-
-http://doc.crates.io/build-script.html[Cargo build scripts] have a pretty specific use, which is to control how cargo builds your rust project. This might include adding flags to `rustc` invocations, building an external dependency, or running some kind of codegen step.
-
-`just`, on the other hand, is for all the other miscellaneous commands you might run as part of development. Things like running tests in different configurations, linting your code, pushing build artifacts to a server, removing temporary files, and the like.
-
-Also, although `just` is written in rust, it can be used regardless of the language or build system your project uses.
-
 == Miscellanea
 
 === Companion Tools
@@ -813,7 +779,7 @@ Before `just` was a fancy rust program it was a tiny shell script that called `m
 
 === Non-Project Specific Justfile
 
-If you want some commands to be available everwhere, put them in `~/.justfile` and add the following to your shell's initialization file:
+If you want some commands to be available everywhere, put them in `~/.justfile` and add the following to your shell's initialization file:
 
 ```sh
 alias .j='just --justfile ~/.justfile --working-directory ~'
@@ -829,6 +795,50 @@ I'm pretty sure that nobody actually uses this feature, but it's there.
 
 ¯\\_(ツ)_/¯
 
+== Contributing
+
+`just` welcomes your contributions! `just` is released under the maximally permissive [CC0](https://creativecommons.org/publicdomain/zero/1.0/legalcode.txt) public domain dedication and fallback license, so your changes must also released under this license.
+
+=== Janus
+
+[Janus](https://github.com/casey/janus) is a tool that collects and analyzes justfiles, and can determine if a new version of `just` breaks or changes the interpretation of existing justfiles.
+
+Before merging a particularly large or gruesome change, Janus should be run to make sure that nothing breaks. Don't worry about running Janus yourself, Casey will happily run it for you on changes that need it.
+
+== Frequently Asked Questions
+
+=== What are the idiosyncrasies of make that just avoids?
+
+Make has some behaviors which are either confusing, complicated, or make it unsuitable for use as a general command runner.
+
+One example is that sometimes make won't run the commands in a recipe. For example, if you have a file called `test` and the following makefile that runs it:
+
+```make
+test:
+    ./test
+```
+
+Make will actually refuse to run it:
+
+```sh
+$ make test
+make: `test' is up to date.
+```
+
+Make sees the recipe `test` and assumes that it produces a file called `test`. It then sees that this file exists and thus assumes that the recipe doesn't need to be run.
+
+To be fair, this behavior is desirable when using make as a build system, but not when using it as a command runner.
+
+Some other examples include having to understand the difference between `=` and `:=` assignment, the confusing error messages that can be produced if you mess up your makefile, having to use `$$` to write recipes that use environment variables, and incompatibilites between different flavors of make.
+
+=== What's the relationship between just and cargo build scripts?
+
+http://doc.crates.io/build-script.html[Cargo build scripts] have a pretty specific use, which is to control how cargo builds your rust project. This might include adding flags to `rustc` invocations, building an external dependency, or running some kind of codegen step.
+
+`just`, on the other hand, is for all the other miscellaneous commands you might run as part of development. Things like running tests in different configurations, linting your code, pushing build artifacts to a server, removing temporary files, and the like.
+
+Also, although `just` is written in rust, it can be used regardless of the language or build system your project uses.
+
 == Further Ramblings
 
 I personally find it very useful to write a `justfile` for almost every project, big or small.
functions.rs (new file, 30 lines)

@@ -0,0 +1,30 @@
+use crate::common::*;
+
+pub struct Functions<'a> {
+  stack: Vec<&'a Expression<'a>>,
+}
+
+impl<'a> Iterator for Functions<'a> {
+  type Item = (&'a Token<'a>, usize);
+
+  fn next(&mut self) -> Option<Self::Item> {
+    match self.stack.pop() {
+      None
+      | Some(Expression::String { .. })
+      | Some(Expression::Backtick { .. })
+      | Some(Expression::Variable { .. }) => None,
+      Some(Expression::Call {
+        token, arguments, ..
+      }) => Some((token, arguments.len())),
+      Some(Expression::Concatination { lhs, rhs }) => {
+        self.stack.push(lhs);
+        self.stack.push(rhs);
+        self.next()
+      }
+      Some(Expression::Group { expression }) => {
+        self.stack.push(expression);
+        self.next()
+      }
+    }
+  }
+}
justfile (2 lines changed)

@@ -83,7 +83,7 @@ sloc:
   ! grep --color -En '.{101}' src/*.rs
 
 replace FROM TO:
-  find src -name '*.rs' | xargs sed -i '' -E 's/{{FROM}}/{{TO}}/g'
+  sd -i '{{FROM}}' '{{TO}}' src/*.rs
 
 test-quine:
   cargo run -- quine
src/assignment_evaluator.rs

@@ -1,7 +1,5 @@
 use crate::common::*;
 
-use brev;
-
 pub struct AssignmentEvaluator<'a: 'b, 'b> {
   pub assignments: &'b BTreeMap<&'a str, Expression<'a>>,
   pub invocation_directory: &'b Result<PathBuf, String>,
@@ -53,7 +51,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
     let mut evaluated = String::new();
     for fragment in line {
       match *fragment {
-        Fragment::Text { ref text } => evaluated += text.lexeme,
+        Fragment::Text { ref text } => evaluated += text.lexeme(),
         Fragment::Expression { ref expression } => {
           evaluated += &self.evaluate_expression(expression, arguments)?;
         }
@@ -183,7 +181,7 @@ mod test {
       output_error: OutputError::Code(code),
     } => {
       assert_eq!(code, 100);
-      assert_eq!(token.lexeme, "`f() { return 100; }; f`");
+      assert_eq!(token.lexeme(), "`f() { return 100; }; f`");
     }
     other => panic!("expected a code run error, but got: {}", other),
   }
@@ -211,7 +209,7 @@ recipe:
       token,
       output_error: OutputError::Code(_),
     } => {
-      assert_eq!(token.lexeme, "`echo $exported_variable`");
+      assert_eq!(token.lexeme(), "`echo $exported_variable`");
     }
     other => panic!("expected a backtick code errror, but got: {}", other),
   }
src/assignment_resolver.rs

@@ -45,10 +45,10 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
       let message = format!("attempted to resolve unknown assignment `{}`", name);
       return Err(CompilationError {
         text: "",
-        index: 0,
+        offset: 0,
         line: 0,
         column: 0,
-        width: None,
+        width: 0,
         kind: Internal { message },
       });
     }
@@ -96,40 +96,40 @@ mod test {
   compilation_error_test! {
     name: circular_variable_dependency,
     input: "a = b\nb = a",
-    index: 0,
+    offset: 0,
     line: 0,
     column: 0,
-    width: Some(1),
+    width: 1,
     kind: CircularVariableDependency{variable: "a", circle: vec!["a", "b", "a"]},
   }
 
   compilation_error_test! {
     name: self_variable_dependency,
     input: "a = a",
-    index: 0,
+    offset: 0,
     line: 0,
     column: 0,
-    width: Some(1),
+    width: 1,
     kind: CircularVariableDependency{variable: "a", circle: vec!["a", "a"]},
   }
 
   compilation_error_test! {
     name: unknown_expression_variable,
     input: "x = yy",
-    index: 4,
+    offset: 4,
     line: 0,
     column: 4,
-    width: Some(2),
+    width: 2,
     kind: UndefinedVariable{variable: "yy"},
   }
 
   compilation_error_test! {
     name: unknown_function,
     input: "a = foo()",
-    index: 4,
+    offset: 4,
     line: 0,
     column: 4,
-    width: Some(3),
+    width: 3,
     kind: UnknownFunction{function: "foo"},
   }
 
src/color.rs (30 lines changed)

@@ -2,16 +2,8 @@ use crate::common::*;
 
 use ansi_term::Color::*;
 use ansi_term::{ANSIGenericString, Prefix, Style, Suffix};
-use atty::is as is_atty;
 use atty::Stream;
 
-#[derive(Copy, Clone)]
-pub enum UseColor {
-  Auto,
-  Always,
-  Never,
-}
-
 #[derive(Copy, Clone)]
 pub struct Color {
   use_color: UseColor,
@@ -19,16 +11,6 @@ pub struct Color {
   style: Style,
 }
 
-impl Default for Color {
-  fn default() -> Color {
-    Color {
-      use_color: UseColor::Never,
-      atty: false,
-      style: Style::new(),
-    }
-  }
-}
-
 impl Color {
   fn restyle(self, style: Style) -> Color {
     Color { style, ..self }
@@ -36,7 +18,7 @@ impl Color {
 
   fn redirect(self, stream: Stream) -> Color {
     Color {
-      atty: is_atty(stream),
+      atty: atty::is(stream),
       ..self
     }
   }
@@ -138,3 +120,13 @@ impl Color {
     self.effective_style().suffix()
   }
 }
+
+impl Default for Color {
+  fn default() -> Color {
+    Color {
+      use_color: UseColor::Never,
+      atty: false,
+      style: Style::new(),
+    }
+  }
+}
src/common.rs

@@ -9,6 +9,7 @@ pub(crate) use std::{
   path::{Path, PathBuf},
   process,
   process::Command,
+  str::Chars,
   sync::{Mutex, MutexGuard},
   usize, vec,
 };
@@ -16,7 +17,6 @@ pub(crate) use std::{
 pub(crate) use edit_distance::edit_distance;
 pub(crate) use libc::{EXIT_FAILURE, EXIT_SUCCESS};
 pub(crate) use log::warn;
-pub(crate) use regex::Regex;
 pub(crate) use tempdir::TempDir;
 pub(crate) use unicode_width::UnicodeWidthChar;
 
@@ -28,10 +28,12 @@ pub(crate) use crate::{
   color::Color,
   compilation_error::{CompilationError, CompilationErrorKind, CompilationResult},
   configuration::Configuration,
-  cooked_string::CookedString,
   expression::Expression,
   fragment::Fragment,
-  function::{evaluate_function, resolve_function, FunctionContext},
+  function::{evaluate_function, resolve_function},
+  function_context::FunctionContext,
+  functions::Functions,
+  interrupt_guard::InterruptGuard,
   interrupt_handler::InterruptHandler,
   justfile::Justfile,
   lexer::Lexer,
@@ -39,11 +41,18 @@ pub(crate) use crate::{
   misc::{default, empty},
   parameter::Parameter,
   parser::Parser,
-  recipe::{Recipe, RecipeContext},
+  position::Position,
+  recipe::Recipe,
+  recipe_context::RecipeContext,
   recipe_resolver::RecipeResolver,
   runtime_error::{RunResult, RuntimeError},
   shebang::Shebang,
-  token::{Token, TokenKind},
+  state::State,
+  string_literal::StringLiteral,
+  token::Token,
+  token_kind::TokenKind,
+  use_color::UseColor,
+  variables::Variables,
   verbosity::Verbosity,
 };
 
src/compilation_error.rs

@@ -7,10 +7,10 @@ pub type CompilationResult<'a, T> = Result<T, CompilationError<'a>>;
 #[derive(Debug, PartialEq)]
 pub struct CompilationError<'a> {
   pub text: &'a str,
-  pub index: usize,
+  pub offset: usize,
   pub line: usize,
   pub column: usize,
-  pub width: Option<usize>,
+  pub width: usize,
   pub kind: CompilationErrorKind<'a>,
 }
 
@@ -98,8 +98,10 @@ pub enum CompilationErrorKind<'a> {
     function: &'a str,
   },
   UnknownStartOfToken,
+  UnpairedCarriageReturn,
   UnterminatedInterpolation,
   UnterminatedString,
+  UnterminatedBacktick,
 }
 
 impl<'a> Display for CompilationError<'a> {
@@ -277,12 +279,18 @@ impl<'a> Display for CompilationError<'a> {
       UnknownStartOfToken => {
        writeln!(f, "Unknown start of token:")?;
       }
+      UnpairedCarriageReturn => {
+        writeln!(f, "Unpaired carriage return")?;
+      }
       UnterminatedInterpolation => {
         writeln!(f, "Unterminated interpolation")?;
       }
       UnterminatedString => {
         writeln!(f, "Unterminated string")?;
       }
+      UnterminatedBacktick => {
+        writeln!(f, "Unterminated backtick")?;
+      }
       Internal { ref message } => {
         writeln!(
           f,
@@ -295,6 +303,13 @@ impl<'a> Display for CompilationError<'a> {
 
     write!(f, "{}", message.suffix())?;
 
-    write_error_context(f, self.text, self.index, self.line, self.column, self.width)
+    write_error_context(
+      f,
+      self.text,
+      self.offset,
+      self.line,
+      self.column,
+      self.width,
+    )
   }
 }
src/expression.rs

@@ -16,7 +16,7 @@ pub enum Expression<'a> {
     rhs: Box<Expression<'a>>,
   },
   String {
-    cooked_string: CookedString<'a>,
+    cooked_string: StringLiteral<'a>,
   },
   Variable {
     name: &'a str,
@@ -29,11 +29,11 @@ pub enum Expression<'a> {
 
 impl<'a> Expression<'a> {
   pub fn variables(&'a self) -> Variables<'a> {
-    Variables { stack: vec![self] }
+    Variables::new(self)
   }
 
   pub fn functions(&'a self) -> Functions<'a> {
-    Functions { stack: vec![self] }
+    Functions::new(self)
   }
 }
 
@@ -64,59 +64,3 @@ impl<'a> Display for Expression<'a> {
     Ok(())
   }
 }
-
-pub struct Variables<'a> {
-  stack: Vec<&'a Expression<'a>>,
-}
-
-impl<'a> Iterator for Variables<'a> {
-  type Item = &'a Token<'a>;
-
-  fn next(&mut self) -> Option<&'a Token<'a>> {
-    match self.stack.pop() {
-      None
-      | Some(Expression::String { .. })
-      | Some(Expression::Backtick { .. })
-      | Some(Expression::Call { .. }) => None,
-      Some(Expression::Variable { token, .. }) => Some(token),
-      Some(Expression::Concatination { lhs, rhs }) => {
-        self.stack.push(lhs);
-        self.stack.push(rhs);
-        self.next()
-      }
-      Some(Expression::Group { expression }) => {
-        self.stack.push(expression);
-        self.next()
-      }
-    }
-  }
-}
-
-pub struct Functions<'a> {
-  stack: Vec<&'a Expression<'a>>,
-}
-
-impl<'a> Iterator for Functions<'a> {
-  type Item = (&'a Token<'a>, usize);
-
-  fn next(&mut self) -> Option<Self::Item> {
-    match self.stack.pop() {
-      None
-      | Some(Expression::String { .. })
-      | Some(Expression::Backtick { .. })
-      | Some(Expression::Variable { .. }) => None,
-      Some(Expression::Call {
-        token, arguments, ..
-      }) => Some((token, arguments.len())),
-      Some(Expression::Concatination { lhs, rhs }) => {
-        self.stack.push(lhs);
-        self.stack.push(rhs);
-        self.next()
-      }
-      Some(Expression::Group { expression }) => {
-        self.stack.push(expression);
-        self.next()
-      }
-    }
-  }
-}
src/fragment.rs

@@ -9,7 +9,7 @@ pub enum Fragment<'a> {
 impl<'a> Fragment<'a> {
   pub fn continuation(&self) -> bool {
     match *self {
-      Fragment::Text { ref text } => text.lexeme.ends_with('\\'),
+      Fragment::Text { ref text } => text.lexeme().ends_with('\\'),
       _ => false,
     }
   }
|
@ -37,13 +37,8 @@ impl Function {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct FunctionContext<'a> {
|
|
||||||
pub invocation_directory: &'a Result<PathBuf, String>,
|
|
||||||
pub dotenv: &'a BTreeMap<String, String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn resolve_function<'a>(token: &Token<'a>, argc: usize) -> CompilationResult<'a, ()> {
|
pub fn resolve_function<'a>(token: &Token<'a>, argc: usize) -> CompilationResult<'a, ()> {
|
||||||
let name = token.lexeme;
|
let name = token.lexeme();
|
||||||
if let Some(function) = FUNCTIONS.get(&name) {
|
if let Some(function) = FUNCTIONS.get(&name) {
|
||||||
use self::Function::*;
|
use self::Function::*;
|
||||||
match (function, argc) {
|
match (function, argc) {
|
||||||
@ -58,7 +53,7 @@ pub fn resolve_function<'a>(token: &Token<'a>, argc: usize) -> CompilationResult
|
|||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
Err(token.error(CompilationErrorKind::UnknownFunction {
|
Err(token.error(CompilationErrorKind::UnknownFunction {
|
||||||
function: token.lexeme,
|
function: token.lexeme(),
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
src/function_context.rs (new file, 6 lines)

@@ -0,0 +1,6 @@
+use crate::common::*;
+
+pub struct FunctionContext<'a> {
+  pub invocation_directory: &'a Result<PathBuf, String>,
+  pub dotenv: &'a BTreeMap<String, String>,
+}
src/functions.rs (new file, 36 lines)

@@ -0,0 +1,36 @@
+use crate::common::*;
+
+pub struct Functions<'a> {
+  stack: Vec<&'a Expression<'a>>,
+}
+
+impl<'a> Functions<'a> {
+  pub fn new(root: &'a Expression<'a>) -> Functions<'a> {
+    Functions { stack: vec![root] }
+  }
+}
+
+impl<'a> Iterator for Functions<'a> {
+  type Item = (&'a Token<'a>, usize);
+
+  fn next(&mut self) -> Option<Self::Item> {
+    match self.stack.pop() {
+      None
+      | Some(Expression::String { .. })
+      | Some(Expression::Backtick { .. })
+      | Some(Expression::Variable { .. }) => None,
+      Some(Expression::Call {
+        token, arguments, ..
+      }) => Some((token, arguments.len())),
+      Some(Expression::Concatination { lhs, rhs }) => {
+        self.stack.push(lhs);
+        self.stack.push(rhs);
+        self.next()
+      }
+      Some(Expression::Group { expression }) => {
+        self.stack.push(expression);
+        self.next()
+      }
+    }
+  }
+}
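For orientation: `Expression::functions()` (see the src/expression.rs hunks below) now constructs this iterator, so resolution code can visit every call in an expression together with its argument count. A hedged usage sketch, not a verbatim excerpt from the resolvers:

```rust
// Hypothetical caller; `expression` is an &Expression and `resolve_function`
// is the free function shown in src/function.rs above.
for (token, argument_count) in expression.functions() {
  resolve_function(token, argument_count)?;
}
```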
src/interrupt_guard.rs (new file, 16 lines)

@@ -0,0 +1,16 @@
+use crate::common::*;
+
+pub struct InterruptGuard;
+
+impl InterruptGuard {
+  pub fn new() -> InterruptGuard {
+    InterruptHandler::instance().block();
+    InterruptGuard
+  }
+}
+
+impl Drop for InterruptGuard {
+  fn drop(&mut self) {
+    InterruptHandler::instance().unblock();
+  }
+}
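`InterruptGuard` is an RAII wrapper around the handler's block/unblock counter: interrupt handling is deferred for as long as a guard value is alive. A hedged usage sketch; the call sites that actually construct the guard are not part of this diff:

```rust
// Hypothetical: keep Ctrl-C from tearing `just` down while a child command runs.
fn run_child(mut command: std::process::Command) -> std::io::Result<std::process::ExitStatus> {
  let _guard = InterruptGuard::new(); // calls InterruptHandler::instance().block()
  command.status()
  // `_guard` is dropped here, which calls unblock() and restores normal handling
}
```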
src/interrupt_handler.rs

@@ -1,7 +1,5 @@
 use crate::common::*;
 
-use ctrlc;
-
 pub struct InterruptHandler {
   blocks: u32,
   interrupted: bool,
@@ -12,7 +10,7 @@ impl InterruptHandler {
     ctrlc::set_handler(|| InterruptHandler::instance().interrupt())
   }
 
-  fn instance() -> MutexGuard<'static, InterruptHandler> {
+  pub fn instance() -> MutexGuard<'static, InterruptHandler> {
     lazy_static! {
       static ref INSTANCE: Mutex<InterruptHandler> = Mutex::new(InterruptHandler::new());
     }
@@ -49,11 +47,11 @@ impl InterruptHandler {
     process::exit(130);
   }
 
-  fn block(&mut self) {
+  pub fn block(&mut self) {
     self.blocks += 1;
   }
 
-  fn unblock(&mut self) {
+  pub fn unblock(&mut self) {
     if self.blocks == 0 {
       die!(
         "{}",
@@ -76,18 +74,3 @@ impl InterruptHandler {
     function()
   }
 }
-
-pub struct InterruptGuard;
-
-impl InterruptGuard {
-  fn new() -> InterruptGuard {
-    InterruptHandler::instance().block();
-    InterruptGuard
-  }
-}
-
-impl Drop for InterruptGuard {
-  fn drop(&mut self) {
-    InterruptHandler::instance().unblock();
-  }
-}
src/lexer.rs (1251 lines changed; diff suppressed because it is too large)
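Since the rewritten src/lexer.rs is too large to render here, the following is a minimal, self-contained sketch of what character-at-a-time lexing looks like: advance over the input one `char` at a time, keep an offset/line/column position in step (compare the new `src/position.rs` below), and slice lexemes directly out of the source instead of matching regexes. This is an illustration of the technique only, not the actual `Lexer`; every name in it is hypothetical.

```rust
use std::str::Chars;

/// Source position, mirroring the fields of src/position.rs.
#[derive(Copy, Clone, Debug)]
struct Position {
  offset: usize,
  line: usize,
  column: usize,
}

/// A toy character-at-a-time lexer: no regexes, just one pass over `Chars`.
struct CharLexer<'a> {
  src: &'a str,
  chars: Chars<'a>,
  pos: Position,
}

impl<'a> CharLexer<'a> {
  fn new(src: &'a str) -> CharLexer<'a> {
    CharLexer {
      src,
      chars: src.chars(),
      pos: Position { offset: 0, line: 0, column: 0 },
    }
  }

  /// Look at the next character without consuming it.
  fn peek(&self) -> Option<char> {
    self.chars.clone().next()
  }

  /// Consume one character, keeping offset, line, and column in sync.
  fn advance(&mut self) -> Option<char> {
    let c = self.chars.next()?;
    self.pos.offset += c.len_utf8();
    if c == '\n' {
      self.pos.line += 1;
      self.pos.column = 0;
    } else {
      self.pos.column += 1;
    }
    Some(c)
  }

  /// Lex a name token, returning its lexeme as a slice of the original source.
  fn name(&mut self) -> Option<&'a str> {
    let start = self.pos.offset;
    while self
      .peek()
      .map(|c| c.is_alphanumeric() || c == '_' || c == '-')
      .unwrap_or(false)
    {
      self.advance();
    }
    if self.pos.offset > start {
      Some(&self.src[start..self.pos.offset])
    } else {
      None
    }
  }
}

fn main() {
  let mut lexer = CharLexer::new("build-all:\n  cargo build");
  assert_eq!(lexer.name(), Some("build-all"));
  println!("next token starts at {:?}", lexer.pos);
}
```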
src/lib.rs (11 lines changed)

@@ -20,10 +20,12 @@ mod command_ext;
 mod common;
 mod compilation_error;
 mod configuration;
-mod cooked_string;
 mod expression;
 mod fragment;
 mod function;
+mod function_context;
+mod functions;
+mod interrupt_guard;
 mod interrupt_handler;
 mod justfile;
 mod lexer;
@@ -32,13 +34,20 @@ mod misc;
 mod parameter;
 mod parser;
 mod platform;
+mod position;
 mod range_ext;
 mod recipe;
+mod recipe_context;
 mod recipe_resolver;
 mod run;
 mod runtime_error;
 mod shebang;
+mod state;
+mod string_literal;
 mod token;
+mod token_kind;
+mod use_color;
+mod variables;
 mod verbosity;
 
 pub use crate::run::run;
src/misc.rs (43 lines changed)

@@ -58,11 +58,13 @@ pub fn conjoin<T: Display>(
 pub fn write_error_context(
   f: &mut Formatter,
   text: &str,
-  index: usize,
+  offset: usize,
   line: usize,
   column: usize,
-  width: Option<usize>,
+  width: usize,
 ) -> Result<(), fmt::Error> {
+  let width = if width == 0 { 1 } else { width };
+
   let line_number = line + 1;
   let red = Color::fmt(f).error();
   match text.lines().nth(line) {
@@ -77,14 +79,14 @@ pub fn write_error_context(
         if i < column {
           space_column += 4;
         }
-        if i >= column && i < column + width.unwrap_or(1) {
+        if i >= column && i < column + width {
           space_width += 4;
         }
       } else {
         if i < column {
           space_column += UnicodeWidthChar::width(c).unwrap_or(0);
         }
-        if i >= column && i < column + width.unwrap_or(1) {
+        if i >= column && i < column + width {
          space_width += UnicodeWidthChar::width(c).unwrap_or(0);
         }
         space_line.push(c);
@@ -95,30 +97,19 @@ pub fn write_error_context(
       writeln!(f, "{0:1$} |", "", line_number_width)?;
       writeln!(f, "{} | {}", line_number, space_line)?;
       write!(f, "{0:1$} |", "", line_number_width)?;
-      if width == None {
-        write!(
-          f,
-          " {0:1$}{2}^{3}",
-          "",
-          space_column,
-          red.prefix(),
-          red.suffix()
-        )?;
-      } else {
-        write!(
-          f,
-          " {0:1$}{2}{3:^<4$}{5}",
-          "",
-          space_column,
-          red.prefix(),
-          "",
-          space_width,
-          red.suffix()
-        )?;
-      }
+      write!(
+        f,
+        " {0:1$}{2}{3:^<4$}{5}",
+        "",
+        space_column,
+        red.prefix(),
+        "",
+        space_width,
+        red.suffix()
+      )?;
     }
     None => {
-      if index != text.len() {
+      if offset != text.len() {
         write!(
           f,
           "internal error: Error has invalid line number: {}",
221
src/parser.rs
221
src/parser.rs
@ -16,7 +16,8 @@ pub struct Parser<'a> {
|
|||||||
|
|
||||||
impl<'a> Parser<'a> {
|
impl<'a> Parser<'a> {
|
||||||
pub fn parse(text: &'a str) -> CompilationResult<'a, Justfile> {
|
pub fn parse(text: &'a str) -> CompilationResult<'a, Justfile> {
|
||||||
let tokens = Lexer::lex(text)?;
|
let mut tokens = Lexer::lex(text)?;
|
||||||
|
tokens.retain(|token| token.kind != Whitespace);
|
||||||
let parser = Parser::new(text, tokens);
|
let parser = Parser::new(text, tokens);
|
||||||
parser.justfile()
|
parser.justfile()
|
||||||
}
|
}
|
||||||
@ -87,7 +88,7 @@ impl<'a> Parser<'a> {
|
|||||||
doc: Option<Token<'a>>,
|
doc: Option<Token<'a>>,
|
||||||
quiet: bool,
|
quiet: bool,
|
||||||
) -> CompilationResult<'a, ()> {
|
) -> CompilationResult<'a, ()> {
|
||||||
if let Some(recipe) = self.recipes.get(name.lexeme) {
|
if let Some(recipe) = self.recipes.get(name.lexeme()) {
|
||||||
return Err(name.error(DuplicateRecipe {
|
return Err(name.error(DuplicateRecipe {
|
||||||
recipe: recipe.name,
|
recipe: recipe.name,
|
||||||
first: recipe.line_number,
|
first: recipe.line_number,
|
||||||
@ -115,14 +116,14 @@ impl<'a> Parser<'a> {
|
|||||||
|
|
||||||
if parsed_variadic_parameter {
|
if parsed_variadic_parameter {
|
||||||
return Err(parameter.error(ParameterFollowsVariadicParameter {
|
return Err(parameter.error(ParameterFollowsVariadicParameter {
|
||||||
parameter: parameter.lexeme,
|
parameter: parameter.lexeme(),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
if parameters.iter().any(|p| p.name == parameter.lexeme) {
|
if parameters.iter().any(|p| p.name == parameter.lexeme()) {
|
||||||
return Err(parameter.error(DuplicateParameter {
|
return Err(parameter.error(DuplicateParameter {
|
||||||
recipe: name.lexeme,
|
recipe: name.lexeme(),
|
||||||
parameter: parameter.lexeme,
|
parameter: parameter.lexeme(),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -135,7 +136,7 @@ impl<'a> Parser<'a> {
|
|||||||
|
|
||||||
if parsed_parameter_with_default && default.is_none() {
|
if parsed_parameter_with_default && default.is_none() {
|
||||||
return Err(parameter.error(RequiredParameterFollowsDefaultParameter {
|
return Err(parameter.error(RequiredParameterFollowsDefaultParameter {
|
||||||
parameter: parameter.lexeme,
|
parameter: parameter.lexeme(),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -143,7 +144,7 @@ impl<'a> Parser<'a> {
|
|||||||
parsed_variadic_parameter = variadic;
|
parsed_variadic_parameter = variadic;
|
||||||
|
|
||||||
parameters.push(Parameter {
|
parameters.push(Parameter {
|
||||||
name: parameter.lexeme,
|
name: parameter.lexeme(),
|
||||||
token: parameter,
|
token: parameter,
|
||||||
default,
|
default,
|
||||||
variadic,
|
variadic,
|
||||||
@ -163,13 +164,13 @@ impl<'a> Parser<'a> {
|
|||||||
let mut dependencies = vec![];
|
let mut dependencies = vec![];
|
||||||
let mut dependency_tokens = vec![];
|
let mut dependency_tokens = vec![];
|
||||||
while let Some(dependency) = self.accept(Name) {
|
while let Some(dependency) = self.accept(Name) {
|
||||||
if dependencies.contains(&dependency.lexeme) {
|
if dependencies.contains(&dependency.lexeme()) {
|
||||||
return Err(dependency.error(DuplicateDependency {
|
return Err(dependency.error(DuplicateDependency {
|
||||||
recipe: name.lexeme,
|
recipe: name.lexeme(),
|
||||||
dependency: dependency.lexeme,
|
dependency: dependency.lexeme(),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
dependencies.push(dependency.lexeme);
|
dependencies.push(dependency.lexeme());
|
||||||
dependency_tokens.push(dependency);
|
dependency_tokens.push(dependency);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -197,7 +198,7 @@ impl<'a> Parser<'a> {
|
|||||||
if let Some(token) = self.accept(Text) {
|
if let Some(token) = self.accept(Text) {
|
||||||
if fragments.is_empty() {
|
if fragments.is_empty() {
|
||||||
if lines.is_empty() {
|
if lines.is_empty() {
|
||||||
if token.lexeme.starts_with("#!") {
|
if token.lexeme().starts_with("#!") {
|
||||||
shebang = true;
|
shebang = true;
|
||||||
}
|
}
|
||||||
} else if !shebang
|
} else if !shebang
|
||||||
@ -206,7 +207,7 @@ impl<'a> Parser<'a> {
|
|||||||
.and_then(|line| line.last())
|
.and_then(|line| line.last())
|
||||||
.map(Fragment::continuation)
|
.map(Fragment::continuation)
|
||||||
.unwrap_or(false)
|
.unwrap_or(false)
|
||||||
&& (token.lexeme.starts_with(' ') || token.lexeme.starts_with('\t'))
|
&& (token.lexeme().starts_with(' ') || token.lexeme().starts_with('\t'))
|
||||||
{
|
{
|
||||||
return Err(token.error(ExtraLeadingWhitespace));
|
return Err(token.error(ExtraLeadingWhitespace));
|
||||||
}
|
}
|
||||||
@ -234,12 +235,12 @@ impl<'a> Parser<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
self.recipes.insert(
|
self.recipes.insert(
|
||||||
name.lexeme,
|
name.lexeme(),
|
||||||
Recipe {
|
Recipe {
|
||||||
line_number: name.line,
|
line_number: name.line,
|
||||||
name: name.lexeme,
|
name: name.lexeme(),
|
||||||
doc: doc.map(|t| t.lexeme[1..].trim()),
|
doc: doc.map(|t| t.lexeme()[1..].trim()),
|
||||||
private: &name.lexeme[0..1] == "_",
|
private: &name.lexeme()[0..1] == "_",
|
||||||
dependencies,
|
dependencies,
|
||||||
dependency_tokens,
|
dependency_tokens,
|
||||||
lines,
|
lines,
|
||||||
@ -263,26 +264,26 @@ impl<'a> Parser<'a> {
|
|||||||
}
|
}
|
||||||
let arguments = self.arguments()?;
|
let arguments = self.arguments()?;
|
||||||
if let Some(token) = self.expect(ParenR) {
|
if let Some(token) = self.expect(ParenR) {
|
||||||
return Err(self.unexpected_token(&token, &[Name, StringToken, ParenR]));
|
return Err(self.unexpected_token(&token, &[Name, StringCooked, ParenR]));
|
||||||
}
|
}
|
||||||
Ok(Expression::Call {
|
Ok(Expression::Call {
|
||||||
name: first.lexeme,
|
name: first.lexeme(),
|
||||||
token: first,
|
token: first,
|
||||||
arguments,
|
arguments,
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
Ok(Expression::Variable {
|
Ok(Expression::Variable {
|
||||||
name: first.lexeme,
|
name: first.lexeme(),
|
||||||
token: first,
|
token: first,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Backtick => Ok(Expression::Backtick {
|
Backtick => Ok(Expression::Backtick {
|
||||||
raw: &first.lexeme[1..first.lexeme.len() - 1],
|
raw: &first.lexeme()[1..first.lexeme().len() - 1],
|
||||||
token: first,
|
token: first,
|
||||||
}),
|
}),
|
||||||
RawString | StringToken => Ok(Expression::String {
|
StringRaw | StringCooked => Ok(Expression::String {
|
||||||
cooked_string: CookedString::new(&first)?,
|
cooked_string: StringLiteral::new(&first)?,
|
||||||
}),
|
}),
|
||||||
ParenL => {
|
ParenL => {
|
||||||
let expression = self.expression()?;
|
let expression = self.expression()?;
|
||||||
@ -295,7 +296,7 @@ impl<'a> Parser<'a> {
|
|||||||
expression: Box::new(expression),
|
expression: Box::new(expression),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
_ => Err(self.unexpected_token(&first, &[Name, StringToken])),
|
_ => Err(self.unexpected_token(&first, &[Name, StringCooked])),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -333,13 +334,13 @@ impl<'a> Parser<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn assignment(&mut self, name: Token<'a>, export: bool) -> CompilationResult<'a, ()> {
|
fn assignment(&mut self, name: Token<'a>, export: bool) -> CompilationResult<'a, ()> {
|
||||||
if self.assignments.contains_key(name.lexeme) {
|
if self.assignments.contains_key(name.lexeme()) {
|
||||||
return Err(name.error(DuplicateVariable {
|
return Err(name.error(DuplicateVariable {
|
||||||
variable: name.lexeme,
|
variable: name.lexeme(),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
if export {
|
if export {
|
||||||
self.exports.insert(name.lexeme);
|
self.exports.insert(name.lexeme());
|
||||||
}
|
}
|
||||||
|
|
||||||
let expression = self.expression()?;
|
let expression = self.expression()?;
|
||||||
@ -347,14 +348,14 @@ impl<'a> Parser<'a> {
|
|||||||
return Err(self.unexpected_token(&token, &[Plus, Eol]));
|
return Err(self.unexpected_token(&token, &[Plus, Eol]));
|
||||||
}
|
}
|
||||||
|
|
||||||
self.assignments.insert(name.lexeme, expression);
|
self.assignments.insert(name.lexeme(), expression);
|
||||||
self.assignment_tokens.insert(name.lexeme, name);
|
self.assignment_tokens.insert(name.lexeme(), name);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn alias(&mut self, name: Token<'a>) -> CompilationResult<'a, ()> {
|
fn alias(&mut self, name: Token<'a>) -> CompilationResult<'a, ()> {
|
||||||
// Make sure alias doesn't already exist
|
// Make sure alias doesn't already exist
|
||||||
if let Some(alias) = self.aliases.get(name.lexeme) {
|
if let Some(alias) = self.aliases.get(name.lexeme()) {
|
||||||
return Err(name.error(DuplicateAlias {
|
return Err(name.error(DuplicateAlias {
|
||||||
alias: alias.name,
|
alias: alias.name,
|
||||||
first: alias.line_number,
|
first: alias.line_number,
|
||||||
@ -363,7 +364,7 @@ impl<'a> Parser<'a> {
|
|||||||
|
|
||||||
// Make sure the next token is of kind Name and keep it
|
// Make sure the next token is of kind Name and keep it
|
||||||
let target = if let Some(next) = self.accept(Name) {
|
let target = if let Some(next) = self.accept(Name) {
|
||||||
next.lexeme
|
next.lexeme()
|
||||||
} else {
|
} else {
|
||||||
let unexpected = self.tokens.next().unwrap();
|
let unexpected = self.tokens.next().unwrap();
|
||||||
return Err(self.unexpected_token(&unexpected, &[Name]));
|
return Err(self.unexpected_token(&unexpected, &[Name]));
|
||||||
@ -375,15 +376,15 @@ impl<'a> Parser<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
self.aliases.insert(
|
self.aliases.insert(
|
||||||
name.lexeme,
|
name.lexeme(),
|
||||||
Alias {
|
Alias {
|
||||||
name: name.lexeme,
|
name: name.lexeme(),
|
||||||
line_number: name.line,
|
line_number: name.line,
|
||||||
private: name.lexeme.starts_with('_'),
|
private: name.lexeme().starts_with('_'),
|
||||||
target,
|
target,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
self.alias_tokens.insert(name.lexeme, name);
|
self.alias_tokens.insert(name.lexeme(), name);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@ -416,7 +417,7 @@ impl<'a> Parser<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
Name => {
|
Name => {
|
||||||
if token.lexeme == "export" {
|
if token.lexeme() == "export" {
|
||||||
let next = self.tokens.next().unwrap();
|
let next = self.tokens.next().unwrap();
|
||||||
if next.kind == Name && self.accepted(Equals) {
|
if next.kind == Name && self.accepted(Equals) {
|
||||||
self.assignment(next, true)?;
|
self.assignment(next, true)?;
|
||||||
@ -426,7 +427,7 @@ impl<'a> Parser<'a> {
|
|||||||
self.recipe(&token, doc, false)?;
|
self.recipe(&token, doc, false)?;
|
||||||
doc = None;
|
doc = None;
|
||||||
}
|
}
|
||||||
} else if token.lexeme == "alias" {
|
} else if token.lexeme() == "alias" {
|
||||||
let next = self.tokens.next().unwrap();
|
let next = self.tokens.next().unwrap();
|
||||||
if next.kind == Name && self.accepted(Equals) {
|
if next.kind == Name && self.accepted(Equals) {
|
||||||
self.alias(next)?;
|
self.alias(next)?;
|
||||||
@ -449,10 +450,10 @@ impl<'a> Parser<'a> {
|
|||||||
None => {
|
None => {
|
||||||
return Err(CompilationError {
|
return Err(CompilationError {
|
||||||
text: self.text,
|
text: self.text,
|
||||||
index: 0,
|
offset: 0,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 0,
|
column: 0,
|
||||||
width: None,
|
width: 0,
|
||||||
kind: Internal {
|
kind: Internal {
|
||||||
message: "unexpected end of token stream".to_string(),
|
message: "unexpected end of token stream".to_string(),
|
||||||
},
|
},
|
||||||
@ -476,18 +477,18 @@ impl<'a> Parser<'a> {
|
|||||||
|
|
||||||
for recipe in self.recipes.values() {
|
for recipe in self.recipes.values() {
|
||||||
for parameter in &recipe.parameters {
|
for parameter in &recipe.parameters {
|
||||||
if self.assignments.contains_key(parameter.token.lexeme) {
|
if self.assignments.contains_key(parameter.token.lexeme()) {
|
||||||
return Err(parameter.token.error(ParameterShadowsVariable {
|
return Err(parameter.token.error(ParameterShadowsVariable {
|
||||||
parameter: parameter.token.lexeme,
|
parameter: parameter.token.lexeme(),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for dependency in &recipe.dependency_tokens {
|
for dependency in &recipe.dependency_tokens {
|
||||||
if !self.recipes[dependency.lexeme].parameters.is_empty() {
|
if !self.recipes[dependency.lexeme()].parameters.is_empty() {
|
||||||
return Err(dependency.error(DependencyHasParameters {
|
return Err(dependency.error(DependencyHasParameters {
|
||||||
recipe: recipe.name,
|
recipe: recipe.name,
|
||||||
dependency: dependency.lexeme,
|
dependency: dependency.lexeme(),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -863,253 +864,261 @@ f y=(`echo hello` + x) +z=("foo" + "bar"):"#,
|
|||||||
"x = ('0')",
|
"x = ('0')",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[rustfmt::skip]
|
||||||
|
summary_test! {
|
||||||
|
escaped_dos_newlines,
|
||||||
|
"@spam:\r
|
||||||
|
\t{ \\\r
|
||||||
|
\t\tfiglet test; \\\r
|
||||||
|
\t\tcargo build --color always 2>&1; \\\r
|
||||||
|
\t\tcargo test --color always -- --color always 2>&1; \\\r
|
||||||
|
\t} | less\r
|
||||||
|
",
|
||||||
|
"@spam:
|
||||||
|
{ \\
|
||||||
|
\tfiglet test; \\
|
||||||
|
\tcargo build --color always 2>&1; \\
|
||||||
|
\tcargo test --color always -- --color always 2>&1; \\
|
||||||
|
} | less",
|
||||||
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: duplicate_alias,
|
name: duplicate_alias,
|
||||||
input: "alias foo = bar\nalias foo = baz",
|
input: "alias foo = bar\nalias foo = baz",
|
||||||
index: 22,
|
offset: 22,
|
||||||
line: 1,
|
line: 1,
|
||||||
column: 6,
|
column: 6,
|
||||||
width: Some(3),
|
width: 3,
|
||||||
kind: DuplicateAlias { alias: "foo", first: 0 },
|
kind: DuplicateAlias { alias: "foo", first: 0 },
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: alias_syntax_multiple_rhs,
|
name: alias_syntax_multiple_rhs,
|
||||||
input: "alias foo = bar baz",
|
input: "alias foo = bar baz",
|
||||||
index: 16,
|
offset: 16,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 16,
|
column: 16,
|
||||||
width: Some(3),
|
width: 3,
|
||||||
kind: UnexpectedToken { expected: vec![Eol, Eof], found: Name },
|
kind: UnexpectedToken { expected: vec![Eol, Eof], found: Name },
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: alias_syntax_no_rhs,
|
name: alias_syntax_no_rhs,
|
||||||
input: "alias foo = \n",
|
input: "alias foo = \n",
|
||||||
index: 12,
|
offset: 12,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 12,
|
column: 12,
|
||||||
width: Some(1),
|
width: 1,
|
||||||
kind: UnexpectedToken {expected: vec![Name], found:Eol},
|
kind: UnexpectedToken {expected: vec![Name], found:Eol},
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: unknown_alias_target,
|
name: unknown_alias_target,
|
||||||
input: "alias foo = bar\n",
|
input: "alias foo = bar\n",
|
||||||
index: 6,
|
offset: 6,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 6,
|
column: 6,
|
||||||
width: Some(3),
|
width: 3,
|
||||||
kind: UnknownAliasTarget {alias: "foo", target: "bar"},
|
kind: UnknownAliasTarget {alias: "foo", target: "bar"},
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: alias_shadows_recipe_before,
|
name: alias_shadows_recipe_before,
|
||||||
input: "bar: \n echo bar\nalias foo = bar\nfoo:\n echo foo",
|
input: "bar: \n echo bar\nalias foo = bar\nfoo:\n echo foo",
|
||||||
index: 23,
|
offset: 23,
|
||||||
line: 2,
|
line: 2,
|
||||||
column: 6,
|
column: 6,
|
||||||
width: Some(3),
|
width: 3,
|
||||||
kind: AliasShadowsRecipe {alias: "foo", recipe_line: 3},
|
kind: AliasShadowsRecipe {alias: "foo", recipe_line: 3},
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: alias_shadows_recipe_after,
|
name: alias_shadows_recipe_after,
|
||||||
input: "foo:\n echo foo\nalias foo = bar\nbar:\n echo bar",
|
input: "foo:\n echo foo\nalias foo = bar\nbar:\n echo bar",
|
||||||
index: 22,
|
offset: 22,
|
||||||
line: 2,
|
line: 2,
|
||||||
column: 6,
|
column: 6,
|
||||||
width: Some(3),
|
width: 3,
|
||||||
kind: AliasShadowsRecipe { alias: "foo", recipe_line: 0 },
|
kind: AliasShadowsRecipe { alias: "foo", recipe_line: 0 },
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: missing_colon,
|
name: missing_colon,
|
||||||
input: "a b c\nd e f",
|
input: "a b c\nd e f",
|
||||||
index: 5,
|
offset: 5,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 5,
|
column: 5,
|
||||||
width: Some(1),
|
width: 1,
|
||||||
kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
|
kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: missing_default_eol,
|
name: missing_default_eol,
|
||||||
input: "hello arg=\n",
|
input: "hello arg=\n",
|
||||||
index: 10,
|
offset: 10,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 10,
|
column: 10,
|
||||||
width: Some(1),
|
width: 1,
|
||||||
kind: UnexpectedToken{expected: vec![Name, StringToken], found: Eol},
|
kind: UnexpectedToken{expected: vec![Name, StringCooked], found: Eol},
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: missing_default_eof,
|
name: missing_default_eof,
|
||||||
input: "hello arg=",
|
input: "hello arg=",
|
||||||
index: 10,
|
offset: 10,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 10,
|
column: 10,
|
||||||
width: Some(0),
|
width: 0,
|
||||||
kind: UnexpectedToken{expected: vec![Name, StringToken], found: Eof},
|
kind: UnexpectedToken{expected: vec![Name, StringCooked], found: Eof},
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: parameter_after_variadic,
|
name: parameter_after_variadic,
|
||||||
input: "foo +a bbb:",
|
input: "foo +a bbb:",
|
||||||
index: 7,
|
offset: 7,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 7,
|
column: 7,
|
||||||
width: Some(3),
|
width: 3,
|
||||||
kind: ParameterFollowsVariadicParameter{parameter: "bbb"},
|
kind: ParameterFollowsVariadicParameter{parameter: "bbb"},
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: required_after_default,
|
name: required_after_default,
|
||||||
input: "hello arg='foo' bar:",
|
input: "hello arg='foo' bar:",
|
||||||
index: 16,
|
offset: 16,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 16,
|
column: 16,
|
||||||
width: Some(3),
|
width: 3,
|
||||||
kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"},
|
kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"},
|
||||||
}
|
}
|
||||||
|
|
||||||
compilation_error_test! {
|
compilation_error_test! {
|
||||||
name: missing_eol,
|
name: missing_eol,
|
||||||
input: "a b c: z =",
|
input: "a b c: z =",
|
||||||
index: 9,
|
offset: 9,
|
||||||
line: 0,
|
line: 0,
|
||||||
column: 9,
|
column: 9,
|
||||||
width: Some(1),
|
width: 1,
|
||||||
kind: UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals},
|
    kind: UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals},
  }

  compilation_error_test! {
    name: duplicate_parameter,
    input: "a b b:",
-   index: 4,
+   offset: 4,
    line: 0,
    column: 4,
-   width: Some(1),
+   width: 1,
    kind: DuplicateParameter{recipe: "a", parameter: "b"},
  }

  compilation_error_test! {
    name: parameter_shadows_varible,
    input: "foo = \"h\"\na foo:",
-   index: 12,
+   offset: 12,
    line: 1,
    column: 2,
-   width: Some(3),
+   width: 3,
    kind: ParameterShadowsVariable{parameter: "foo"},
  }

  compilation_error_test! {
    name: dependency_has_parameters,
    input: "foo arg:\nb: foo",
-   index: 12,
+   offset: 12,
    line: 1,
    column: 3,
-   width: Some(3),
+   width: 3,
    kind: DependencyHasParameters{recipe: "b", dependency: "foo"},
  }

  compilation_error_test! {
    name: duplicate_dependency,
    input: "a b c: b c z z",
-   index: 13,
+   offset: 13,
    line: 0,
    column: 13,
-   width: Some(1),
+   width: 1,
    kind: DuplicateDependency{recipe: "a", dependency: "z"},
  }

  compilation_error_test! {
    name: duplicate_recipe,
    input: "a:\nb:\na:",
-   index: 6,
+   offset: 6,
    line: 2,
    column: 0,
-   width: Some(1),
+   width: 1,
    kind: DuplicateRecipe{recipe: "a", first: 0},
  }

  compilation_error_test! {
    name: duplicate_variable,
    input: "a = \"0\"\na = \"0\"",
-   index: 8,
+   offset: 8,
    line: 1,
    column: 0,
-   width: Some(1),
+   width: 1,
    kind: DuplicateVariable{variable: "a"},
  }

  compilation_error_test! {
    name: extra_whitespace,
    input: "a:\n blah\n blarg",
-   index: 10,
+   offset: 10,
    line: 2,
    column: 1,
-   width: Some(6),
+   width: 6,
    kind: ExtraLeadingWhitespace,
  }

  compilation_error_test! {
    name: interpolation_outside_of_recipe,
    input: "{{",
-   index: 0,
+   offset: 0,
    line: 0,
    column: 0,
-   width: Some(2),
+   width: 2,
    kind: UnexpectedToken{expected: vec![Name, At], found: InterpolationStart},
  }

- compilation_error_test! {
-   name: unclosed_interpolation_delimiter,
-   input: "a:\n echo {{ foo",
-   index: 15,
-   line: 1,
-   column: 12,
-   width: Some(0),
-   kind: UnexpectedToken{expected: vec![Plus, InterpolationEnd], found: Dedent},
- }
-
  compilation_error_test! {
    name: unclosed_parenthesis_in_expression,
    input: "x = foo(",
-   index: 8,
+   offset: 8,
    line: 0,
    column: 8,
-   width: Some(0),
-   kind: UnexpectedToken{expected: vec![Name, StringToken, ParenR], found: Eof},
+   width: 0,
+   kind: UnexpectedToken{expected: vec![Name, StringCooked, ParenR], found: Eof},
  }

  compilation_error_test! {
    name: unclosed_parenthesis_in_interpolation,
    input: "a:\n echo {{foo(}}",
-   index: 15,
+   offset: 15,
    line: 1,
    column: 12,
-   width: Some(2),
-   kind: UnexpectedToken{expected: vec![Name, StringToken, ParenR], found: InterpolationEnd},
+   width: 2,
+   kind: UnexpectedToken{expected: vec![Name, StringCooked, ParenR], found: InterpolationEnd},
  }

  compilation_error_test! {
    name: plus_following_parameter,
    input: "a b c+:",
-   index: 5,
+   offset: 5,
    line: 0,
    column: 5,
-   width: Some(1),
+   width: 1,
    kind: UnexpectedToken{expected: vec![Name], found: Plus},
  }

  compilation_error_test! {
    name: bad_export,
    input: "export a",
-   index: 8,
+   offset: 8,
    line: 0,
    column: 8,
-   width: Some(0),
+   width: 0,
    kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eof},
  }
src/position.rs (new file)
@@ -0,0 +1,7 @@
/// Source position
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Position {
  pub offset: usize,
  pub column: usize,
  pub line: usize,
}
@@ -38,12 +38,6 @@ pub struct Recipe<'a> {
   pub shebang: bool,
 }

-pub struct RecipeContext<'a> {
-  pub invocation_directory: &'a Result<PathBuf, String>,
-  pub configuration: &'a Configuration<'a>,
-  pub scope: BTreeMap<&'a str, String>,
-}
-
 impl<'a> Recipe<'a> {
   pub fn argument_range(&self) -> RangeInclusive<usize> {
     self.min_arguments()..=self.max_arguments()
@@ -319,7 +313,13 @@ impl<'a> Display for Recipe<'a> {
     if let Some(doc) = self.doc {
       writeln!(f, "# {}", doc)?;
     }
-    write!(f, "{}", self.name)?;
+    if self.quiet {
+      write!(f, "@{}", self.name)?;
+    } else {
+      write!(f, "{}", self.name)?;
+    }
+
     for parameter in &self.parameters {
       write!(f, " {}", parameter)?;
     }
@@ -337,7 +337,7 @@ impl<'a> Display for Recipe<'a> {
         write!(f, " ")?;
       }
       match *piece {
-        Fragment::Text { ref text } => write!(f, "{}", text.lexeme)?,
+        Fragment::Text { ref text } => write!(f, "{}", text.lexeme())?,
         Fragment::Expression { ref expression, .. } => write!(f, "{{{{{}}}}}", expression)?,
       }
     }
src/recipe_context.rs (new file)
@@ -0,0 +1,7 @@
use crate::common::*;

pub struct RecipeContext<'a> {
  pub invocation_directory: &'a Result<PathBuf, String>,
  pub configuration: &'a Configuration<'a>,
  pub scope: BTreeMap<&'a str, String>,
}
@@ -72,12 +72,12 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {

   fn resolve_function(&self, function: &Token, argc: usize) -> CompilationResult<'a, ()> {
     resolve_function(function, argc).map_err(|error| CompilationError {
-      index: error.index,
+      offset: error.offset,
       line: error.line,
       column: error.column,
       width: error.width,
       kind: UnknownFunction {
-        function: &self.text[error.index..error.index + error.width.unwrap()],
+        function: &self.text[error.offset..error.offset + error.width],
       },
       text: self.text,
     })
@@ -88,18 +88,18 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
     variable: &Token,
     parameters: &[Parameter],
   ) -> CompilationResult<'a, ()> {
-    let name = variable.lexeme;
+    let name = variable.lexeme();
     let undefined =
       !self.assignments.contains_key(name) && !parameters.iter().any(|p| p.name == name);
     if undefined {
       let error = variable.error(UndefinedVariable { variable: name });
       return Err(CompilationError {
-        index: error.index,
+        offset: error.offset,
         line: error.line,
         column: error.column,
         width: error.width,
         kind: UndefinedVariable {
-          variable: &self.text[error.index..error.index + error.width.unwrap()],
+          variable: &self.text[error.offset..error.offset + error.width],
         },
         text: self.text,
       });
@@ -115,7 +115,7 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
     self.stack.push(recipe.name);
     self.seen.insert(recipe.name);
     for dependency_token in &recipe.dependency_tokens {
-      match self.recipes.get(dependency_token.lexeme) {
+      match self.recipes.get(dependency_token.lexeme()) {
         Some(dependency) => {
           if !self.resolved.contains(dependency.name) {
             if self.seen.contains(dependency.name) {
@@ -139,7 +139,7 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
         None => {
           return Err(dependency_token.error(UnknownDependency {
             recipe: recipe.name,
-            unknown: dependency_token.lexeme,
+            unknown: dependency_token.lexeme(),
           }));
         }
       }
@@ -157,80 +157,80 @@ mod test {
  compilation_error_test! {
    name: circular_recipe_dependency,
    input: "a: b\nb: a",
-   index: 8,
+   offset: 8,
    line: 1,
    column: 3,
-   width: Some(1),
+   width: 1,
    kind: CircularRecipeDependency{recipe: "b", circle: vec!["a", "b", "a"]},
  }

  compilation_error_test! {
    name: self_recipe_dependency,
    input: "a: a",
-   index: 3,
+   offset: 3,
    line: 0,
    column: 3,
-   width: Some(1),
+   width: 1,
    kind: CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]},
  }

  compilation_error_test! {
    name: unknown_dependency,
    input: "a: b",
-   index: 3,
+   offset: 3,
    line: 0,
    column: 3,
-   width: Some(1),
+   width: 1,
    kind: UnknownDependency{recipe: "a", unknown: "b"},
  }

  compilation_error_test! {
    name: unknown_interpolation_variable,
    input: "x:\n {{ hello}}",
-   index: 9,
+   offset: 9,
    line: 1,
    column: 6,
-   width: Some(5),
+   width: 5,
    kind: UndefinedVariable{variable: "hello"},
  }

  compilation_error_test! {
    name: unknown_second_interpolation_variable,
    input: "wtf=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}",
-   index: 33,
+   offset: 33,
    line: 3,
    column: 16,
-   width: Some(3),
+   width: 3,
    kind: UndefinedVariable{variable: "lol"},
  }

  compilation_error_test! {
    name: unknown_function_in_interpolation,
    input: "a:\n echo {{bar()}}",
-   index: 11,
+   offset: 11,
    line: 1,
    column: 8,
-   width: Some(3),
+   width: 3,
    kind: UnknownFunction{function: "bar"},
  }

  compilation_error_test! {
    name: unknown_function_in_default,
    input: "a f=baz():",
-   index: 4,
+   offset: 4,
    line: 0,
    column: 4,
-   width: Some(3),
+   width: 3,
    kind: UnknownFunction{function: "baz"},
  }

  compilation_error_test! {
    name: unknown_variable_in_default,
    input: "a f=foo:",
-   index: 4,
+   offset: 4,
    line: 0,
    column: 4,
-   width: Some(3),
+   width: 3,
    kind: UndefinedVariable{variable: "foo"},
  }
}
src/run.rs
@@ -194,27 +194,33 @@ pub fn run() {
     }
   }

-  let override_re = Regex::new("^([^=]+)=(.*)$").unwrap();
+  fn is_override(arg: &&str) -> bool {
+    arg.chars().skip(1).any(|c| c == '=')
+  }

-  let raw_arguments: Vec<_> = matches
+  let raw_arguments: Vec<&str> = matches
     .values_of("ARGUMENTS")
     .map(Iterator::collect)
     .unwrap_or_default();

-  for argument in raw_arguments
-    .iter()
-    .take_while(|arg| override_re.is_match(arg))
-  {
-    let captures = override_re.captures(argument).unwrap();
-    overrides.insert(
-      captures.get(1).unwrap().as_str(),
-      captures.get(2).unwrap().as_str(),
-    );
+  for argument in raw_arguments.iter().cloned().take_while(is_override) {
+    let i = argument
+      .char_indices()
+      .skip(1)
+      .filter(|&(_, c)| c == '=')
+      .next()
+      .unwrap()
+      .0;
+
+    let name = &argument[..i];
+    let value = &argument[i + 1..];
+
+    overrides.insert(name, value);
   }

   let rest = raw_arguments
-    .iter()
-    .skip_while(|arg| override_re.is_match(arg))
+    .into_iter()
+    .skip_while(is_override)
     .enumerate()
     .flat_map(|(i, argument)| {
       if i == 0 {
@@ -237,7 +243,7 @@ pub fn run() {
         }
       }

-      Some(*argument)
+      Some(argument)
     })
     .collect::<Vec<&str>>();
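The run.rs hunk above drops the `override_re` regex in favor of a plain character scan. As a standalone illustration (not code from this commit; the function name and signature below are invented for the sketch), an argument counts as a `NAME=VALUE` override when it contains an `=` somewhere after its first character, and it is split at that first `=`:

```rust
/// Sketch of regex-free override parsing in the spirit of the hunk above.
/// `NAME=VALUE` arguments are detected and split at the first `=` that
/// appears after the first character, so `=foo` is not an override but
/// `a=b=c` splits into ("a", "b=c").
fn split_override(arg: &str) -> Option<(&str, &str)> {
    // Find the byte index of the first '=' past the first character.
    let i = arg
        .char_indices()
        .skip(1)
        .find(|&(_, c)| c == '=')
        .map(|(i, _)| i)?;
    Some((&arg[..i], &arg[i + 1..]))
}

fn main() {
    assert_eq!(split_override("arch=x86_64"), Some(("arch", "x86_64")));
    assert_eq!(split_override("a=b=c"), Some(("a", "b=c")));
    assert_eq!(split_override("=foo"), None); // no '=' past the first char
    assert_eq!(split_override("recipe"), None); // not an override at all
    println!("override parsing sketch ok");
}
```

Skipping the first character keeps a leading `=` from producing an empty override name, mirroring the `^([^=]+)=` anchor of the old regex.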
@@ -10,10 +10,10 @@ fn write_token_error_context(f: &mut Formatter, token: &Token) -> Result<(), fmt
   write_error_context(
     f,
     token.text,
-    token.index,
+    token.offset,
     token.line,
-    token.column + token.prefix.len(),
-    Some(token.lexeme.len()),
+    token.column,
+    token.lexeme().len(),
   )
 }
@@ -255,7 +255,12 @@ impl<'a> Display for RuntimeError<'a> {
       ref token,
       ref message,
     } => {
-      writeln!(f, "Call to function `{}` failed: {}", token.lexeme, message)?;
+      writeln!(
+        f,
+        "Call to function `{}` failed: {}",
+        token.lexeme(),
+        message
+      )?;
       error_token = Some(token);
     }
     Shebang {
src/state.rs (new file)
@@ -0,0 +1,9 @@
use crate::common::*;

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum State<'a> {
  Normal,
  Indented { indentation: &'a str },
  Text,
  Interpolation { interpolation_start: Position },
}
@@ -1,21 +1,21 @@
 use crate::common::*;

 #[derive(PartialEq, Debug)]
-pub struct CookedString<'a> {
+pub struct StringLiteral<'a> {
   pub raw: &'a str,
   pub cooked: Cow<'a, str>,
 }

-impl<'a> CookedString<'a> {
-  pub fn new(token: &Token<'a>) -> CompilationResult<'a, CookedString<'a>> {
-    let raw = &token.lexeme[1..token.lexeme.len() - 1];
+impl<'a> StringLiteral<'a> {
+  pub fn new(token: &Token<'a>) -> CompilationResult<'a, StringLiteral<'a>> {
+    let raw = &token.lexeme()[1..token.lexeme().len() - 1];

-    if let TokenKind::RawString = token.kind {
-      Ok(CookedString {
+    if let TokenKind::StringRaw = token.kind {
+      Ok(StringLiteral {
         cooked: Cow::Borrowed(raw),
         raw,
       })
-    } else if let TokenKind::StringToken = token.kind {
+    } else if let TokenKind::StringCooked = token.kind {
       let mut cooked = String::new();
       let mut escape = false;
       for c in raw.chars() {
@@ -41,7 +41,7 @@ impl<'a> CookedString<'a> {
         }
         cooked.push(c);
       }
-      Ok(CookedString {
+      Ok(StringLiteral {
         raw,
         cooked: Cow::Owned(cooked),
       })
@@ -53,7 +53,7 @@ impl<'a> CookedString<'a> {
   }
 }

-impl<'a> Display for CookedString<'a> {
+impl<'a> Display for StringLiteral<'a> {
   fn fmt(&self, f: &mut Formatter) -> fmt::Result {
     match self.cooked {
       Cow::Borrowed(raw) => write!(f, "'{}'", raw),
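Only the skeleton of the cooking loop is visible in the hunks above, so here is a rough self-contained sketch of the kind of escape processing a cooked (double-quoted) string goes through; the exact escape set accepted by just is an assumption for this sketch, not something shown in the diff:

```rust
/// Illustrative cooking of backslash escapes in a cooked string.
/// The escape set below (\n, \r, \t, \", \\) is an assumption, not taken
/// from the hunk above.
fn cook(raw: &str) -> Result<String, String> {
    let mut cooked = String::new();
    let mut escape = false;
    for c in raw.chars() {
        if escape {
            match c {
                'n' => cooked.push('\n'),
                'r' => cooked.push('\r'),
                't' => cooked.push('\t'),
                '"' => cooked.push('"'),
                '\\' => cooked.push('\\'),
                other => return Err(format!("unknown escape sequence: \\{}", other)),
            }
            escape = false;
        } else if c == '\\' {
            escape = true;
        } else {
            cooked.push(c);
        }
    }
    Ok(cooked)
}

fn main() {
    assert_eq!(cook(r"hello\nworld").unwrap(), "hello\nworld");
    assert!(cook(r"bad\q").is_err());
}
```

A raw (single-quoted) string skips this step entirely and borrows the source text unchanged, which is why the `StringRaw` branch above returns `Cow::Borrowed(raw)`.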
@@ -138,7 +138,7 @@ impl Fragment {
   fn new(fragment: fragment::Fragment) -> Fragment {
     match fragment {
       fragment::Fragment::Text { text } => Fragment::Text {
-        text: text.lexeme.to_owned(),
+        text: text.lexeme().to_owned(),
       },
       fragment::Fragment::Expression { expression } => Fragment::Expression {
         expression: Expression::new(expression),
@@ -7,15 +7,47 @@ pub fn parse_success(text: &str) -> Justfile {
   }
 }

+pub fn token_summary(tokens: &[Token]) -> String {
+  use TokenKind::*;
+
+  tokens
+    .iter()
+    .map(|t| match t.kind {
+      At => "@",
+      Backtick => "`",
+      Colon => ":",
+      Comma => ",",
+      Comment => "#",
+      Dedent => "<",
+      Eof => ".",
+      Eol => "$",
+      Equals => "=",
+      Indent => ">",
+      InterpolationEnd => "}",
+      InterpolationStart => "{",
+      Line => "^",
+      Name => "N",
+      ParenL => "(",
+      ParenR => ")",
+      Plus => "+",
+      StringRaw => "'",
+      StringCooked => "\"",
+      Text => "_",
+      Whitespace => " ",
+    })
+    .collect::<Vec<&str>>()
+    .join("")
+}
+
 macro_rules! compilation_error_test {
   (
     name: $name:ident,
     input: $input:expr,
-    index: $index:expr,
+    offset: $offset:expr,
     line: $line:expr,
     column: $column:expr,
     width: $width:expr,
     kind: $kind:expr,
   ) => {
     #[test]
     fn $name() {
@@ -23,19 +55,22 @@ macro_rules! compilation_error_test {

       let expected = crate::compilation_error::CompilationError {
         text: input,
-        index: $index,
+        offset: $offset,
         line: $line,
         column: $column,
         width: $width,
         kind: $kind,
       };

-      let tokens = crate::lexer::Lexer::lex(input).unwrap();
+      let mut tokens = Lexer::lex(input).unwrap();
+
+      tokens.retain(|token| token.kind != TokenKind::Whitespace);

       let parser = crate::parser::Parser::new(input, tokens);

       if let Err(error) = parser.justfile() {
         assert_eq!(error.text, expected.text);
-        assert_eq!(error.index, expected.index);
+        assert_eq!(error.offset, expected.offset);
         assert_eq!(error.line, expected.line);
         assert_eq!(error.column, expected.column);
         assert_eq!(error.width, expected.width);
src/token.rs
@@ -2,80 +2,27 @@ use crate::common::*;

 #[derive(Debug, PartialEq, Clone)]
 pub struct Token<'a> {
-  pub index: usize,
+  pub offset: usize,
+  pub length: usize,
   pub line: usize,
   pub column: usize,
   pub text: &'a str,
-  pub prefix: &'a str,
-  pub lexeme: &'a str,
   pub kind: TokenKind,
 }

 impl<'a> Token<'a> {
+  pub fn lexeme(&self) -> &'a str {
+    &self.text[self.offset..self.offset + self.length]
+  }
+
   pub fn error(&self, kind: CompilationErrorKind<'a>) -> CompilationError<'a> {
     CompilationError {
-      column: self.column + self.prefix.len(),
-      index: self.index + self.prefix.len(),
+      column: self.column,
+      offset: self.offset,
       line: self.line,
       text: self.text,
-      width: Some(self.lexeme.len()),
+      width: self.length,
       kind,
     }
   }
 }
-
-#[derive(Debug, PartialEq, Clone, Copy)]
-pub enum TokenKind {
-  At,
-  Backtick,
-  Colon,
-  Comma,
-  Comment,
-  Dedent,
-  Eof,
-  Eol,
-  Equals,
-  Indent,
-  InterpolationEnd,
-  InterpolationStart,
-  Line,
-  Name,
-  ParenL,
-  ParenR,
-  Plus,
-  RawString,
-  StringToken,
-  Text,
-}
-
-impl Display for TokenKind {
-  fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
-    use TokenKind::*;
-    write!(
-      f,
-      "{}",
-      match *self {
-        Backtick => "backtick",
-        Colon => "':'",
-        Comma => "','",
-        Comment => "comment",
-        Dedent => "dedent",
-        Eof => "end of file",
-        Eol => "end of line",
-        Equals => "'='",
-        Indent => "indent",
-        InterpolationEnd => "'}}'",
-        InterpolationStart => "'{{'",
-        Line => "command",
-        Name => "name",
-        Plus => "'+'",
-        At => "'@'",
-        ParenL => "'('",
-        ParenR => "')'",
-        StringToken => "string",
-        RawString => "raw string",
-        Text => "command text",
-      }
-    )
-  }
-}
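To make the Token change above concrete, here is a toy standalone version (not the real type) showing how a token that stores only an offset and length can recover its lexeme by slicing the source text on demand, instead of carrying `prefix` and `lexeme` slices around:

```rust
/// Toy version of the new Token layout: the token records an (offset, length)
/// span into the full source text and slices the lexeme out when asked.
#[derive(Debug, Clone, Copy)]
struct Tok<'a> {
    offset: usize,
    length: usize,
    text: &'a str,
}

impl<'a> Tok<'a> {
    fn lexeme(&self) -> &'a str {
        &self.text[self.offset..self.offset + self.length]
    }
}

fn main() {
    let text = "build:\n cargo build";
    // Span covering the recipe name `build` at the start of the source.
    let name = Tok { offset: 0, length: 5, text };
    assert_eq!(name.lexeme(), "build");
    // The width of an error caret for this token is simply its length.
    assert_eq!(name.lexeme().len(), name.length);
}
```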
src/token_kind.rs (new file)
@@ -0,0 +1,59 @@
use crate::common::*;

#[derive(Debug, PartialEq, Clone, Copy)]
pub enum TokenKind {
  At,
  Backtick,
  Colon,
  Comma,
  Comment,
  Dedent,
  Eof,
  Eol,
  Equals,
  Indent,
  InterpolationEnd,
  InterpolationStart,
  Line,
  Name,
  ParenL,
  ParenR,
  Plus,
  StringRaw,
  StringCooked,
  Text,
  Whitespace,
}

impl Display for TokenKind {
  fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
    use TokenKind::*;
    write!(
      f,
      "{}",
      match *self {
        At => "'@'",
        Backtick => "backtick",
        Colon => "':'",
        Comma => "','",
        Comment => "comment",
        Dedent => "dedent",
        Eof => "end of file",
        Eol => "end of line",
        Equals => "'='",
        Indent => "indent",
        InterpolationEnd => "'}}'",
        InterpolationStart => "'{{'",
        Line => "command",
        Name => "name",
        ParenL => "'('",
        ParenR => "')'",
        Plus => "'+'",
        StringRaw => "raw string",
        StringCooked => "cooked string",
        Text => "command text",
        Whitespace => "whitespace",
      }
    )
  }
}
src/use_color.rs (new file)
@@ -0,0 +1,6 @@
#[derive(Copy, Clone)]
pub enum UseColor {
  Auto,
  Always,
  Never,
}
src/variables.rs (new file)
@@ -0,0 +1,34 @@
use crate::common::*;

pub struct Variables<'a> {
  stack: Vec<&'a Expression<'a>>,
}

impl<'a> Variables<'a> {
  pub fn new(root: &'a Expression<'a>) -> Variables<'a> {
    Variables { stack: vec![root] }
  }
}

impl<'a> Iterator for Variables<'a> {
  type Item = &'a Token<'a>;

  fn next(&mut self) -> Option<&'a Token<'a>> {
    match self.stack.pop() {
      None
      | Some(Expression::String { .. })
      | Some(Expression::Backtick { .. })
      | Some(Expression::Call { .. }) => None,
      Some(Expression::Variable { token, .. }) => Some(token),
      Some(Expression::Concatination { lhs, rhs }) => {
        self.stack.push(lhs);
        self.stack.push(rhs);
        self.next()
      }
      Some(Expression::Group { expression }) => {
        self.stack.push(expression);
        self.next()
      }
    }
  }
}
@@ -431,6 +431,20 @@ integration_test! {
   status: 200,
 }

+// 😬鎌
+integration_test! {
+  name: backtick_code_interpolation_mod,
+  justfile: "f:\n 無{{`exit 200`}}",
+  args: (),
+  stdout: "",
+  stderr: "error: Backtick failed with exit code 200
+  |
+2 | 無{{`exit 200`}}
+  | ^^^^^^^^^^
+",
+  status: 200,
+}
+
 integration_test! {
   name: backtick_code_interpolation_tab,
   justfile: "
@@ -1029,7 +1043,7 @@ integration_test! {
 Leading whitespace may consist of tabs or spaces, but not both
   |
 2 | echo hello
-  | ^
+  | ^^^^^
 ",
   status: EXIT_FAILURE,
 }
@@ -1056,7 +1070,7 @@ integration_test! {
 Recipe started with `␉␉` but found line with `␉␠`
   |
 3 | echo goodbye
-  | ^
+  | ^^^^^
 ",
   status: EXIT_FAILURE,
 }
@@ -1725,14 +1739,14 @@ a:
 integration_test! {
   name: unterminated_raw_string,
   justfile: "
-a b=':
+a b= ':
 ",
   args: ("a"),
   stdout: "",
   stderr: "error: Unterminated string
   |
-2 | a b=':
+2 | a b= ':
   | ^
 ",
   status: EXIT_FAILURE,
 }
@@ -1740,14 +1754,14 @@ a b=':
 integration_test! {
   name: unterminated_string,
   justfile: r#"
-a b=":
+a b= ":
 "#,
   args: ("a"),
   stdout: "",
   stderr: r#"error: Unterminated string
   |
-2 | a b=":
+2 | a b= ":
   | ^
 "#,
   status: EXIT_FAILURE,
 }
@@ -2051,3 +2065,64 @@ foo a=arch() o=os() f=os_family():
   stderr: format!("echo {} {} {}\n", target::arch(), target::os(), target::os_family()).as_str(),
   status: EXIT_SUCCESS,
 }
+
+integration_test! {
+  name: unterminated_interpolation_eol,
+  justfile: "
+foo:
+ echo {{
+",
+  args: (),
+  stdout: "",
+  stderr: r#"error: Unterminated interpolation
+  |
+3 | echo {{
+  | ^^
+"#,
+  status: EXIT_FAILURE,
+}
+
+integration_test! {
+  name: unterminated_interpolation_eof,
+  justfile: "
+foo:
+ echo {{",
+  args: (),
+  stdout: "",
+  stderr: r#"error: Unterminated interpolation
+  |
+3 | echo {{
+  | ^^
+"#,
+  status: EXIT_FAILURE,
+}
+
+integration_test! {
+  name: unterminated_backtick,
+  justfile: "
+foo a=\t`echo blaaaaaah:
+ echo {{a}}",
+  args: (),
+  stdout: "",
+  stderr: r#"error: Unterminated backtick
+  |
+2 | foo a= `echo blaaaaaah:
+  | ^
+"#,
+  status: EXIT_FAILURE,
+}
+
+integration_test! {
+  name: unknown_start_of_token,
+  justfile: "
+assembly_source_files = $(wildcard src/arch/$(arch)/*.s)
+",
+  args: (),
+  stdout: "",
+  stderr: r#"error: Unknown start of token:
+  |
+2 | assembly_source_files = $(wildcard src/arch/$(arch)/*.s)
+  | ^
+"#,
+  status: EXIT_FAILURE,
+}
@@ -52,6 +52,7 @@ mod unix {
   }

   #[test]
+  #[ignore]
   fn interrupt_shebang() {
     interrupt_test(
       "
@@ -63,6 +64,7 @@ default:
   }

   #[test]
+  #[ignore]
   fn interrupt_line() {
     interrupt_test(
       "
variables.rs (new file)
@@ -0,0 +1,28 @@
use crate::common::*;

pub struct Variables<'a> {
  stack: Vec<&'a Expression<'a>>,
}

impl<'a> Iterator for Variables<'a> {
  type Item = &'a Token<'a>;

  fn next(&mut self) -> Option<&'a Token<'a>> {
    match self.stack.pop() {
      None
      | Some(Expression::String { .. })
      | Some(Expression::Backtick { .. })
      | Some(Expression::Call { .. }) => None,
      Some(Expression::Variable { token, .. }) => Some(token),
      Some(Expression::Concatination { lhs, rhs }) => {
        self.stack.push(lhs);
        self.stack.push(rhs);
        self.next()
      }
      Some(Expression::Group { expression }) => {
        self.stack.push(expression);
        self.next()
      }
    }
  }
}