diff --git a/GRAMMAR.md b/GRAMMAR.md index f4bbadb..105bf76 100644 --- a/GRAMMAR.md +++ b/GRAMMAR.md @@ -13,7 +13,7 @@ tokens BACKTICK = `[^`\n\r]*` COLON = : COMMENT = #([^!].*)?$ -EOL = \n|\r\n +NEWLINE = \n|\r\n EQUALS = = INTERPOLATION_START = {{ INTERPOLATION_END = }} @@ -36,9 +36,12 @@ justfile : item* EOF item : recipe | assignment | export - | EOL + | eol -assignment : NAME '=' expression EOL +eol : NEWLINE + | COMMENT NEWLINE + +assignment : NAME '=' expression eol export : 'export' assignment @@ -58,7 +61,7 @@ dependencies : NAME+ body : INDENT line+ DEDENT -line : LINE (TEXT | interpolation)+ EOL +line : LINE (TEXT | interpolation)+ eol interpolation : '{{' expression '}}' ``` diff --git a/justfile b/justfile index 61746ca..20180d1 100644 --- a/justfile +++ b/justfile @@ -5,9 +5,7 @@ test: build filter PATTERN: build cargo test --lib {{PATTERN}} -test-quine: - cargo run -- quine clean - +# test with backtrace backtrace: RUST_BACKTRACE=1 cargo test --lib @@ -22,6 +20,7 @@ watch COMMAND='test': version = `sed -En 's/version[[:space:]]*=[[:space:]]*"([^"]+)"/v\1/p' Cargo.toml` +# publish to crates.io publish: lint clippy test git branch | grep '* master' git diff --no-ext-diff --quiet --exit-code @@ -33,6 +32,7 @@ publish: lint clippy test git push origin --tags @echo 'Remember to merge the {{version}} branch on GitHub!' 
+# clean up feature branch BRANCH done BRANCH: git checkout {{BRANCH}} git pull --rebase github master @@ -40,9 +40,11 @@ done BRANCH: git pull --rebase github master git branch -d {{BRANCH}} +# install just from crates.io install: cargo install -f just +# install development dependencies install-dev-deps: rustup install nightly rustup update nightly @@ -50,14 +52,18 @@ install-dev-deps: cargo install -f cargo-watch cargo install -f cargo-check +# everyone's favorite animate paper clip clippy: rustup run nightly cargo clippy +# count non-empty lines of code sloc: - @cat src/*.rs | wc -l + @cat src/*.rs | sed '/^\s*$/d' | wc -l lint: + echo Checking for FIXME/TODO... ! grep --color -En 'FIXME|TODO' src/*.rs + echo Checking for long lines... ! grep --color -En '.{100}' src/*.rs nop: @@ -68,6 +74,9 @@ fail: backtick-fail: echo {{`exit 1`}} +test-quine: + cargo run -- quine clean + # make a quine, compile it, and verify it quine: create cc tmp/gen0.c -o tmp/gen0 diff --git a/src/app.rs b/src/app.rs index 4031a2e..87630cc 100644 --- a/src/app.rs +++ b/src/app.rs @@ -1,6 +1,7 @@ extern crate clap; extern crate regex; extern crate atty; +extern crate ansi_term; use std::{io, fs, env, process, convert, ffi}; use std::collections::BTreeMap; @@ -46,6 +47,14 @@ impl UseColor { UseColor::Never => false, } } + + fn blue(self, stream: atty::Stream) -> ansi_term::Style { + if self.should_color_stream(stream) { + ansi_term::Style::new().fg(ansi_term::Color::Blue) + } else { + ansi_term::Style::default() + } + } } fn edit>(path: P) -> ! 
{ @@ -210,11 +219,19 @@ pub fn app() { } if matches.is_present("list") { + let blue = use_color.blue(atty::Stream::Stdout); println!("Available recipes:"); for (name, recipe) in &justfile.recipes { print!(" {}", name); for parameter in &recipe.parameters { - print!(" {}", parameter); + if use_color.should_color_stream(atty::Stream::Stdout) { + print!(" {:#}", parameter); + } else { + print!(" {}", parameter); + } + } + if let Some(doc) = recipe.doc { + print!(" {} {}", blue.paint("#"), blue.paint(doc)); } println!(""); } diff --git a/src/integration.rs b/src/integration.rs index fce9800..64fee93 100644 --- a/src/integration.rs +++ b/src/integration.rs @@ -1004,13 +1004,15 @@ recipe: #[test] fn dump() { let text =" +# this recipe does something recipe: @exit 100"; integration_test( &["--dump"], text, 0, - "recipe: + "# this recipe does something +recipe: @exit 100 ", "", @@ -1096,15 +1098,19 @@ fn list() { integration_test( &["--list"], r#" + +# this does a thing hello a b='B ' c='C': echo {{a}} {{b}} {{c}} +# this comment will be ignored + a Z="\t z": "#, 0, r"Available recipes: a Z='\t z' - hello a b='B\t' c='C' + hello a b='B\t' c='C' # this does a thing ", "", ); diff --git a/src/lib.rs b/src/lib.rs index 6276b10..cc0056c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -19,7 +19,7 @@ extern crate edit_distance; use std::io::prelude::*; -use std::{fs, fmt, process, io, cmp}; +use std::{fs, fmt, process, io, iter, cmp}; use std::ops::Range; use std::fmt::Display; use regex::Regex; @@ -59,6 +59,10 @@ fn re(pattern: &str) -> Regex { Regex::new(pattern).unwrap() } +fn empty<T, C: iter::FromIterator<T>>() -> C { + iter::empty().collect() +} + fn contains<T: cmp::PartialOrd + Copy>(range: &Range<T>, i: T) -> bool { i >= range.start && i < range.end } @@ -67,6 +71,7 @@ fn contains<T: cmp::PartialOrd + Copy>(range: &Range<T>, i: T) -> bool { struct Recipe<'a> { line_number: usize, name: &'a str, + doc: Option<&'a str>, lines: Vec<Vec<Fragment<'a>>>, dependencies: Vec<&'a str>, dependency_tokens: Vec<Token<'a>>, @@ -84,10 +89,12 @@ struct Parameter<'a> { impl<'a> Display for Parameter<'a>
{ fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - write!(f, "{}", self.name)?; + let green = maybe_green(f.alternate()); + let cyan = maybe_cyan(f.alternate()); + write!(f, "{}", cyan.paint(self.name))?; if let Some(ref default) = self.default { let escaped = default.chars().flat_map(char::escape_default).collect::<String>();; - write!(f, r#"='{}'"#, escaped)?; + write!(f, r#"='{}'"#, green.paint(escaped))?; } Ok(()) } @@ -281,11 +288,11 @@ impl<'a> Recipe<'a> { }).collect(); let mut evaluator = Evaluator { - evaluated: Map::new(), + evaluated: empty(), scope: scope, exports: exports, - assignments: &Map::new(), - overrides: &Map::new(), + assignments: &empty(), + overrides: &empty(), quiet: options.quiet, }; @@ -418,6 +425,9 @@ impl<'a> Recipe<'a> { impl<'a> Display for Recipe<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { + if let Some(doc) = self.doc { + writeln!(f, "# {}", doc)?; + } write!(f, "{}", self.name)?; for parameter in &self.parameters { write!(f, " {}", parameter)?; @@ -455,9 +465,9 @@ fn resolve_recipes<'a>( text: &'a str, ) -> Result<(), CompileError<'a>> { let mut resolver = Resolver { - seen: Set::new(), - stack: vec![], - resolved: Set::new(), + seen: empty(), + stack: empty(), + resolved: empty(), recipes: recipes, }; @@ -553,9 +563,9 @@ fn resolve_assignments<'a>( let mut resolver = AssignmentResolver { assignments: assignments, assignment_tokens: assignment_tokens, - stack: vec![], - seen: Set::new(), - evaluated: Set::new(), + stack: empty(), + seen: empty(), + evaluated: empty(), }; for name in assignments.keys() { @@ -626,11 +636,11 @@ fn evaluate_assignments<'a>( ) -> Result<Map<&'a str, String>, RunError<'a>> { let mut evaluator = Evaluator { assignments: assignments, - evaluated: Map::new(), - exports: &Set::new(), + evaluated: empty(), + exports: &empty(), overrides: overrides, quiet: quiet, - scope: &Map::new(), + scope: &empty(), }; for name in assignments.keys() { @@ -676,7 +686,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
if let Some(value) = self.overrides.get(name) { self.evaluated.insert(name, value.to_string()); } else { - let value = self.evaluate_expression(expression, &Map::new())?; + let value = self.evaluate_expression(expression, &empty())?; self.evaluated.insert(name, value); } } else { @@ -950,6 +960,22 @@ fn maybe_red(colors: bool) -> ansi_term::Style { } } +fn maybe_green(colors: bool) -> ansi_term::Style { + if colors { + ansi_term::Style::new().fg(ansi_term::Color::Green) + } else { + ansi_term::Style::default() + } +} + +fn maybe_cyan(colors: bool) -> ansi_term::Style { + if colors { + ansi_term::Style::new().fg(ansi_term::Color::Cyan) + } else { + ansi_term::Style::default() + } +} + fn maybe_bold(colors: bool) -> ansi_term::Style { if colors { ansi_term::Style::new().bold() @@ -1130,7 +1156,7 @@ impl<'a, 'b> Justfile<'a> where 'a: 'b { return Ok(()); } - let mut ran = Set::new(); + let mut ran = empty(); for (i, argument) in arguments.iter().enumerate() { if let Some(recipe) = self.recipes.get(argument) { @@ -1678,14 +1704,13 @@ fn tokenize(text: &str) -> Result, CompileError> { fn parse(text: &str) -> Result { let tokens = tokenize(text)?; - let filtered: Vec<_> = tokens.into_iter().filter(|token| token.kind != Comment).collect(); let parser = Parser { text: text, - tokens: itertools::put_back(filtered), - recipes: Map::<&str, Recipe>::new(), - assignments: Map::<&str, Expression>::new(), - assignment_tokens: Map::<&str, Token>::new(), - exports: Set::<&str>::new(), + tokens: itertools::put_back(tokens), + recipes: empty(), + assignments: empty(), + assignment_tokens: empty(), + exports: empty(), }; parser.file() } @@ -1738,6 +1763,7 @@ impl<'a> Parser<'a> { } fn expect_eol(&mut self) -> Option> { + self.accepted(Comment); if self.peek(Eol) { self.accept(Eol); None @@ -1755,7 +1781,12 @@ impl<'a> Parser<'a> { }) } - fn recipe(&mut self, name: Token<'a>, quiet: bool) -> Result<(), CompileError<'a>> { + fn recipe( + &mut self, + name: Token<'a>, + doc: Option>, + 
quiet: bool, + ) -> Result<(), CompileError<'a>> { if let Some(recipe) = self.recipes.get(name.lexeme) { return Err(name.error(ErrorKind::DuplicateRecipe { recipe: recipe.name, @@ -1875,6 +1906,7 @@ impl<'a> Parser<'a> { self.recipes.insert(name.lexeme, Recipe { line_number: name.line, name: name.lexeme, + doc: doc.map(|t| t.lexeme[1..].trim()), dependencies: dependencies, dependency_tokens: dependency_tokens, parameters: parameters, @@ -1930,13 +1962,43 @@ impl<'a> Parser<'a> { } fn file(mut self) -> Result, CompileError<'a>> { + // how do i associate comments with recipes? + // save the last doc + // clear it after every item + + let mut doc = None; + + /* + trait Swap { + fn swap(&mut self, T) -> T + } + + impl Swap> for Option { + fn swap(&mut self, replacement: Option) -> Option { + std::mem::replace(self, replacement) + } + } + */ + loop { match self.tokens.next() { Some(token) => match token.kind { Eof => break, - Eol => continue, + Eol => { + doc = None; + continue; + } + Comment => { + if let Some(token) = self.expect_eol() { + return Err(token.error(ErrorKind::InternalError { + message: format!("found comment followed by {}", token.kind), + })); + } + doc = Some(token); + } At => if let Some(name) = self.accept(Name) { - self.recipe(name, true)?; + self.recipe(name, doc, true)?; + doc = None; } else { let unexpected = &self.tokens.next().unwrap(); return Err(self.unexpected_token(unexpected, &[Name])); @@ -1945,18 +2007,19 @@ impl<'a> Parser<'a> { let next = self.tokens.next().unwrap(); if next.kind == Name && self.accepted(Equals) { self.assignment(next, true)?; + doc = None; } else { self.tokens.put_back(next); - self.recipe(token, false)?; + self.recipe(token, doc, false)?; + doc = None; } } else if self.accepted(Equals) { self.assignment(token, false)?; + doc = None; } else { - self.recipe(token, false)?; + self.recipe(token, doc, false)?; + doc = None; }, - Comment => return Err(token.error(ErrorKind::InternalError { - message: "found comment in 
token stream".to_string() - })), _ => return return Err(self.unexpected_token(&token, &[Name, At])), }, None => return Err(CompileError { diff --git a/src/unit.rs b/src/unit.rs index 4b01bca..f66cba0 100644 --- a/src/unit.rs +++ b/src/unit.rs @@ -90,36 +90,38 @@ fn parse_error(text: &str, expected: CompileError) { #[test] fn tokanize_strings() { tokenize_success( - r#"a = "'a'" + '"b"' + "'c'" + '"d"'"#, - r#"N="+'+"+'."# + r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#, + r#"N="+'+"+'#."# ); } #[test] fn tokenize_recipe_interpolation_eol() { - let text = "foo: + let text = "foo: # some comment {{hello}} "; - tokenize_success(text, "N:$>^{N}$<."); + tokenize_success(text, "N:#$>^{N}$<."); } #[test] fn tokenize_recipe_interpolation_eof() { - let text = "foo: - {{hello}}"; - tokenize_success(text, "N:$>^{N}<."); + let text = "foo: # more comments + {{hello}} +# another comment +"; + tokenize_success(text, "N:#$>^{N}$<#$."); } #[test] fn tokenize_recipe_complex_interpolation_expression() { - let text = "foo:\n {{a + b + \"z\" + blarg}}"; - tokenize_success(text, "N:$>^{N+N+\"+N}<."); + let text = "foo: #lol\n {{a + b + \"z\" + blarg}}"; + tokenize_success(text, "N:#$>^{N+N+\"+N}<."); } #[test] fn tokenize_recipe_multiple_interpolations() { - let text = "foo:\n {{a}}0{{b}}1{{c}}"; - tokenize_success(text, "N:$>^{N}_{N}_{N}<."); + let text = "foo:#ok\n {{a}}0{{b}}1{{c}}"; + tokenize_success(text, "N:#$>^{N}_{N}_{N}<."); } #[test] @@ -134,16 +136,19 @@ hello blah blah blah : a b c #whatever #[test] fn tokenize_empty_lines() { let text = " +# this does something hello: asdf bsdf csdf - dsdf + dsdf # whatever + +# yolo "; - tokenize_success(text, "$N:$>^_$^_$$^_$$^_$<."); + tokenize_success(text, "$#$N:$>^_$^_$$^_$$^_$$<#$."); } #[test] @@ -173,11 +178,12 @@ hello: d +# hello bob: frank "; - tokenize_success(text, "$N:$>^_$^_$$^_$$^_$$^_$<."); + tokenize_success(text, "$N:$>^_$^_$$^_$$^_$$<#$N:$>^_$<."); }