diff --git a/justfile b/justfile
index 42ea3d7..adcf758 100755
--- a/justfile
+++ b/justfile
@@ -34,7 +34,7 @@ check:
 	cargo check

 watch +COMMAND='test':
-	cargo watch --clear --exec build --exec "{{COMMAND}}"
+	cargo watch --clear --exec "{{COMMAND}}"

 man:
 	cargo build --features help4help2man
diff --git a/src/assignment_resolver.rs b/src/assignment_resolver.rs
index 46a7af9..e6dfa40 100644
--- a/src/assignment_resolver.rs
+++ b/src/assignment_resolver.rs
@@ -40,13 +40,17 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
       self.evaluated.insert(name);
     } else {
       let message = format!("attempted to resolve unknown assignment `{}`", name);
-      return Err(CompilationError {
+      let token = Token {
         src: "",
         offset: 0,
         line: 0,
         column: 0,
-        width: 0,
+        length: 0,
+        kind: TokenKind::Unspecified,
+      };
+      return Err(CompilationError {
         kind: Internal { message },
+        token,
       });
     }
     Ok(())
diff --git a/src/common.rs b/src/common.rs
index 02a61b7..bd40e76 100644
--- a/src/common.rs
+++ b/src/common.rs
@@ -35,10 +35,7 @@ pub(crate) use unicode_width::UnicodeWidthChar;
 pub(crate) use crate::{config_error, keyword, search_error, setting};

 // functions
-pub(crate) use crate::{
-  default::default, empty::empty, load_dotenv::load_dotenv, output::output,
-  write_message_context::write_message_context,
-};
+pub(crate) use crate::{default::default, empty::empty, load_dotenv::load_dotenv, output::output};

 // traits
 pub(crate) use crate::{
diff --git a/src/compilation_error.rs b/src/compilation_error.rs
index c1df686..5f9312e 100644
--- a/src/compilation_error.rs
+++ b/src/compilation_error.rs
@@ -1,13 +1,9 @@
 use crate::common::*;

 #[derive(Debug, PartialEq)]
-pub(crate) struct CompilationError<'a> {
-  pub(crate) src: &'a str,
-  pub(crate) offset: usize,
-  pub(crate) line: usize,
-  pub(crate) column: usize,
-  pub(crate) width: usize,
-  pub(crate) kind: CompilationErrorKind<'a>,
+pub(crate) struct CompilationError<'src> {
+  pub(crate) token: Token<'src>,
+  pub(crate) kind: CompilationErrorKind<'src>,
 }

 impl Error for CompilationError<'_> {}
@@ -25,7 +21,7 @@ impl Display for CompilationError<'_> {
           f,
           "Alias `{}` defined on line {} shadows recipe `{}` defined on line {}",
           alias,
-          self.line.ordinal(),
+          self.token.line.ordinal(),
           alias,
           recipe_line.ordinal(),
         )?;
@@ -90,7 +86,7 @@ impl Display for CompilationError<'_> {
           "Alias `{}` first defined on line {} is redefined on line {}",
           alias,
           first.ordinal(),
-          self.line.ordinal(),
+          self.token.line.ordinal(),
         )?;
       }
       DuplicateDependency { recipe, dependency } => {
@@ -106,7 +102,7 @@ impl Display for CompilationError<'_> {
           "Recipe `{}` first defined on line {} is redefined on line {}",
           recipe,
           first.ordinal(),
-          self.line.ordinal()
+          self.token.line.ordinal()
         )?;
       }
       DuplicateSet { setting, first } => {
@@ -115,7 +111,7 @@ impl Display for CompilationError<'_> {
           "Setting `{}` first set on line {} is redefined on line {}",
           setting,
           first.ordinal(),
-          self.line.ordinal(),
+          self.token.line.ordinal(),
         )?;
       }
       DependencyHasParameters { recipe, dependency } => {
@@ -223,14 +219,6 @@ impl Display for CompilationError<'_> {

     write!(f, "{}", message.suffix())?;

-    write_message_context(
-      f,
-      Color::fmt(f).error(),
-      self.src,
-      self.offset,
-      self.line,
-      self.column,
-      self.width,
-    )
+    self.token.write_context(f, Color::fmt(f).error())
   }
 }
diff --git a/src/lexer.rs b/src/lexer.rs
index 2310e0f..564fc68 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -11,15 +11,15 @@ use TokenKind::*;
 /// regex-based lexer, which was slower and generally godawful. However,
 /// this should not be taken as a slight against regular expressions,
 /// the lexer was just idiosyncratically bad.
-pub(crate) struct Lexer<'a> {
+pub(crate) struct Lexer<'src> {
   /// Source text
-  src: &'a str,
+  src: &'src str,
   /// Char iterator
-  chars: Chars<'a>,
+  chars: Chars<'src>,
   /// Tokens
-  tokens: Vec<Token<'a>>,
+  tokens: Vec<Token<'src>>,
   /// State stack
-  state: Vec<State<'a>>,
+  state: Vec<State<'src>>,
   /// Current token start
   token_start: Position,
   /// Current token end
@@ -28,14 +28,14 @@
   next: Option<char>,
 }

-impl<'a> Lexer<'a> {
+impl<'src> Lexer<'src> {
   /// Lex `text`
   pub(crate) fn lex(src: &str) -> CompilationResult<Vec<Token>> {
     Lexer::new(src).tokenize()
   }

   /// Create a new Lexer to lex `text`
-  fn new(src: &'a str) -> Lexer<'a> {
+  fn new(src: &'src str) -> Lexer<'src> {
     let mut chars = src.chars();
     let next = chars.next();

@@ -58,7 +58,7 @@ impl<'a> Lexer<'a> {
   /// Advance over the chracter in `self.next`, updating
   /// `self.token_end` accordingly.
-  fn advance(&mut self) -> CompilationResult<'a, ()> {
+  fn advance(&mut self) -> CompilationResult<'src, ()> {
     match self.next {
       Some(c) => {
         let len_utf8 = c.len_utf8();
@@ -84,7 +84,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lexeme of in-progress token
-  fn lexeme(&self) -> &'a str {
+  fn lexeme(&self) -> &'src str {
     &self.src[self.token_start.offset..self.token_end.offset]
   }

@@ -104,7 +104,7 @@ impl<'a> Lexer<'a> {
   }

   /// Un-lexed text
-  fn rest(&self) -> &'a str {
+  fn rest(&self) -> &'src str {
     &self.src[self.token_end.offset..]
   }

@@ -124,7 +124,7 @@ impl<'a> Lexer<'a> {
   }

   /// Get current state
-  fn state(&self) -> CompilationResult<'a, State<'a>> {
+  fn state(&self) -> CompilationResult<'src, State<'src>> {
     if self.state.is_empty() {
       Err(self.internal_error("Lexer state stack empty"))
     } else {
@@ -133,7 +133,7 @@ impl<'a> Lexer<'a> {
   }

   /// Pop current state from stack
-  fn pop_state(&mut self) -> CompilationResult<'a, ()> {
+  fn pop_state(&mut self) -> CompilationResult<'src, ()> {
     if self.state.pop().is_none() {
       Err(self.internal_error("Lexer attempted to pop in start state"))
     } else {
@@ -158,26 +158,31 @@ impl<'a> Lexer<'a> {
   }

   /// Create an internal error with `message`
-  fn internal_error(&self, message: impl Into<String>) -> CompilationError<'a> {
-    // Use `self.token_end` as the location of the error
-    CompilationError {
+  fn internal_error(&self, message: impl Into<String>) -> CompilationError<'src> {
+    let token = Token {
       src: self.src,
       offset: self.token_end.offset,
       line: self.token_end.line,
       column: self.token_end.column,
-      width: 0,
+      length: 0,
+      kind: Unspecified,
+    };
+
+    // Use `self.token_end` as the location of the error
+    CompilationError {
       kind: CompilationErrorKind::Internal {
         message: message.into(),
       },
+      token,
     }
   }

   /// Create a compilation error with `kind`
-  fn error(&self, kind: CompilationErrorKind<'a>) -> CompilationError<'a> {
+  fn error(&self, kind: CompilationErrorKind<'src>) -> CompilationError<'src> {
     // Use the in-progress token span as the location of the error.
     // The width of the error site to highlight depends on the kind of error:
-    let width = match kind {
+    let length = match kind {
       // highlight ' or "
       UnterminatedString => 1,
       // highlight `
@@ -186,26 +191,24 @@ impl<'a> Lexer<'a> {
       _ => self.lexeme().len(),
     };

-    CompilationError {
+    let token = Token {
+      kind: Unspecified,
       src: self.src,
       offset: self.token_start.offset,
       line: self.token_start.line,
       column: self.token_start.column,
-      width,
-      kind,
-    }
+      length,
+    };
+
+    CompilationError { token, kind }
   }

   fn unterminated_interpolation_error(
     &self,
-    interpolation_start: Position,
-  ) -> CompilationError<'a> {
+    interpolation_start: Token<'src>,
+  ) -> CompilationError<'src> {
     CompilationError {
-      src: self.src,
-      offset: interpolation_start.offset,
-      line: interpolation_start.line,
-      column: interpolation_start.column,
-      width: 2,
+      token: interpolation_start,
       kind: UnterminatedInterpolation,
     }
   }
@@ -251,7 +254,7 @@ impl<'a> Lexer<'a> {
   }

   /// Consume the text and produce a series of tokens
-  fn tokenize(mut self) -> CompilationResult<'a, Vec<Token<'a>>> {
+  fn tokenize(mut self) -> CompilationResult<'src, Vec<Token<'src>>> {
     loop {
       if self.token_start.column == 0 {
         self.lex_line_start()?;
@@ -287,7 +290,7 @@ impl<'a> Lexer<'a> {
   }

   /// Handle blank lines and indentation
-  fn lex_line_start(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_line_start(&mut self) -> CompilationResult<'src, ()> {
     let nonblank_index = self
       .rest()
       .char_indices()
@@ -384,7 +387,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex token beginning with `start` in normal state
-  fn lex_normal(&mut self, start: char) -> CompilationResult<'a, ()> {
+  fn lex_normal(&mut self, start: char) -> CompilationResult<'src, ()> {
     match start {
       '@' => self.lex_single(At),
       '[' => self.lex_single(BracketL),
@@ -418,9 +421,9 @@ impl<'a> Lexer<'a> {
   /// Lex token beginning with `start` in interpolation state
   fn lex_interpolation(
     &mut self,
-    interpolation_start: Position,
+    interpolation_start: Token<'src>,
     start: char,
-  ) -> CompilationResult<'a, ()> {
+  ) -> CompilationResult<'src, ()> {
     // Check for end of interpolation
     if self.rest_starts_with("}}") {
       // Pop interpolation state
@@ -437,7 +440,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex token beginning with `start` in text state
-  fn lex_text(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_text(&mut self) -> CompilationResult<'src, ()> {
     enum Terminator {
       Newline,
       NewlineCarriageReturn,
@@ -482,30 +485,31 @@ impl<'a> Lexer<'a> {
         self.lex_double(Eol)
       }
       Interpolation => {
+        self.lex_double(InterpolationStart)?;
         self.state.push(State::Interpolation {
-          interpolation_start: self.token_start,
+          interpolation_start: self.tokens[self.tokens.len() - 1],
         });
-        self.lex_double(InterpolationStart)
+        Ok(())
       }
       EndOfFile => self.pop_state(),
     }
   }

   /// Lex token beginning with `start` in indented state
-  fn lex_indented(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_indented(&mut self) -> CompilationResult<'src, ()> {
     self.state.push(State::Text);
     Ok(())
   }

   /// Lex a single character token
-  fn lex_single(&mut self, kind: TokenKind) -> CompilationResult<'a, ()> {
+  fn lex_single(&mut self, kind: TokenKind) -> CompilationResult<'src, ()> {
     self.advance()?;
     self.token(kind);
     Ok(())
   }

   /// Lex a double character token
-  fn lex_double(&mut self, kind: TokenKind) -> CompilationResult<'a, ()> {
+  fn lex_double(&mut self, kind: TokenKind) -> CompilationResult<'src, ()> {
     self.advance()?;
     self.advance()?;
     self.token(kind);
@@ -513,7 +517,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex a token starting with ':'
-  fn lex_colon(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_colon(&mut self) -> CompilationResult<'src, ()> {
     self.advance()?;

     if self.next_is('=') {
@@ -527,7 +531,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex a token starting with '{'
-  fn lex_brace_l(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_brace_l(&mut self) -> CompilationResult<'src, ()> {
     if !self.rest_starts_with("{{") {
       self.advance()?;

@@ -538,7 +542,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex a token starting with '}'
-  fn lex_brace_r(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_brace_r(&mut self) -> CompilationResult<'src, ()> {
     if !self.rest_starts_with("}}") {
       self.advance()?;

@@ -549,7 +553,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex a carriage return and line feed
-  fn lex_cr_lf(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_cr_lf(&mut self) -> CompilationResult<'src, ()> {
     if !self.rest_starts_with("\r\n") {
       // advance over \r
       self.advance()?;
@@ -561,7 +565,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex name: [a-zA-Z_][a-zA-Z0-9_]*
-  fn lex_identifier(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_identifier(&mut self) -> CompilationResult<'src, ()> {
     // advance over initial character
     self.advance()?;

@@ -579,7 +583,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex comment: #[^\r\n]
-  fn lex_comment(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_comment(&mut self) -> CompilationResult<'src, ()> {
     // advance over #
     self.advance()?;

@@ -593,7 +597,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex backtick: `[^\r\n]*`
-  fn lex_backtick(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_backtick(&mut self) -> CompilationResult<'src, ()> {
     // advance over initial `
     self.advance()?;

@@ -612,7 +616,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex whitespace: [ \t]+
-  fn lex_whitespace(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_whitespace(&mut self) -> CompilationResult<'src, ()> {
     while self.next_is_whitespace() {
       self.advance()?
     }
@@ -623,7 +627,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex raw string: '[^']*'
-  fn lex_raw_string(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_raw_string(&mut self) -> CompilationResult<'src, ()> {
     // advance over opening '
     self.advance()?;

@@ -646,7 +650,7 @@ impl<'a> Lexer<'a> {
   }

   /// Lex cooked string: "[^"\n\r]*" (also processes escape sequences)
-  fn lex_cooked_string(&mut self) -> CompilationResult<'a, ()> {
+  fn lex_cooked_string(&mut self) -> CompilationResult<'src, ()> {
     // advance over opening "
     self.advance()?;

@@ -780,7 +784,7 @@ mod tests {
       Dedent | Eof => "",

       // Variable lexemes
-      Text | StringCooked | StringRaw | Identifier | Comment | Backtick => {
+      Text | StringCooked | StringRaw | Identifier | Comment | Backtick | Unspecified => {
         panic!("Token {:?} has no default lexeme", kind)
       }
     }
@@ -808,22 +812,24 @@ mod tests {
     offset: usize,
     line: usize,
     column: usize,
-    width: usize,
+    length: usize,
     kind: CompilationErrorKind,
   ) {
-    let expected = CompilationError {
-      src,
-      offset,
-      line,
-      column,
-      width,
-      kind,
-    };
-
     match Lexer::lex(src) {
-      Ok(_) => panic!("Lexing succeeded but expected: {}\n{}", expected, src),
-      Err(actual) => {
-        assert_eq!(actual, expected);
+      Ok(_) => panic!("Lexing succeeded but expected"),
+      Err(have) => {
+        let want = CompilationError {
+          token: Token {
+            kind: have.token.kind,
+            src,
+            offset,
+            line,
+            column,
+            length,
+          },
+          kind,
+        };
+        assert_eq!(have, want);
       }
     }
   }
diff --git a/src/lib.rs b/src/lib.rs
index 5d41582..594e2a4 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -87,7 +87,6 @@ mod use_color;
 mod variables;
 mod verbosity;
 mod warning;
-mod write_message_context;

 pub use crate::run::run;
diff --git a/src/parser.rs b/src/parser.rs
index 6a7aa8f..f329269 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -702,24 +702,26 @@ mod tests {
     offset: usize,
     line: usize,
     column: usize,
-    width: usize,
+    length: usize,
     kind: CompilationErrorKind,
   ) {
-    let expected = CompilationError {
-      src,
-      offset,
-      line,
-      column,
-      width,
-      kind,
-    };
-
     let tokens = Lexer::lex(src).expect("Lexing failed in parse test...");

     match Parser::parse(&tokens) {
-      Ok(_) => panic!("Parsing succeeded but expected: {}\n{}", expected, src),
-      Err(actual) => {
-        assert_eq!(actual, expected);
+      Ok(_) => panic!("Parsing unexpectedly succeeded"),
+      Err(have) => {
+        let want = CompilationError {
+          token: Token {
+            kind: have.token.kind,
+            src,
+            offset,
+            line,
+            column,
+            length,
+          },
+          kind,
+        };
+        assert_eq!(have, want);
      }
    }
  }
diff --git a/src/runtime_error.rs b/src/runtime_error.rs
index d608cc6..ecc5908 100644
--- a/src/runtime_error.rs
+++ b/src/runtime_error.rs
@@ -82,6 +82,17 @@ impl Error for RuntimeError<'_> {
   }
 }

+impl<'a> RuntimeError<'a> {
+  fn context(&self) -> Option<Token<'a>> {
+    use RuntimeError::*;
+    match self {
+      FunctionCall { function, .. } => Some(function.token()),
+      Backtick { token, .. } => Some(*token),
+      _ => None,
+    }
+  }
+}
+
 impl<'a> Display for RuntimeError<'a> {
   fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
     use RuntimeError::*;
@@ -94,12 +105,10 @@ impl<'a> Display for RuntimeError<'a> {
     let message = color.message();
     write!(f, "{}", message.prefix())?;

-    let mut error_token: Option<Token> = None;
-
-    match *self {
+    match self {
       UnknownRecipes {
-        ref recipes,
-        ref suggestion,
+        recipes,
+        suggestion,
       } => {
         write!(
           f,
@@ -111,7 +120,7 @@
           write!(f, "\nDid you mean `{}`?", suggestion)?;
         }
       }
-      UnknownOverrides { ref overrides } => {
+      UnknownOverrides { overrides } => {
        write!(
          f,
          "{} {} overridden on the command line but not present in justfile",
@@ -121,7 +130,7 @@
       }
       ArgumentCountMismatch {
         recipe,
-        ref parameters,
+        parameters,
         found,
         min,
         max,
@@ -133,7 +142,7 @@
             "Recipe `{}` got {} {} but {}takes {}",
             recipe,
             found,
-            Count("argument", found),
+            Count("argument", *found),
             if expected < found { "only " } else { "" },
             expected
           )?;
@@ -143,7 +152,7 @@
             "Recipe `{}` got {} {} but takes at least {}",
             recipe,
             found,
-            Count("argument", found),
+            Count("argument", *found),
             min
           )?;
         } else if found > max {
@@ -152,7 +161,7 @@
             "Recipe `{}` got {} {} but takes at most {}",
             recipe,
             found,
-            Count("argument", found),
+            Count("argument", *found),
             max
           )?;
         }
@@ -182,8 +191,8 @@
       }
       Cygpath {
         recipe,
-        ref output_error,
-      } => match *output_error {
+        output_error,
+      } => match output_error {
         OutputError::Code(code) => {
           write!(
             f,
@@ -208,7 +217,7 @@
             recipe
           )?;
         }
-        OutputError::Io(ref io_error) => {
+        OutputError::Io(io_error) => {
           match io_error.kind() {
             io::ErrorKind::NotFound => write!(
               f,
@@ -225,7 +234,7 @@
             _ => write!(f, "Could not run `cygpath` executable:\n{}", io_error),
           }?;
         }
-        OutputError::Utf8(ref utf8_error) => {
+        OutputError::Utf8(utf8_error) => {
           write!(
             f,
             "Cygpath successfully translated recipe `{}` shebang interpreter path, \
@@ -234,28 +243,24 @@
           )?;
         }
       },
-      Dotenv { ref dotenv_error } => {
+      Dotenv { dotenv_error } => {
         writeln!(f, "Failed to load .env: {}", dotenv_error)?;
       }
-      FunctionCall {
-        ref function,
-        ref message,
-      } => {
+      FunctionCall { function, message } => {
         writeln!(
           f,
           "Call to function `{}` failed: {}",
           function.lexeme(),
           message
         )?;
-        error_token = Some(function.token());
       }
       Shebang {
         recipe,
-        ref command,
-        ref argument,
-        ref io_error,
+        command,
+        argument,
+        io_error,
       } => {
-        if let Some(ref argument) = *argument {
+        if let Some(argument) = argument {
           write!(
             f,
             "Recipe `{}` with shebang `#!{} {}` execution error: {}",
@@ -295,12 +300,10 @@
             recipe, n
           )?;
         } else {
           write!(f, "Recipe `{}` failed for an unknown reason", recipe)?;
         }
       }
-      IoError {
-        recipe,
-        ref io_error,
-      } => {
+      IoError { recipe, io_error } => {
         match io_error.kind() {
           io::ErrorKind::NotFound => writeln!(
             f,
@@ -320,32 +323,23 @@
           ),
         }?;
       }
-      TmpdirIoError {
-        recipe,
-        ref io_error,
-      } => writeln!(
+      TmpdirIoError { recipe, io_error } => writeln!(
         f,
         "Recipe `{}` could not be run because of an IO error while trying \
          to create a temporary directory or write a file to that directory`:{}",
         recipe, io_error
       )?,
-      Backtick {
-        ref token,
-        ref output_error,
-      } => match *output_error {
+      Backtick { output_error, .. } => match output_error {
         OutputError::Code(code) => {
           writeln!(f, "Backtick failed with exit code {}", code)?;
-          error_token = Some(*token);
         }
         OutputError::Signal(signal) => {
           writeln!(f, "Backtick was terminated by signal {}", signal)?;
-          error_token = Some(*token);
         }
         OutputError::Unknown => {
           writeln!(f, "Backtick failed for an unknown reason")?;
-          error_token = Some(*token);
         }
-        OutputError::Io(ref io_error) => {
+        OutputError::Io(io_error) => {
           match io_error.kind() {
             io::ErrorKind::NotFound => write!(
               f,
@@ -364,15 +358,13 @@
               io_error
             ),
           }?;
-          error_token = Some(*token);
         }
-        OutputError::Utf8(ref utf8_error) => {
+        OutputError::Utf8(utf8_error) => {
           writeln!(
             f,
             "Backtick succeeded but stdout was not utf8: {}",
             utf8_error
           )?;
-          error_token = Some(*token);
         }
       },
       NoRecipes => {
@@ -387,10 +379,10 @@
           "Recipe `{}` cannot be used as default recipe since it requires at least {} {}.",
           recipe,
           min_arguments,
-          Count("argument", min_arguments),
+          Count("argument", *min_arguments),
         )?;
       }
-      Internal { ref message } => {
+      Internal { message } => {
         write!(
           f,
           "Internal runtime error, this may indicate a bug in just: {} \
@@ -402,16 +394,8 @@

     write!(f, "{}", message.suffix())?;

-    if let Some(token) = error_token {
-      write_message_context(
-        f,
-        Color::fmt(f).error(),
-        token.src,
-        token.offset,
-        token.line,
-        token.column,
-        token.lexeme().len(),
-      )?;
+    if let Some(token) = self.context() {
+      token.write_context(f, Color::fmt(f).error())?;
     }

     Ok(())
diff --git a/src/state.rs b/src/state.rs
index 5ac4559..2913a3d 100644
--- a/src/state.rs
+++ b/src/state.rs
@@ -1,9 +1,9 @@
 use crate::common::*;

 #[derive(Copy, Clone, PartialEq, Debug)]
-pub(crate) enum State<'a> {
+pub(crate) enum State<'src> {
   Normal,
-  Indented { indentation: &'a str },
+  Indented { indentation: &'src str },
   Text,
-  Interpolation { interpolation_start: Position },
+  Interpolation { interpolation_start: Token<'src> },
 }
diff --git a/src/testing.rs b/src/testing.rs
index 15dec5c..59abc6e 100644
--- a/src/testing.rs
+++ b/src/testing.rs
@@ -42,26 +42,28 @@ pub(crate) fn analysis_error(
   offset: usize,
   line: usize,
   column: usize,
-  width: usize,
+  length: usize,
   kind: CompilationErrorKind,
 ) {
-  let expected = CompilationError {
-    src,
-    offset,
-    line,
-    column,
-    width,
-    kind,
-  };
-
   let tokens = Lexer::lex(src).expect("Lexing failed in parse test...");

   let module = Parser::parse(&tokens).expect("Parsing failed in analysis test...");

   match Analyzer::analyze(module) {
-    Ok(_) => panic!("Analysis succeeded but expected: {}\n{}", expected, src),
-    Err(actual) => {
-      assert_eq!(actual, expected);
+    Ok(_) => panic!("Analysis unexpectedly succeeded"),
+    Err(have) => {
+      let want = CompilationError {
+        token: Token {
+          kind: have.token.kind,
+          src,
+          offset,
+          line,
+          column,
+          length,
+        },
+        kind,
+      };
+      assert_eq!(have, want);
     }
   }
 }
diff --git a/src/token.rs b/src/token.rs
index 4d05ab3..8ca5696 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -16,13 +16,64 @@ impl<'a> Token<'a> {
   }

   pub(crate) fn error(&self, kind: CompilationErrorKind<'a>) -> CompilationError<'a> {
-    CompilationError {
-      column: self.column,
-      offset: self.offset,
-      line: self.line,
-      src: self.src,
-      width: self.length,
-      kind,
+    CompilationError { token: *self, kind }
+  }
+
+  pub(crate) fn write_context(&self, f: &mut Formatter, color: Color) -> fmt::Result {
+    let width = if self.length == 0 { 1 } else { self.length };
+
+    let line_number = self.line.ordinal();
+    match self.src.lines().nth(self.line) {
+      Some(line) => {
+        let mut i = 0;
+        let mut space_column = 0;
+        let mut space_line = String::new();
+        let mut space_width = 0;
+        for c in line.chars() {
+          if c == '\t' {
+            space_line.push_str("    ");
+            if i < self.column {
+              space_column += 4;
+            }
+            if i >= self.column && i < self.column + width {
+              space_width += 4;
+            }
+          } else {
+            if i < self.column {
+              space_column += UnicodeWidthChar::width(c).unwrap_or(0);
+            }
+            if i >= self.column && i < self.column + width {
+              space_width += UnicodeWidthChar::width(c).unwrap_or(0);
+            }
+            space_line.push(c);
+          }
+          i += c.len_utf8();
+        }
+        let line_number_width = line_number.to_string().len();
+        writeln!(f, "{0:1$} |", "", line_number_width)?;
+        writeln!(f, "{} | {}", line_number, space_line)?;
+        write!(f, "{0:1$} |", "", line_number_width)?;
+        write!(
+          f,
+          " {0:1$}{2}{3:^<4$}{5}",
+          "",
+          space_column,
+          color.prefix(),
+          "",
+          space_width,
+          color.suffix()
+        )?;
+      }
+      None => {
+        if self.offset != self.src.len() {
+          write!(
+            f,
+            "internal error: Error has invalid line number: {}",
+            line_number
+          )?
+        }
+      }
+    }
+    Ok(())
+  }
 }
diff --git a/src/token_kind.rs b/src/token_kind.rs
index 52802f4..c5e28a4 100644
--- a/src/token_kind.rs
+++ b/src/token_kind.rs
@@ -24,6 +24,7 @@ pub(crate) enum TokenKind {
   StringCooked,
   StringRaw,
   Text,
+  Unspecified,
   Whitespace,
 }

@@ -57,6 +58,7 @@ impl Display for TokenKind {
         StringRaw => "raw string",
         Text => "command text",
         Whitespace => "whitespace",
+        Unspecified => "unspecified",
       }
     )
   }
diff --git a/src/warning.rs b/src/warning.rs
index 74a7683..ef408e4 100644
--- a/src/warning.rs
+++ b/src/warning.rs
@@ -39,15 +39,7 @@ impl Display for Warning<'_> {

     if let Some(token) = self.context() {
       writeln!(f)?;
-      write_message_context(
-        f,
-        Color::fmt(f).warning(),
-        token.src,
-        token.offset,
-        token.line,
-        token.column,
-        token.lexeme().len(),
-      )?;
+      token.write_context(f, Color::fmt(f).warning())?;
     }

     Ok(())
diff --git a/src/write_message_context.rs b/src/write_message_context.rs
deleted file mode 100644
index 9974105..0000000
--- a/src/write_message_context.rs
+++ /dev/null
@@ -1,67 +0,0 @@
-use crate::common::*;
-
-pub(crate) fn write_message_context(
-  f: &mut Formatter,
-  color: Color,
-  text: &str,
-  offset: usize,
-  line: usize,
-  column: usize,
-  width: usize,
-) -> Result<(), fmt::Error> {
-  let width = if width == 0 { 1 } else { width };
-
-  let line_number = line.ordinal();
-  match text.lines().nth(line) {
-    Some(line) => {
-      let mut i = 0;
-      let mut space_column = 0;
-      let mut space_line = String::new();
-      let mut space_width = 0;
-      for c in line.chars() {
-        if c == '\t' {
-          space_line.push_str("    ");
-          if i < column {
-            space_column += 4;
-          }
-          if i >= column && i < column + width {
-            space_width += 4;
-          }
-        } else {
-          if i < column {
-            space_column += UnicodeWidthChar::width(c).unwrap_or(0);
-          }
-          if i >= column && i < column + width {
-            space_width += UnicodeWidthChar::width(c).unwrap_or(0);
-          }
-          space_line.push(c);
-        }
-        i += c.len_utf8();
-      }
-      let line_number_width = line_number.to_string().len();
-      writeln!(f, "{0:1$} |", "", line_number_width)?;
-      writeln!(f, "{} | {}", line_number, space_line)?;
-      write!(f, "{0:1$} |", "", line_number_width)?;
-      write!(
-        f,
-        " {0:1$}{2}{3:^<4$}{5}",
-        "",
-        space_column,
-        color.prefix(),
-        "",
-        space_width,
-        color.suffix()
-      )?;
-    }
-    None => {
-      if offset != text.len() {
-        write!(
-          f,
-          "internal error: Error has invalid line number: {}",
line number: {}", - line_number - )? - } - } - } - Ok(()) -}