Refactor some other stuff (#252)

Casey Rodarmor 2017-11-17 17:28:06 -08:00 committed by GitHub
parent 2b6b715528
commit 13a124d659
20 changed files with 922 additions and 938 deletions

Cargo.lock (generated), 97 changed lines

@@ -1,27 +1,9 @@
-[root]
-name = "just"
-version = "0.3.3"
-dependencies = [
-"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"atty 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"brev 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
-"clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"edit-distance 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"itertools 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
-"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
-"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"utilities 0.0.0",
-]
-
 [[package]]
 name = "aho-corasick"
 version = "0.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -35,7 +17,7 @@ version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
+"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
 "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -61,15 +43,14 @@ dependencies = [
 [[package]]
 name = "clap"
-version = "2.26.2"
+version = "2.27.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "atty 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"textwrap 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -81,7 +62,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "either"
-version = "1.2.0"
+version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
@@ -110,7 +91,25 @@ name = "itertools"
 version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"either 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "just"
+version = "0.3.3"
+dependencies = [
+"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"atty 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"brev 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"clap 2.27.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"edit-distance 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"itertools 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"lazy_static 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
+"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"utilities 0.0.0",
 ]

 [[package]]
@@ -124,29 +123,29 @@ dependencies = [
 [[package]]
 name = "lazy_static"
-version = "0.2.9"
+version = "0.2.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
 name = "libc"
-version = "0.2.32"
+version = "0.2.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
 name = "memchr"
-version = "1.0.1"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
+"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
 name = "rand"
-version = "0.3.17"
+version = "0.3.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
+"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -168,7 +167,7 @@ version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -189,17 +188,7 @@ name = "tempdir"
 version = "0.3.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)",
+"rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "term_size"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
-"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
-"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -207,17 +196,16 @@ name = "termion"
 version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
+"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
 "redox_syscall 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)",
 "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
 name = "textwrap"
-version = "0.8.0"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -226,7 +214,7 @@ name = "thread_local"
 version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"lazy_static 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -279,27 +267,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
 "checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5"
 "checksum brev 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "d85c3b7957223c752ff78ffd20a1806b0c7262d9aef85ed470546f16b56a5bb2"
-"checksum clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3451e409013178663435d6f15fdb212f14ee4424a3d74f979d081d0a66b6f1f2"
+"checksum clap 2.27.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1b8c532887f1a292d17de05ae858a8fe50a301e196f9ef0ddb7ccd0d1d00f180"
 "checksum edit-distance 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6a34f5204fbc13582de418611cf3a7dcdd07c6d312a5b631597ba72c06b9d9c9"
-"checksum either 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cbee135e9245416869bf52bd6ccc9b59e2482651510784e089b874272f02a252"
+"checksum either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "740178ddf48b1a9e878e6d6509a1442a2d42fd2928aae8e7a6f8a36fb01981b3"
 "checksum fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f6c0581a4e363262e52b87f59ee2afe3415361c6ec35e665924eb08afe8ff159"
 "checksum fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43f3795b4bae048dc6123a6b972cadde2e676f9ded08aef6bb77f5f157684a82"
 "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
 "checksum itertools 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3f2be4da1690a039e9ae5fd575f706a63ad5a2120f161b1d653c9da3930dd21"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
-"checksum lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c9e5e58fa1a4c3b915a561a78a22ee0cac6ab97dca2504428bc1cb074375f8d5"
+"checksum lazy_static 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "236eb37a62591d4a41a89b7763d7de3e06ca02d5ab2815446a8bae5d2f8c2d57"
-"checksum libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)" = "56cce3130fd040c28df6f495c8492e5ec5808fb4c9093c310df02b0c8f030148"
+"checksum libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "5ba3df4dcb460b9dfbd070d41c94c19209620c191b0340b929ce748a2bcd42d2"
-"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
+"checksum memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "148fab2e51b4f1cfc66da2a7c32981d1d3c083a803978268bb11fe4b86925e7a"
-"checksum rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "61efcbcd9fa8d8fbb07c84e34a8af18a1ff177b449689ad38a6e9457ecc7b2ae"
+"checksum rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)" = "6475140dfd8655aeb72e1fd4b7a1cc1c202be65d71669476e392fe62532b9edd"
 "checksum redox_syscall 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "8dde11f18c108289bef24469638a04dce49da56084f2d50618b226e47eb04509"
 "checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
 "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b"
 "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
 "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
-"checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
 "checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
-"checksum textwrap 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df8e08afc40ae3459e4838f303e465aa50d823df8d7f83ca88108f6d3afe7edd"
+"checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
 "checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14"
 "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
 "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"

Cargo.toml

@@ -8,17 +8,17 @@ homepage = "https://github.com/casey/just"
 readme = "crates-io-readme.md"

 [dependencies]
-ansi_term = "^0.9.0"
+ansi_term = "0.9.0"
-atty = "^0.2.1"
+atty = "0.2.1"
-brev = "^0.1.6"
+brev = "0.1.6"
-clap = "^2.0.0"
+clap = "2.0.0"
-edit-distance = "^2.0.0"
+edit-distance = "2.0.0"
-itertools = "^0.6.2"
+itertools = "0.6.2"
-lazy_static = "^0.2.1"
+lazy_static = "0.2.1"
-libc = "^0.2.21"
+libc = "0.2.21"
-regex = "^0.2.2"
+regex = "0.2.2"
-tempdir = "^0.3.5"
+tempdir = "0.3.5"
-unicode-width = "^0.1.3"
+unicode-width = "0.1.3"

 [dev-dependencies.utilities]
 path = "utilities"

README

@@ -507,7 +507,7 @@ If the first argument passed to `just` contains a `/`, then the following occurs:

 This may seem a little strange, but it's useful if you wish to run a command in a justfile that is in a subdirectory.

-For example, if you are in a directory which contains a subdirectory named `foo`, which contains justfile with the recipe `build`, which is also the default recipe, the following are all equivalent:
+For example, if you are in a directory which contains a subdirectory named `foo`, which contains a justfile with the recipe `build`, which is also the default recipe, the following are all equivalent:

 ```sh
 $ (cd foo && just build)

clippy.toml

@@ -1 +1,3 @@
 cyclomatic-complexity-threshold = 1337
+
+doc-valid-idents = ["FreeBSD"]
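The new `doc-valid-idents` entry tells clippy's `doc_markdown` lint that `FreeBSD` is valid as written, so the lint stops suggesting backticks for it in doc comments. A hedged sketch of the kind of doc comment this presumably keeps lint-clean (the function is invented for illustration):

```rust
/// Returns true on the platforms this tool supports, e.g. Linux, macOS, and FreeBSD.
///
/// Without `doc-valid-idents = ["FreeBSD"]`, clippy's `doc_markdown` lint would ask
/// for `FreeBSD` to be wrapped in backticks here.
pub fn platform_is_supported() -> bool {
    true
}
```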

justfile

@@ -62,7 +62,7 @@ install-dev-deps:

 # everyone's favorite animate paper clip
 clippy: lint
-	rustup run nightly cargo clippy -- -D clippy
+	cargo +nightly clippy -- -D clippy

 # count non-empty lines of code
 sloc:

src/assignment_evaluator.rs

@@ -7,7 +7,7 @@ pub fn evaluate_assignments<'a>(
   overrides: &Map<&str, &str>,
   quiet: bool,
   shell: &'a str,
-) -> Result<Map<&'a str, String>, RuntimeError<'a>> {
+) -> RunResult<'a, Map<&'a str, String>> {
   let mut evaluator = AssignmentEvaluator {
     assignments: assignments,
     evaluated: empty(),
@@ -32,7 +32,7 @@ fn run_backtick<'a, 'b>(
   exports: &Set<&'a str>,
   quiet: bool,
   shell: &'b str,
-) -> Result<String, RuntimeError<'a>> {
+) -> RunResult<'a, String> {
   let mut cmd = Command::new(shell);

   cmd.export_environment_variables(scope, exports)?;
@@ -46,7 +46,8 @@ fn run_backtick<'a, 'b>(
     process::Stdio::inherit()
   });

-  brev::output(cmd).map_err(|output_error| RuntimeError::Backtick{token: token.clone(), output_error})
+  brev::output(cmd)
+    .map_err(|output_error| RuntimeError::Backtick{token: token.clone(), output_error})
 }

 pub struct AssignmentEvaluator<'a: 'b, 'b> {
@@ -64,7 +65,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
     &mut self,
     line: &[Fragment<'a>],
     arguments: &Map<&str, Cow<str>>
-  ) -> Result<String, RuntimeError<'a>> {
+  ) -> RunResult<'a, String> {
     let mut evaluated = String::new();
     for fragment in line {
       match *fragment {
@@ -77,7 +78,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
     Ok(evaluated)
   }

-  fn evaluate_assignment(&mut self, name: &'a str) -> Result<(), RuntimeError<'a>> {
+  fn evaluate_assignment(&mut self, name: &'a str) -> RunResult<'a, ()> {
     if self.evaluated.contains_key(name) {
       return Ok(());
     }
@@ -102,7 +103,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
     &mut self,
     expression: &Expression<'a>,
     arguments: &Map<&str, Cow<str>>
-  ) -> Result<String, RuntimeError<'a>> {
+  ) -> RunResult<'a, String> {
     Ok(match *expression {
       Expression::Variable{name, ..} => {
         if self.evaluated.contains_key(name) {
@@ -141,39 +142,37 @@ mod test {
(only whitespace changes in this hunk; the test code is shown once, without - / + markers)
 use testing::parse_success;
 use Configuration;

 #[test]
 fn backtick_code() {
   match parse_success("a:\n echo {{`f() { return 100; }; f`}}")
     .run(&["a"], &Default::default()).unwrap_err() {
     RuntimeError::Backtick{token, output_error: OutputError::Code(code)} => {
       assert_eq!(code, 100);
       assert_eq!(token.lexeme, "`f() { return 100; }; f`");
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn export_assignment_backtick() {
   let text = r#"
export exported_variable = "A"
b = `echo $exported_variable`
recipe:
 echo {{b}}
"#;

   let options = Configuration {
     quiet: true,
     ..Default::default()
   };

   match parse_success(text).run(&["recipe"], &options).unwrap_err() {
     RuntimeError::Backtick{token, output_error: OutputError::Code(_)} => {
       assert_eq!(token.lexeme, "`echo $exported_variable`");
     },
     other => panic!("expected a backtick code errror, but got: {}", other),
   }
 }
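The `RunResult<'a, T>` alias used in the signatures above is re-exported from the runtime_error module (see the lib.rs hunk later in this commit); its definition is not part of this excerpt, but the usage implies something along these lines (a sketch, not the verbatim source):

```rust
// Assumed shape of the alias added in runtime_error.rs; it plays the same role
// for runtime failures that CompilationResult plays for compilation failures.
pub type RunResult<'a, T> = Result<T, RuntimeError<'a>>;

// Effect on one signature from this file:
//   before: fn run_backtick<'a, 'b>(...) -> Result<String, RuntimeError<'a>>
//   after:  fn run_backtick<'a, 'b>(...) -> RunResult<'a, String>
```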

src/assignment_resolver.rs

@@ -1,9 +1,11 @@
 use common::*;
+use CompilationErrorKind::*;

 pub fn resolve_assignments<'a>(
   assignments: &Map<&'a str, Expression<'a>>,
   assignment_tokens: &Map<&'a str, Token<'a>>,
-) -> Result<(), CompilationError<'a>> {
+) -> CompilationResult<'a, ()> {

   let mut resolver = AssignmentResolver {
     assignments: assignments,
@@ -29,7 +31,7 @@ struct AssignmentResolver<'a: 'b, 'b> {
 }

 impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
-  fn resolve_assignment(&mut self, name: &'a str) -> Result<(), CompilationError<'a>> {
+  fn resolve_assignment(&mut self, name: &'a str) -> CompilationResult<'a, ()> {
     if self.evaluated.contains(name) {
       return Ok(());
     }
@@ -48,13 +50,14 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
         line: 0,
         column: 0,
         width: None,
-        kind: CompilationErrorKind::Internal{message}
+        kind: Internal{message}
       });
     }
     Ok(())
   }

-  fn resolve_expression(&mut self, expression: &Expression<'a>) -> Result<(), CompilationError<'a>> {
+  fn resolve_expression(
+    &mut self, expression: &Expression<'a>) -> CompilationResult<'a, ()> {
     match *expression {
       Expression::Variable{name, ref token} => {
         if self.evaluated.contains(name) {
@@ -62,14 +65,14 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
         } else if self.seen.contains(name) {
           let token = &self.assignment_tokens[name];
           self.stack.push(name);
-          return Err(token.error(CompilationErrorKind::CircularVariableDependency {
+          return Err(token.error(CircularVariableDependency {
             variable: name,
             circle: self.stack.clone(),
           }));
         } else if self.assignments.contains_key(name) {
           self.resolve_assignment(name)?;
         } else {
-          return Err(token.error(CompilationErrorKind::UndefinedVariable{variable: name}));
+          return Err(token.error(UndefinedVariable{variable: name}));
         }
       }
       Expression::Concatination{ref lhs, ref rhs} => {
@@ -87,43 +90,46 @@ mod test {
 use testing::parse_error;
 use super::*;

 #[test]
 fn circular_variable_dependency() {
   let text = "a = b\nb = a";
+  let variable = "a";
+  let circle = vec!["a", "b", "a"];
   parse_error(text, CompilationError {
     text: text,
     index: 0,
     line: 0,
     column: 0,
     width: Some(1),
-    kind: CompilationErrorKind::CircularVariableDependency{variable: "a", circle: vec!["a", "b", "a"]}
+    kind: CircularVariableDependency{variable, circle}
   });
 }

 #[test]
 fn self_variable_dependency() {
   let text = "a = a";
+  let variable = "a";
+  let circle = vec!["a", "a"];
   parse_error(text, CompilationError {
     text: text,
     index: 0,
     line: 0,
     column: 0,
     width: Some(1),
-    kind: CompilationErrorKind::CircularVariableDependency{variable: "a", circle: vec!["a", "a"]}
+    kind: CircularVariableDependency{variable, circle}
   });
 }

 #[test]
 fn unknown_expression_variable() {
   let text = "x = yy";
   parse_error(text, CompilationError {
     text: text,
     index: 4,
     line: 0,
     column: 4,
     width: Some(2),
-    kind: CompilationErrorKind::UndefinedVariable{variable: "yy"},
+    kind: UndefinedVariable{variable: "yy"},
   });
 }
 }

src/command_ext.rs

@@ -5,7 +5,7 @@ pub trait CommandExt {
     &mut self,
     scope: &Map<&'a str, String>,
     exports: &Set<&'a str>
-  ) -> Result<(), RuntimeError<'a>>;
+  ) -> RunResult<'a, ()>;
 }

 impl CommandExt for Command {
@@ -13,7 +13,7 @@ impl CommandExt for Command {
     &mut self,
     scope: &Map<&'a str, String>,
     exports: &Set<&'a str>
-  ) -> Result<(), RuntimeError<'a>> {
+  ) -> RunResult<'a, ()> {
     for name in exports {
       if let Some(value) = scope.get(name) {
         self.env(name, value);

src/compilation_error.rs

@@ -2,6 +2,8 @@ use common::*;

 use misc::{Or, write_error_context, show_whitespace};

+pub type CompilationResult<'a, T> = Result<T, CompilationError<'a>>;
+
 #[derive(Debug, PartialEq)]
 pub struct CompilationError<'a> {
   pub text: &'a str,
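This alias is what lets `Result<T, CompilationError<'a>>` collapse to `CompilationResult<'a, T>` throughout the parser. A standalone sketch of the pattern, with lifetimes omitted and all names invented for illustration:

```rust
// A crate-wide error type plus a Result alias keeps fallible signatures short.
#[derive(Debug)]
struct CompilationError {
    message: String,
}

type CompilationResult<T> = Result<T, CompilationError>;

fn parse_count(input: &str) -> CompilationResult<u32> {
    input.trim().parse().map_err(|_| CompilationError {
        message: format!("not a number: {:?}", input),
    })
}

fn main() {
    assert_eq!(parse_count(" 42 ").unwrap(), 42);
    assert!(parse_count("nope").is_err());
}
```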

src/configuration.rs

@@ -1,6 +1,6 @@
 use common::*;

-pub const DEFAULT_SHELL: &'static str = "sh";
+pub const DEFAULT_SHELL: &str = "sh";

 pub struct Configuration<'a> {
   pub dry_run: bool,
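Dropping `'static` works because the elided lifetime of a reference in a `const` or `static` item is already `'static`; clippy flags the explicit annotation as redundant. A minimal sketch showing the two spellings are interchangeable:

```rust
// Both constants have type &'static str; the second simply elides the lifetime,
// which is exactly what this change does for DEFAULT_SHELL.
pub const EXPLICIT: &'static str = "sh";
pub const ELIDED: &str = "sh";

fn main() {
    let s: &'static str = ELIDED; // still usable where &'static str is required
    assert_eq!(s, EXPLICIT);
}
```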

src/cooked_string.rs

@@ -7,7 +7,7 @@ pub struct CookedString<'a> {
 }

 impl<'a> CookedString<'a> {
-  pub fn new(token: &Token<'a>) -> Result<CookedString<'a>, CompilationError<'a>> {
+  pub fn new(token: &Token<'a>) -> CompilationResult<'a, CookedString<'a>> {
     let raw = &token.lexeme[1..token.lexeme.len()-1];

     if let TokenKind::RawString = token.kind {

src/justfile.rs

@@ -46,7 +46,7 @@ impl<'a, 'b> Justfile<'a> where 'a: 'b {
     &'a self,
     arguments: &[&'a str],
     options: &Configuration<'a>,
-  ) -> Result<(), RuntimeError<'a>> {
+  ) -> RunResult<'a, ()> {
     let unknown_overrides = options.overrides.keys().cloned()
       .filter(|name| !self.assignments.contains_key(name))
       .collect::<Vec<_>>();
@@ -126,7 +126,7 @@ impl<'a, 'b> Justfile<'a> where 'a: 'b {
     scope: &Map<&'c str, String>,
     ran: &mut Set<&'a str>,
     options: &Configuration<'a>,
-  ) -> Result<(), RuntimeError> {
+  ) -> RunResult<()> {
     for dependency_name in &recipe.dependencies {
       if !ran.contains(dependency_name) {
         self.run_recipe(&self.recipes[dependency_name], &[], scope, ran, options)?;
@@ -179,16 +179,16 @@ mod test {
(the hunks below touch only indentation and blank lines; the test code is shown once, without - / + markers)
 }

 #[test]
 fn run_shebang() {
   // this test exists to make sure that shebang recipes
   // run correctly. although this script is still
   // executed by a shell its behavior depends on the value of a
   // variable and continuing even though a command fails,
   // whereas in plain recipes variables are not available
   // in subsequent lines and execution stops when a line
   // fails
   let text = "
a:
 #!/usr/bin/env sh
 code=200
@@ -197,15 +197,16 @@ a:
 x
";
   match parse_success(text).run(&["a"], &Default::default()).unwrap_err() {
     RuntimeError::Code{recipe, line_number, code} => {
       assert_eq!(recipe, "a");
       assert_eq!(code, 200);
       assert_eq!(line_number, None);
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn code_error() {
   match parse_success("fail:\n @exit 100")
@@ -219,105 +220,105 @@ fn code_error() {
   }
 }

 #[test]
 fn run_args() {
   let text = r#"
a return code:
 @x() { {{return}} {{code + "0"}}; }; x"#;
   match parse_success(text).run(&["a", "return", "15"], &Default::default()).unwrap_err() {
     RuntimeError::Code{recipe, line_number, code} => {
       assert_eq!(recipe, "a");
       assert_eq!(code, 150);
       assert_eq!(line_number, Some(3));
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn missing_some_arguments() {
   match parse_success("a b c d:").run(&["a", "b", "c"], &Default::default()).unwrap_err() {
     RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
       assert_eq!(recipe, "a");
       assert_eq!(found, 2);
       assert_eq!(min, 3);
       assert_eq!(max, 3);
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn missing_some_arguments_variadic() {
   match parse_success("a b c +d:").run(&["a", "B", "C"], &Default::default()).unwrap_err() {
     RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
       assert_eq!(recipe, "a");
       assert_eq!(found, 2);
       assert_eq!(min, 3);
       assert_eq!(max, usize::MAX - 1);
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn missing_all_arguments() {
   match parse_success("a b c d:\n echo {{b}}{{c}}{{d}}")
     .run(&["a"], &Default::default()).unwrap_err() {
     RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
       assert_eq!(recipe, "a");
       assert_eq!(found, 0);
       assert_eq!(min, 3);
       assert_eq!(max, 3);
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn missing_some_defaults() {
   match parse_success("a b c d='hello':").run(&["a", "b"], &Default::default()).unwrap_err() {
     RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
       assert_eq!(recipe, "a");
       assert_eq!(found, 1);
       assert_eq!(min, 2);
       assert_eq!(max, 3);
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn missing_all_defaults() {
   match parse_success("a b c='r' d='h':").run(&["a"], &Default::default()).unwrap_err() {
     RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
       assert_eq!(recipe, "a");
       assert_eq!(found, 0);
       assert_eq!(min, 1);
       assert_eq!(max, 3);
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn unknown_overrides() {
   let mut options: Configuration = Default::default();
   options.overrides.insert("foo", "bar");
   options.overrides.insert("baz", "bob");
   match parse_success("a:\n echo {{`f() { return 100; }; f`}}")
     .run(&["a"], &options).unwrap_err() {
     RuntimeError::UnknownOverrides{overrides} => {
       assert_eq!(overrides, &["baz", "foo"]);
     },
     other => panic!("expected a code run error, but got: {}", other),
   }
 }

 #[test]
 fn export_failure() {
   let text = r#"
export foo = "a"
baz = "c"
export bar = "b"
@@ -327,19 +328,17 @@ wut:
 echo $foo $bar $baz
"#;
   let options = Configuration {
     quiet: true,
     ..Default::default()
   };
   match parse_success(text).run(&["wut"], &options).unwrap_err() {
     RuntimeError::Code{code: _, line_number, recipe} => {
       assert_eq!(recipe, "wut");
       assert_eq!(line_number, Some(8));
     },
     other => panic!("expected a recipe code errror, but got: {}", other),
   }
 }
 }

src/lib.rs

@@ -10,28 +10,28 @@ extern crate regex;
 extern crate tempdir;
 extern crate unicode_width;

-mod platform;
-mod run;
-mod color;
-mod compilation_error;
-mod runtime_error;
-mod misc;
-mod justfile;
-mod recipe;
-mod token;
-mod parser;
-mod tokenizer;
-mod cooked_string;
-mod recipe_resolver;
-mod assignment_resolver;
-mod assignment_evaluator;
-mod configuration;
-mod parameter;
-mod expression;
-mod fragment;
-mod shebang;
-mod command_ext;
-mod range_ext;
+mod assignment_evaluator;
+mod assignment_resolver;
+mod color;
+mod command_ext;
+mod compilation_error;
+mod configuration;
+mod cooked_string;
+mod expression;
+mod fragment;
+mod justfile;
+mod misc;
+mod parameter;
+mod parser;
+mod platform;
+mod range_ext;
+mod recipe;
+mod recipe_resolver;
+mod run;
+mod runtime_error;
+mod shebang;
+mod token;
+mod tokenizer;

 #[cfg(test)] mod testing;
@@ -54,7 +54,7 @@ mod common {
   pub use assignment_evaluator::AssignmentEvaluator;
   pub use command_ext::CommandExt;
-  pub use compilation_error::{CompilationError, CompilationErrorKind};
+  pub use compilation_error::{CompilationError, CompilationErrorKind, CompilationResult};
   pub use configuration::Configuration;
   pub use cooked_string::CookedString;
   pub use expression::Expression;
@@ -64,14 +64,14 @@ mod common {
   pub use parameter::Parameter;
   pub use parser::Parser;
   pub use recipe::Recipe;
-  pub use runtime_error::RuntimeError;
+  pub use runtime_error::{RuntimeError, RunResult};
   pub use shebang::Shebang;
   pub use token::{Token, TokenKind};
 }

 use common::*;

-fn compile(text: &str) -> Result<Justfile, CompilationError> {
+fn compile(text: &str) -> CompilationResult<Justfile> {
   let tokens = tokenize(text)?;
   let parser = Parser::new(text, tokens);
   parser.justfile()

src/parser.rs

@@ -1,7 +1,8 @@
 use common::*;

 use itertools;
-use token::TokenKind::*;
+use TokenKind::*;
+use CompilationErrorKind::*;

 use recipe_resolver::resolve_recipes;
 use assignment_resolver::resolve_assignments;
@@ -76,7 +77,7 @@ impl<'a> Parser<'a> {
   }

   fn unexpected_token(&self, found: &Token<'a>, expected: &[TokenKind]) -> CompilationError<'a> {
-    found.error(CompilationErrorKind::UnexpectedToken {
+    found.error(UnexpectedToken {
       expected: expected.to_vec(),
       found: found.kind,
     })
@@ -84,12 +85,12 @@ impl<'a> Parser<'a> {
   fn recipe(
     &mut self,
-    name: Token<'a>,
+    name: &Token<'a>,
     doc: Option<Token<'a>>,
     quiet: bool,
-  ) -> Result<(), CompilationError<'a>> {
+  ) -> CompilationResult<'a, ()> {
     if let Some(recipe) = self.recipes.get(name.lexeme) {
-      return Err(name.error(CompilationErrorKind::DuplicateRecipe {
+      return Err(name.error(DuplicateRecipe {
         recipe: recipe.name,
         first: recipe.line_number
       }));
@@ -113,13 +114,13 @@ impl<'a> Parser<'a> {
       let variadic = plus.is_some();

       if parsed_variadic_parameter {
-        return Err(parameter.error(CompilationErrorKind::ParameterFollowsVariadicParameter {
+        return Err(parameter.error(ParameterFollowsVariadicParameter {
           parameter: parameter.lexeme,
         }));
       }

       if parameters.iter().any(|p| p.name == parameter.lexeme) {
-        return Err(parameter.error(CompilationErrorKind::DuplicateParameter {
+        return Err(parameter.error(DuplicateParameter {
           recipe: name.lexeme, parameter: parameter.lexeme
         }));
       }
@@ -137,7 +138,7 @@ impl<'a> Parser<'a> {
       }

       if parsed_parameter_with_default && default.is_none() {
-        return Err(parameter.error(CompilationErrorKind::RequiredParameterFollowsDefaultParameter{
+        return Err(parameter.error(RequiredParameterFollowsDefaultParameter{
           parameter: parameter.lexeme,
         }));
       }
@@ -167,7 +168,7 @@ impl<'a> Parser<'a> {
     let mut dependency_tokens = vec![];
     while let Some(dependency) = self.accept(Name) {
       if dependencies.contains(&dependency.lexeme) {
-        return Err(dependency.error(CompilationErrorKind::DuplicateDependency {
+        return Err(dependency.error(DuplicateDependency {
           recipe: name.lexeme,
           dependency: dependency.lexeme
         }));
@@ -190,7 +191,7 @@ impl<'a> Parser<'a> {
         continue;
       }
       if let Some(token) = self.expect(Line) {
-        return Err(token.error(CompilationErrorKind::Internal{
+        return Err(token.error(Internal{
           message: format!("Expected a line but got {}", token.kind)
         }))
       }
@@ -207,7 +208,7 @@ impl<'a> Parser<'a> {
           && !lines.last().and_then(|line| line.last())
             .map(Fragment::continuation).unwrap_or(false)
           && (token.lexeme.starts_with(' ') || token.lexeme.starts_with('\t')) {
-          return Err(token.error(CompilationErrorKind::ExtraLeadingWhitespace));
+          return Err(token.error(ExtraLeadingWhitespace));
         }
       }
       fragments.push(Fragment::Text{text: token});
@@ -243,7 +244,7 @@ impl<'a> Parser<'a> {
     Ok(())
   }

-  fn expression(&mut self, interpolation: bool) -> Result<Expression<'a>, CompilationError<'a>> {
+  fn expression(&mut self, interpolation: bool) -> CompilationResult<'a, Expression<'a>> {
     let first = self.tokens.next().unwrap();
     let lhs = match first.kind {
       Name => Expression::Variable {name: first.lexeme, token: first},
@@ -273,9 +274,9 @@ impl<'a> Parser<'a> {
     }
   }

-  fn assignment(&mut self, name: Token<'a>, export: bool) -> Result<(), CompilationError<'a>> {
+  fn assignment(&mut self, name: Token<'a>, export: bool) -> CompilationResult<'a, ()> {
     if self.assignments.contains_key(name.lexeme) {
-      return Err(name.error(CompilationErrorKind::DuplicateVariable {variable: name.lexeme}));
+      return Err(name.error(DuplicateVariable {variable: name.lexeme}));
     }
     if export {
       self.exports.insert(name.lexeme);
@@ -286,7 +287,7 @@ impl<'a> Parser<'a> {
     Ok(())
   }

-  pub fn justfile(mut self) -> Result<Justfile<'a>, CompilationError<'a>> {
+  pub fn justfile(mut self) -> CompilationResult<'a, Justfile<'a>> {
     let mut doc = None;
     loop {
       match self.tokens.next() {
@@ -298,14 +299,14 @@ impl<'a> Parser<'a> {
           }
           Comment => {
             if let Some(token) = self.expect_eol() {
-              return Err(token.error(CompilationErrorKind::Internal {
+              return Err(token.error(Internal {
                 message: format!("found comment followed by {}", token.kind),
               }));
             }
             doc = Some(token);
           }
           At => if let Some(name) = self.accept(Name) {
-            self.recipe(name, doc, true)?;
+            self.recipe(&name, doc, true)?;
             doc = None;
           } else {
             let unexpected = &self.tokens.next().unwrap();
@@ -318,14 +319,14 @@ impl<'a> Parser<'a> {
              doc = None;
            } else {
              self.tokens.put_back(next);
-             self.recipe(token, doc, false)?;
+             self.recipe(&token, doc, false)?;
              doc = None;
            }
          } else if self.accepted(Equals) {
            self.assignment(token, false)?;
            doc = None;
          } else {
-           self.recipe(token, doc, false)?;
+           self.recipe(&token, doc, false)?;
            doc = None;
          },
          _ => return Err(self.unexpected_token(&token, &[Name, At])),
@@ -336,7 +337,7 @@ impl<'a> Parser<'a> {
           line: 0,
           column: 0,
           width: None,
-          kind: CompilationErrorKind::Internal {
+          kind: Internal {
             message: "unexpected end of token stream".to_string()
           }
         }),
@@ -344,7 +345,7 @@ impl<'a> Parser<'a> {
     }

     if let Some(token) = self.tokens.next() {
-      return Err(token.error(CompilationErrorKind::Internal {
+      return Err(token.error(Internal {
         message: format!("unexpected token remaining after parsing completed: {:?}", token.kind)
       }))
     }
@@ -354,7 +355,7 @@ impl<'a> Parser<'a> {
     for recipe in self.recipes.values() {
       for parameter in &recipe.parameters {
         if self.assignments.contains_key(parameter.token.lexeme) {
-          return Err(parameter.token.error(CompilationErrorKind::ParameterShadowsVariable {
+          return Err(parameter.token.error(ParameterShadowsVariable {
             parameter: parameter.token.lexeme
           }));
         }
@@ -362,7 +363,7 @@ impl<'a> Parser<'a> {
       for dependency in &recipe.dependency_tokens {
         if !self.recipes[dependency.lexeme].parameters.is_empty() {
-          return Err(dependency.error(CompilationErrorKind::DependencyHasParameters {
+          return Err(dependency.error(DependencyHasParameters {
             recipe: recipe.name,
             dependency: dependency.lexeme,
           }));
@@ -387,78 +388,83 @@ mod test {
 use testing::parse_success;
 use testing::parse_error;

-fn parse_summary(input: &str, output: &str) {
-  let justfile = parse_success(input);
-  let s = format!("{:#}", justfile);
-  if s != output {
-    println!("got:\n\"{}\"\n", s);
-    println!("\texpected:\n\"{}\"", output);
-    assert_eq!(s, output);
-  }
-}
+macro_rules! summary_test {
+  ($name:ident, $input:expr, $expected:expr $(,)*) => {
+    #[test]
+    fn $name() {
+      let input = $input;
+      let expected = $expected;
+      let justfile = parse_success(input);
+      let actual = format!("{:#}", justfile);
+      if actual != expected {
+        println!("got:\n\"{}\"\n", actual);
+        println!("\texpected:\n\"{}\"", expected);
+        assert_eq!(actual, expected);
+      }
+    }
+  }
+}
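The `summary_test!` macro above stamps out one `#[test]` function per invocation, so each parse-summary case below becomes a short, declarative block. A minimal, self-contained sketch of the same declarative-macro pattern (the names and arithmetic are invented for illustration):

```rust
// Each invocation expands to a complete #[test] function with its own name.
macro_rules! square_test {
  ($name:ident, $input:expr, $expected:expr $(,)*) => {
    #[test]
    fn $name() {
      assert_eq!($input * $input, $expected);
    }
  }
}

square_test!{two_squared, 2, 4}
square_test!{three_squared, 3, 9}
```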
-#[test]
-fn parse_empty() {
-  parse_summary("
+summary_test!{parse_empty,
+  "

 # hello

-", "");
-}
+",
+  "",
+}

-#[test]
-fn parse_string_default() {
-  parse_summary(r#"
+summary_test!{parse_string_default,
+  r#"
 foo a="b\t":
-"#, r#"foo a='b\t':"#);
-}
+"#,
+  r#"foo a='b\t':"#,
+}

-#[test]
-fn parse_variadic() {
-  parse_summary(r#"
+summary_test!{parse_variadic,
+  r#"
 foo +a:
-"#, r#"foo +a:"#);
-}
+"#,
+  r#"foo +a:"#,
+}

-#[test]
-fn parse_variadic_string_default() {
-  parse_summary(r#"
+summary_test!{parse_variadic_string_default,
+  r#"
 foo +a="Hello":
-"#, r#"foo +a='Hello':"#);
-}
+"#,
+  r#"foo +a='Hello':"#,
+}

-#[test]
-fn parse_raw_string_default() {
-  parse_summary(r#"
+summary_test!{parse_raw_string_default,
+  r#"
 foo a='b\t':
-"#, r#"foo a='b\\t':"#);
-}
+"#,
+  r#"foo a='b\\t':"#,
+}

-#[test]
-fn parse_export() {
-  parse_summary(r#"
+summary_test!{parse_export,
+  r#"
 export a = "hello"
-"#, r#"export a = "hello""#);
-}
+"#,
+  r#"export a = "hello""#,
+}

-#[test]
-fn parse_complex() {
-  parse_summary("
+summary_test!{parse_complex,
+  "
 x:
 y:
 z:
@@ -472,7 +478,8 @@ hello a b c : x y z #hello
 1
 2
 3
-", "bar = foo
+",
+"bar = foo
 foo = \"xx\"
@@ -490,359 +497,349 @@ x:
 y:
-z:");
-}
+z:"
 }
-#[test]
-fn parse_shebang() {
-  parse_summary("
+summary_test!{parse_shebang,
+  "
 practicum = 'hello'

 install:
 \t#!/bin/sh
 \tif [[ -f {{practicum}} ]]; then
 \t\treturn
 \tfi
-", "practicum = \"hello\"
+",
+  "practicum = \"hello\"

 install:
 #!/bin/sh
 if [[ -f {{practicum}} ]]; then
 \treturn
-fi"
-  );
-}
+fi",
+}

-#[test]
-fn parse_assignments() {
-  parse_summary(
-    r#"a = "0"
+summary_test!{parse_assignments,
+  r#"a = "0"
 c = a + b + a + b
 b = "1"
 "#,
-    r#"a = "0"
+  r#"a = "0"
 b = "1"
-c = a + b + a + b"#);
-}
+c = a + b + a + b"#,
+}

-#[test]
-fn parse_assignment_backticks() {
-  parse_summary(
-    "a = `echo hello`
+summary_test!{parse_assignment_backticks,
+  "a = `echo hello`
 c = a + b + a + b
 b = `echo goodbye`",
-    "a = `echo hello`
+  "a = `echo hello`
 b = `echo goodbye`
-c = a + b + a + b");
-}
+c = a + b + a + b",
+}

-#[test]
-fn parse_interpolation_backticks() {
-  parse_summary(
-    r#"a:
+summary_test!{parse_interpolation_backticks,
+  r#"a:
  echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}"#,
   r#"a:
  echo {{`echo hello` + "blarg"}} {{`echo bob`}}"#,
-  );
-}
+}
+summary_test!{eof_test,
+  "x:\ny:\nz:\na b c: x y z",
+  "a b c: x y z\n\nx:\n\ny:\n\nz:",
+}

#[test]
fn missing_colon() {
let text = "a b c\nd e f";
parse_error(text, CompilationError {
text: text,
index: 5,
line: 0,
column: 5,
width: Some(1),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
});
}
#[test]
fn missing_default_eol() {
let text = "hello arg=\n";
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(1),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![StringToken, RawString], found: Eol},
});
}
#[test]
fn missing_default_eof() {
let text = "hello arg=";
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(0),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![StringToken, RawString], found: Eof},
});
}
#[test]
fn missing_default_colon() {
let text = "hello arg=:";
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(1),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![StringToken, RawString], found: Colon},
});
}
#[test]
fn missing_default_backtick() {
let text = "hello arg=`hello`";
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(7),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![StringToken, RawString], found: Backtick},
});
}
#[test]
fn parameter_after_variadic() {
let text = "foo +a bbb:";
parse_error(text, CompilationError {
text: text,
index: 7,
line: 0,
column: 7,
width: Some(3),
kind: CompilationErrorKind::ParameterFollowsVariadicParameter{parameter: "bbb"}
});
}
#[test]
fn required_after_default() {
let text = "hello arg='foo' bar:";
parse_error(text, CompilationError {
text: text,
index: 16,
line: 0,
column: 16,
width: Some(3),
kind: CompilationErrorKind::RequiredParameterFollowsDefaultParameter{parameter: "bar"},
});
}
#[test]
fn missing_eol() {
let text = "a b c: z =";
parse_error(text, CompilationError {
text: text,
index: 9,
line: 0,
column: 9,
width: Some(1),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals},
});
}
#[test]
fn eof_test() {
parse_summary("x:\ny:\nz:\na b c: x y z", "a b c: x y z\n\nx:\n\ny:\n\nz:");
}
#[test]
fn duplicate_parameter() {
let text = "a b b:";
parse_error(text, CompilationError {
text: text,
index: 4,
line: 0,
column: 4,
width: Some(1),
kind: CompilationErrorKind::DuplicateParameter{recipe: "a", parameter: "b"}
});
}
#[test]
fn parameter_shadows_varible() {
let text = "foo = \"h\"\na foo:";
parse_error(text, CompilationError {
text: text,
index: 12,
line: 1,
column: 2,
width: Some(3),
kind: CompilationErrorKind::ParameterShadowsVariable{parameter: "foo"}
});
}
#[test]
fn dependency_has_parameters() {
let text = "foo arg:\nb: foo";
parse_error(text, CompilationError {
text: text,
index: 12,
line: 1,
column: 3,
width: Some(3),
kind: CompilationErrorKind::DependencyHasParameters{recipe: "b", dependency: "foo"}
});
}
#[test]
fn duplicate_dependency() {
let text = "a b c: b c z z";
parse_error(text, CompilationError {
text: text,
index: 13,
line: 0,
column: 13,
width: Some(1),
kind: CompilationErrorKind::DuplicateDependency{recipe: "a", dependency: "z"}
});
}
#[test]
fn duplicate_recipe() {
let text = "a:\nb:\na:";
parse_error(text, CompilationError {
text: text,
index: 6,
line: 2,
column: 0,
width: Some(1),
kind: CompilationErrorKind::DuplicateRecipe{recipe: "a", first: 0}
});
}
#[test]
fn duplicate_variable() {
let text = "a = \"0\"\na = \"0\"";
parse_error(text, CompilationError {
text: text,
index: 8,
line: 1,
column: 0,
width: Some(1),
kind: CompilationErrorKind::DuplicateVariable{variable: "a"}
});
}
#[test]
fn string_quote_escape() {
parse_summary(
r#"a = "hello\"""#,
r#"a = "hello\"""#
);
}
#[test]
fn string_escapes() {
parse_summary(
r#"a = "\n\t\r\"\\""#,
r#"a = "\n\t\r\"\\""#
);
}
#[test]
fn parameters() {
parse_summary(
"a b c:
{{b}} {{c}}",
"a b c:
{{b}} {{c}}",
);
}
summary_test!{string_quote_escape,
r#"a = "hello\"""#,
r#"a = "hello\"""#,
}
summary_test!{string_escapes,
r#"a = "\n\t\r\"\\""#,
r#"a = "\n\t\r\"\\""#,
}
summary_test!{parameters,
"a b c:
{{b}} {{c}}",
"a b c:
{{b}} {{c}}",
}
#[test]
fn missing_colon() {
let text = "a b c\nd e f";
parse_error(text, CompilationError {
text: text,
index: 5,
line: 0,
column: 5,
width: Some(1),
kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
});
}
#[test]
fn missing_default_eol() {
let text = "hello arg=\n";
let expected = vec![StringToken, RawString];
let found = Eol;
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(1),
kind: UnexpectedToken{expected, found},
});
}
#[test]
fn extra_whitespace() {
let text = "a:\n blah\n blarg";
parse_error(text, CompilationError {
text: text,
index: 10,
line: 2,
column: 1,
width: Some(6),
kind: CompilationErrorKind::ExtraLeadingWhitespace
});
// extra leading whitespace is okay in a shebang recipe
parse_success("a:\n #!\n print(1)");
}
#[test]
fn interpolation_outside_of_recipe() {
let text = "{{";
parse_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: Some(2),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Name, At], found: InterpolationStart},
});
}
#[test]
fn unclosed_interpolation_delimiter() {
let text = "a:\n echo {{ foo";
parse_error(text, CompilationError {
text: text,
index: 15,
line: 1,
column: 12,
width: Some(0),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Plus, Eol, InterpolationEnd], found: Dedent},
});
}
#[test]
fn plus_following_parameter() {
let text = "a b c+:";
parse_error(text, CompilationError {
text: text,
index: 5,
line: 0,
column: 5,
width: Some(1),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Name], found: Plus},
});
}
#[test]
fn missing_default_eof() {
let text = "hello arg=";
let expected = vec![StringToken, RawString];
let found = Eof;
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(0),
kind: UnexpectedToken{expected, found},
});
}
#[test]
fn missing_default_colon() {
let text = "hello arg=:";
let expected = vec![StringToken, RawString];
let found = Colon;
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(1),
kind: UnexpectedToken{expected, found},
});
}
#[test]
fn missing_default_backtick() {
let text = "hello arg=`hello`";
let expected = vec![StringToken, RawString];
let found = Backtick;
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(7),
kind: UnexpectedToken{expected, found},
});
}
#[test]
fn parameter_after_variadic() {
let text = "foo +a bbb:";
parse_error(text, CompilationError {
text: text,
index: 7,
line: 0,
column: 7,
width: Some(3),
kind: ParameterFollowsVariadicParameter{parameter: "bbb"}
});
}
#[test]
fn required_after_default() {
let text = "hello arg='foo' bar:";
parse_error(text, CompilationError {
text: text,
index: 16,
line: 0,
column: 16,
width: Some(3),
kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"},
});
}
#[test]
fn missing_eol() {
let text = "a b c: z =";
parse_error(text, CompilationError {
text: text,
index: 9,
line: 0,
column: 9,
width: Some(1),
kind: UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals},
});
}
#[test]
fn duplicate_parameter() {
let text = "a b b:";
parse_error(text, CompilationError {
text: text,
index: 4,
line: 0,
column: 4,
width: Some(1),
kind: DuplicateParameter{recipe: "a", parameter: "b"}
});
}
#[test]
fn parameter_shadows_varible() {
let text = "foo = \"h\"\na foo:";
parse_error(text, CompilationError {
text: text,
index: 12,
line: 1,
column: 2,
width: Some(3),
kind: ParameterShadowsVariable{parameter: "foo"}
});
}
#[test]
fn dependency_has_parameters() {
let text = "foo arg:\nb: foo";
parse_error(text, CompilationError {
text: text,
index: 12,
line: 1,
column: 3,
width: Some(3),
kind: DependencyHasParameters{recipe: "b", dependency: "foo"}
});
}
#[test]
fn duplicate_dependency() {
let text = "a b c: b c z z";
parse_error(text, CompilationError {
text: text,
index: 13,
line: 0,
column: 13,
width: Some(1),
kind: DuplicateDependency{recipe: "a", dependency: "z"}
});
}
#[test]
fn duplicate_recipe() {
let text = "a:\nb:\na:";
parse_error(text, CompilationError {
text: text,
index: 6,
line: 2,
column: 0,
width: Some(1),
kind: DuplicateRecipe{recipe: "a", first: 0}
});
}
#[test]
fn duplicate_variable() {
let text = "a = \"0\"\na = \"0\"";
parse_error(text, CompilationError {
text: text,
index: 8,
line: 1,
column: 0,
width: Some(1),
kind: DuplicateVariable{variable: "a"}
});
}
#[test]
fn extra_whitespace() {
let text = "a:\n blah\n blarg";
parse_error(text, CompilationError {
text: text,
index: 10,
line: 2,
column: 1,
width: Some(6),
kind: ExtraLeadingWhitespace
});
// extra whitespace is okay in a shebang recipe
parse_success("a:\n #!\n print(1)");
}
#[test]
fn interpolation_outside_of_recipe() {
let text = "{{";
let expected = vec![Name, At];
let found = InterpolationStart;
parse_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: Some(2),
kind: UnexpectedToken{expected, found},
});
}
#[test]
fn unclosed_interpolation_delimiter() {
let text = "a:\n echo {{ foo";
let expected = vec![Plus, Eol, InterpolationEnd];
let found = Dedent;
parse_error(text, CompilationError {
text: text,
index: 15,
line: 1,
column: 12,
width: Some(0),
kind: UnexpectedToken{expected, found},
});
}
#[test]
fn plus_following_parameter() {
let text = "a b c+:";
parse_error(text, CompilationError {
text: text,
index: 5,
line: 0,
column: 5,
width: Some(1),
kind: UnexpectedToken{expected: vec![Name], found: Plus},
});
}
#[test]
fn readme_test() {
let mut justfiles = vec![];
let mut current = None;
for line in brev::slurp("README.asc").lines() {
if let Some(mut justfile) = current {
if line == "```" {
justfiles.push(justfile);
current = None;
} else {
justfile += line;
justfile += "\n";
current = Some(justfile);
}
} else if line == "```make" {
current = Some(String::new());
}
}
for justfile in justfiles {
parse_success(&justfile);
}
}
}
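The refactored parser tests above invoke a summary_test! macro whose definition is not part of this hunk. As a hedged sketch only (the parse_summary helper and the argument shape are taken from the old tests, not from the visible diff), such a macro would presumably look something like:

// Hypothetical sketch, not shown in this commit's visible diff:
// wraps a name plus two justfile strings into a #[test] function
// that delegates to the existing parse_summary helper.
macro_rules! summary_test {
  ($name:ident, $input:expr, $expected:expr $(,)*) => {
    #[test]
    fn $name() {
      parse_summary($input, $expected);
    }
  }
}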


@@ -54,7 +54,7 @@ impl<'a> Recipe<'a> {
scope: &Map<&'a str, String>,
exports: &Set<&'a str>,
options: &Configuration,
) -> Result<(), RuntimeError<'a>> {
) -> RunResult<'a, ()> {
if options.verbose {
let color = options.color.stderr().banner();
eprintln!("{}===> Running recipe `{}`...{}", color.prefix(), self.name, color.suffix());
@@ -150,7 +150,7 @@ impl<'a> Recipe<'a> {
// create a command to run the script
let mut command = Platform::make_shebang_command(&path, interpreter, argument)
.map_err(|output_error| RuntimeError::Cygpath{recipe: self.name, output_error: output_error})?;
.map_err(|output_error| RuntimeError::Cygpath{recipe: self.name, output_error})?;
command.export_environment_variables(scope, exports)?;


@@ -1,10 +1,12 @@
use common::*;
use CompilationErrorKind::*;
pub fn resolve_recipes<'a>(
recipes: &Map<&'a str, Recipe<'a>>,
assignments: &Map<&'a str, Expression<'a>>,
text: &'a str,
) -> Result<(), CompilationError<'a>> {
) -> CompilationResult<'a, ()> {
let mut resolver = RecipeResolver {
seen: empty(),
stack: empty(),
@@ -35,14 +37,14 @@ pub fn resolve_recipes<'a>(
// two lifetime parameters instead of one, with one being the lifetime
// of the struct, and the second being the lifetime of the tokens
// that it contains
let error = variable.error(CompilationErrorKind::UndefinedVariable{variable: name});
let error = variable.error(UndefinedVariable{variable: name});
return Err(CompilationError {
text: text,
index: error.index,
line: error.line,
column: error.column,
width: error.width,
kind: CompilationErrorKind::UndefinedVariable {
kind: UndefinedVariable {
variable: &text[error.index..error.index + error.width.unwrap()],
}
});
@@ -64,7 +66,7 @@ struct RecipeResolver<'a: 'b, 'b> {
}
impl<'a, 'b> RecipeResolver<'a, 'b> {
fn resolve(&mut self, recipe: &Recipe<'a>) -> Result<(), CompilationError<'a>> {
fn resolve(&mut self, recipe: &Recipe<'a>) -> CompilationResult<'a, ()> {
if self.resolved.contains(recipe.name) {
return Ok(())
}
@@ -76,7 +78,7 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
if self.seen.contains(dependency.name) {
let first = self.stack[0];
self.stack.push(first);
return Err(dependency_token.error(CompilationErrorKind::CircularRecipeDependency {
return Err(dependency_token.error(CircularRecipeDependency {
recipe: recipe.name,
circle: self.stack.iter()
.skip_while(|name| **name != dependency.name)
@@ -85,7 +87,7 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
}
self.resolve(dependency)?;
},
None => return Err(dependency_token.error(CompilationErrorKind::UnknownDependency {
None => return Err(dependency_token.error(UnknownDependency {
recipe: recipe.name,
unknown: dependency_token.lexeme
})),
@@ -102,70 +104,70 @@ mod test {
use super::*;
use testing::parse_error;
#[test]
fn circular_recipe_dependency() {
let text = "a: b\nb: a";
parse_error(text, CompilationError {
text: text,
index: 8,
line: 1,
column: 3,
width: Some(1),
kind: CompilationErrorKind::CircularRecipeDependency{recipe: "b", circle: vec!["a", "b", "a"]}
});
}
#[test]
fn circular_recipe_dependency() {
let text = "a: b\nb: a";
let recipe = "b";
let circle = vec!["a", "b", "a"];
parse_error(text, CompilationError {
text: text,
index: 8,
line: 1,
column: 3,
width: Some(1),
kind: CircularRecipeDependency{recipe, circle}
});
}
#[test]
fn self_recipe_dependency() {
let text = "a: a";
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: Some(1),
kind: CompilationErrorKind::CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]}
kind: CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]}
});
}
#[test]
fn unknown_dependency() {
let text = "a: b";
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: Some(1),
kind: UnknownDependency{recipe: "a", unknown: "b"}
});
}
#[test]
fn unknown_interpolation_variable() {
let text = "x:\n {{ hello}}";
parse_error(text, CompilationError {
text: text,
index: 9,
line: 1,
column: 6,
width: Some(5),
kind: UndefinedVariable{variable: "hello"},
});
}
#[test]
fn unknown_dependency() {
let text = "a: b";
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: Some(1),
kind: CompilationErrorKind::UnknownDependency{recipe: "a", unknown: "b"}
});
}
#[test]
fn unknown_interpolation_variable() {
let text = "x:\n {{ hello}}";
parse_error(text, CompilationError {
text: text,
index: 9,
line: 1,
column: 6,
width: Some(5),
kind: CompilationErrorKind::UndefinedVariable{variable: "hello"},
});
}
#[test]
fn unknown_second_interpolation_variable() {
let text = "wtf=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}";
parse_error(text, CompilationError {
text: text,
index: 33,
line: 3,
column: 16,
width: Some(3),
kind: CompilationErrorKind::UndefinedVariable{variable: "lol"},
});
}
#[test]
fn unknown_second_interpolation_variable() {
let text = "wtf=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}";
parse_error(text, CompilationError {
text: text,
index: 33,
line: 3,
column: 16,
width: Some(3),
kind: UndefinedVariable{variable: "lol"},
});
}
}
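Both resolver signatures above now return CompilationResult<'a, ()>. That alias is not shown in this diff; by analogy with the RunResult alias added further down, it is presumably just a shorthand along these lines:

// Assumed definition, parallel to the RunResult alias below; not visible in this hunk.
pub type CompilationResult<'a, T> = Result<T, CompilationError<'a>>;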


@@ -276,7 +276,7 @@ pub fn run() {
if let Some(doc) = recipe.doc {
print!(" {} {}", doc_color.paint("#"), doc_color.paint(doc));
}
println!("");
println!();
}
process::exit(EXIT_SUCCESS);
}


@@ -6,6 +6,8 @@ use misc::{And, Or, maybe_s, Tick, ticks, write_error_context};
use self::RuntimeError::*;
pub type RunResult<'a, T> = Result<T, RuntimeError<'a>>;
fn write_token_error_context(f: &mut fmt::Formatter, token: &Token) -> Result<(), fmt::Error> {
write_error_context(
f,


@@ -1,6 +1,7 @@
use common::*;
use compile;
use tokenizer::tokenize;
pub fn parse_success(text: &str) -> Justfile {
match compile(text) {
@@ -10,7 +11,10 @@ pub fn parse_success(text: &str) -> Justfile {
}
pub fn parse_error(text: &str, expected: CompilationError) {
if let Err(error) = compile(text) {
let tokens = tokenize(text).unwrap();
let parser = Parser::new(text, tokens);
if let Err(error) = parser.justfile() {
assert_eq!(error.text, expected.text);
assert_eq!(error.index, expected.index);
assert_eq!(error.line, expected.line);


@@ -1,6 +1,7 @@
use common::*;
use TokenKind::*;
use CompilationErrorKind::*;
fn re(pattern: &str) -> Regex {
Regex::new(pattern).unwrap()
@@ -18,7 +19,7 @@ fn mixed_whitespace(text: &str) -> bool {
!(text.chars().all(|c| c == ' ') || text.chars().all(|c| c == '\t'))
}
pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
pub fn tokenize(text: &str) -> CompilationResult<Vec<Token>> {
lazy_static! {
static ref BACKTICK: Regex = token(r"`[^`\n\r]*`" );
static ref COLON: Regex = token(r":" );
@@ -84,7 +85,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
// indent: was no indentation, now there is
(&State::Start, Some(current)) => {
if mixed_whitespace(current) {
return error!(CompilationErrorKind::MixedLeadingWhitespace{whitespace: current})
return error!(MixedLeadingWhitespace{whitespace: current})
}
//indent = Some(current);
state.push(State::Indent(current));
@@ -99,7 +100,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
// was indentation and still is, check if the new indentation matches
(&State::Indent(previous), Some(current)) => {
if !current.starts_with(previous) {
return error!(CompilationErrorKind::InconsistentLeadingWhitespace{
return error!(InconsistentLeadingWhitespace{
expected: previous,
found: current
});
@@ -108,7 +109,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
}
// at column 0 in some other state: this should never happen
(&State::Text, _) | (&State::Interpolation, _) => {
return error!(CompilationErrorKind::Internal {
return error!(Internal {
message: "unexpected state at column 0".to_string()
});
}
@@ -143,7 +144,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
(column, state.last().unwrap(), LINE.captures(rest)) {
let line = captures.get(0).unwrap().as_str();
if !line.starts_with(indent) {
return error!(CompilationErrorKind::Internal{message: "unexpected indent".to_string()});
return error!(Internal{message: "unexpected indent".to_string()});
}
state.push(State::Text);
(&line[0..indent.len()], "", Line)
@@ -161,7 +162,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
state.pop();
(captures.get(1).unwrap().as_str(), captures.get(2).unwrap().as_str(), Eol)
} else {
return error!(CompilationErrorKind::Internal {
return error!(Internal {
message: format!("Could not match token in text state: \"{}\"", rest)
});
}
@@ -176,7 +177,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
(captures.get(1).unwrap().as_str(), captures.get(2).unwrap().as_str(), Name)
} else if let Some(captures) = EOL.captures(rest) {
if state.last().unwrap() == &State::Interpolation {
return error!(CompilationErrorKind::Internal {
return error!(Internal {
message: "hit EOL while still in interpolation state".to_string()
});
}
@@ -196,18 +197,18 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
} else if let Some(captures) = RAW_STRING.captures(rest) {
(captures.get(1).unwrap().as_str(), captures.get(2).unwrap().as_str(), RawString)
} else if UNTERMINATED_RAW_STRING.is_match(rest) {
return error!(CompilationErrorKind::UnterminatedString);
return error!(UnterminatedString);
} else if let Some(captures) = STRING.captures(rest) {
let prefix = captures.get(1).unwrap().as_str();
let contents = &rest[prefix.len()+1..];
if contents.is_empty() {
return error!(CompilationErrorKind::UnterminatedString);
return error!(UnterminatedString);
}
let mut len = 0;
let mut escape = false;
for c in contents.chars() {
if c == '\n' || c == '\r' {
return error!(CompilationErrorKind::UnterminatedString);
return error!(UnterminatedString);
} else if !escape && c == '"' {
break;
} else if !escape && c == '\\' {
@@ -220,13 +221,13 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
let start = prefix.len();
let content_end = start + len + 1;
if escape || content_end >= rest.len() {
return error!(CompilationErrorKind::UnterminatedString);
return error!(UnterminatedString);
}
(prefix, &rest[start..content_end + 1], StringToken)
} else if rest.starts_with("#!") {
return error!(CompilationErrorKind::OuterShebang)
return error!(OuterShebang)
} else {
return error!(CompilationErrorKind::UnknownStartOfToken)
return error!(UnknownStartOfToken)
};
tokens.push(Token {
@@ -245,7 +246,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
let last = tokens.last().unwrap();
match last.kind {
Eof => {},
_ => return Err(last.error(CompilationErrorKind::Internal {
_ => return Err(last.error(Internal {
message: format!("zero length token: {:?}", last)
})),
}
@@ -283,33 +284,26 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
#[cfg(test)]
mod test {
use super::*;
use testing::parse_error;
fn tokenize_success(text: &str, expected_summary: &str) {
let tokens = tokenize(text).unwrap();
let roundtrip = tokens.iter().map(|t| {
let mut s = String::new();
s += t.prefix;
s += t.lexeme;
s
}).collect::<Vec<_>>().join("");
let summary = token_summary(&tokens);
if summary != expected_summary {
panic!("token summary mismatch:\nexpected: {}\ngot: {}\n", expected_summary, summary);
}
assert_eq!(text, roundtrip);
}
fn tokenize_error(text: &str, expected: CompilationError) {
if let Err(error) = tokenize(text) {
assert_eq!(error.text, expected.text);
assert_eq!(error.index, expected.index);
assert_eq!(error.line, expected.line);
assert_eq!(error.column, expected.column);
assert_eq!(error.kind, expected.kind);
assert_eq!(error, expected);
} else {
panic!("tokenize() succeeded but expected: {}\n{}", expected, text);
}
}
macro_rules! summary_test {
($name:ident, $input:expr, $expected:expr $(,)*) => {
#[test]
fn $name() {
let input = $input;
let expected = $expected;
let tokens = tokenize(input).unwrap();
let roundtrip = tokens.iter().map(|t| {
let mut s = String::new();
s += t.prefix;
s += t.lexeme;
s
}).collect::<Vec<_>>().join("");
let actual = token_summary(&tokens);
if actual != expected {
panic!("token summary mismatch:\nexpected: {}\ngot: {}\n", expected, actual);
}
assert_eq!(input, roundtrip);
}
}
}
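As a usage note for the macro above, an invocation such as summary_test!{tokenize_comment, "a:=#", "N:=#."} expands to roughly the following test, which tokenizes the input, compares the token summary, and checks that prefixes plus lexemes round-trip back to the input:

// Rough expansion of summary_test!{tokenize_comment, "a:=#", "N:=#."}.
#[test]
fn tokenize_comment() {
  let input = "a:=#";
  let expected = "N:=#.";
  let tokens = tokenize(input).unwrap();
  let roundtrip = tokens.iter().map(|t| {
    let mut s = String::new();
    s += t.prefix;
    s += t.lexeme;
    s
  }).collect::<Vec<_>>().join("");
  let actual = token_summary(&tokens);
  if actual != expected {
    panic!("token summary mismatch:\nexpected: {}\ngot: {}\n", expected, actual);
  }
  assert_eq!(input, roundtrip);
}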
@@ -337,55 +331,83 @@ mod test {
}).collect::<Vec<_>>().join("")
}
macro_rules! error_test {
(
name: $name:ident,
input: $input:expr,
index: $index:expr,
line: $line:expr,
column: $column:expr,
width: $width:expr,
kind: $kind:expr,
) => {
#[test]
fn $name() {
let input = $input;
let expected = CompilationError {
text: input,
index: $index,
line: $line,
column: $column,
width: $width,
kind: $kind,
};
if let Err(error) = tokenize(input) {
assert_eq!(error.text, expected.text);
assert_eq!(error.index, expected.index);
assert_eq!(error.line, expected.line);
assert_eq!(error.column, expected.column);
assert_eq!(error.kind, expected.kind);
assert_eq!(error, expected);
} else {
panic!("tokenize() succeeded but expected: {}\n{}", expected, input);
}
}
}
}
summary_test!{tokenize_strings,
r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#,
r#"N="+'+"+'#."#,
}
#[test]
fn tokanize_strings() {
tokenize_success(
r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#,
r#"N="+'+"+'#."#
);
}
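For comparison, under the error_test! macro defined above, the unterminated_string case that appears later in this module expands to roughly:

// Rough expansion of the unterminated_string error_test! invocation.
#[test]
fn unterminated_string() {
  let input = r#"a = ""#;
  let expected = CompilationError {
    text: input,
    index: 3,
    line: 0,
    column: 3,
    width: None,
    kind: UnterminatedString,
  };
  if let Err(error) = tokenize(input) {
    assert_eq!(error.text, expected.text);
    assert_eq!(error.index, expected.index);
    assert_eq!(error.line, expected.line);
    assert_eq!(error.column, expected.column);
    assert_eq!(error.kind, expected.kind);
    assert_eq!(error, expected);
  } else {
    panic!("tokenize() succeeded but expected: {}\n{}", expected, input);
  }
}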
summary_test!{tokenize_recipe_interpolation_eol,
"foo: # some comment
{{hello}}
",
"N:#$>^{N}$<.",
}
#[test]
fn tokenize_recipe_interpolation_eol() {
let text = "foo: # some comment
{{hello}}
";
tokenize_success(text, "N:#$>^{N}$<.");
}
summary_test!{tokenize_recipe_interpolation_eof,
"foo: # more comments
{{hello}}
# another comment
",
"N:#$>^{N}$<#$.",
}
#[test]
fn tokenize_recipe_interpolation_eof() {
let text = "foo: # more comments
{{hello}}
# another comment
";
tokenize_success(text, "N:#$>^{N}$<#$.");
}
summary_test!{tokenize_recipe_complex_interpolation_expression,
"foo: #lol\n {{a + b + \"z\" + blarg}}",
"N:#$>^{N+N+\"+N}<.",
}
#[test]
fn tokenize_recipe_complex_interpolation_expression() {
let text = "foo: #lol\n {{a + b + \"z\" + blarg}}";
tokenize_success(text, "N:#$>^{N+N+\"+N}<.");
}
summary_test!{tokenize_recipe_multiple_interpolations,
"foo:#ok\n {{a}}0{{b}}1{{c}}",
"N:#$>^{N}_{N}_{N}<.",
}
#[test]
fn tokenize_recipe_multiple_interpolations() {
let text = "foo:#ok\n {{a}}0{{b}}1{{c}}";
tokenize_success(text, "N:#$>^{N}_{N}_{N}<.");
}
summary_test!{tokenize_junk,
"bob
hello blah blah blah : a b c #whatever
",
"N$$NNNN:NNN#$.",
}
#[test]
fn tokenize_junk() {
let text = "bob
hello blah blah blah : a b c #whatever
";
tokenize_success(text, "N$$NNNN:NNN#$.");
}
@@ -396,41 +418,32 @@ hello:
summary_test!{tokenize_empty_lines,
"
# this does something
hello:
asdf
dsdf # whatever
# yolo
",
"$#$N:$>^_$^_$$^_$$^_$$<#$.",
}
#[test]
fn tokenize_empty_lines() {
let text = "
# this does something
hello:
asdf
dsdf # whatever
# yolo
";
tokenize_success(text, "$#$N:$>^_$^_$$^_$$^_$$<#$.");
}
summary_test!{tokenize_comment_before_variable,
"
#
A='1'
echo:
echo {{A}}
",
"$#$N='$N:$>^_{N}$<.",
}
#[test]
fn tokenize_comment_before_variable() {
let text = "
#
A='1'
echo:
echo {{A}}
";
tokenize_success(text, "$#$N='$N:$>^_{N}$<.");
}
summary_test!{tokenize_interpolation_backticks,
"hello:\n echo {{`echo hello` + `echo goodbye`}}",
"N:$>^_{`+`}<.",
}
#[test]
fn tokenize_interpolation_backticks() {
tokenize_success(
"hello:\n echo {{`echo hello` + `echo goodbye`}}",
"N:$>^_{`+`}<."
);
}
summary_test!{tokenize_assignment_backticks,
"a = `echo hello` + `echo goodbye`",
"N=`+`.",
}
#[test]
fn tokenize_assignment_backticks() {
tokenize_success(
"a = `echo hello` + `echo goodbye`",
"N=`+`."
);
}
@@ -442,79 +455,15 @@ hello:
summary_test!{tokenize_multiple,
"
hello:
a
b
# hello
bob:
frank
",
"$N:$>^_$^_$$^_$$^_$$<#$N:$>^_$<.",
}
#[test]
fn tokenize_multiple() {
let text = "
hello:
a
b
# hello
bob:
frank
";
tokenize_success(text, "$N:$>^_$^_$$^_$$^_$$<#$N:$>^_$<.");
}
summary_test!{tokenize_comment, "a:=#", "N:=#."}
#[test]
fn tokenize_comment() {
tokenize_success("a:=#", "N:=#.")
}
#[test]
fn tokenize_space_then_tab() {
let text = "a:
0
1
\t2
";
tokenize_error(text, CompilationError {
text: text,
index: 9,
line: 3,
column: 0,
width: None,
kind: CompilationErrorKind::InconsistentLeadingWhitespace{expected: " ", found: "\t"},
});
}
#[test]
fn tokenize_tabs_then_tab_space() {
let text = "a:
\t\t0
\t\t 1
\t 2
";
tokenize_error(text, CompilationError {
text: text,
index: 12,
line: 3,
column: 0,
width: None,
kind: CompilationErrorKind::InconsistentLeadingWhitespace{expected: "\t\t", found: "\t "},
});
}
#[test]
fn tokenize_outer_shebang() {
let text = "#!/usr/bin/env bash";
tokenize_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: None,
kind: CompilationErrorKind::OuterShebang
});
}
#[test]
fn tokenize_unknown() {
let text = "~";
tokenize_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: None,
kind: CompilationErrorKind::UnknownStartOfToken
});
}
#[test]
fn tokenize_order() {
let text = r"
b: a
@mv a b
@@ -526,60 +475,95 @@ d: c
@rm c
c: b
@mv b c";
tokenize_success(text, "$N:N$>^_$$<N:$>^_$^_$$<N:N$>^_$$<N:N$>^_<.");
}
summary_test!{tokenize_order,
r"
b: a
@mv a b
@rm c
c: b
@mv b c",
"$N:N$>^_$$<N:$>^_$^_$$<N:N$>^_$$<N:N$>^_<.",
}
error_test! {
name: tokenize_space_then_tab,
input: "a:
0
1
\t2
",
index: 9,
line: 3,
column: 0,
width: None,
kind: InconsistentLeadingWhitespace{expected: " ", found: "\t"},
}
error_test! {
name: tokenize_tabs_then_tab_space,
input: "a:
\t\t0
\t\t 1
\t 2
",
index: 12,
line: 3,
column: 0,
width: None,
kind: InconsistentLeadingWhitespace{expected: "\t\t", found: "\t "},
}
error_test! {
name: tokenize_outer_shebang,
input: "#!/usr/bin/env bash",
index: 0,
line: 0,
column: 0,
width: None,
kind: OuterShebang,
}
error_test! {
name: tokenize_unknown,
input: "~",
index: 0,
line: 0,
column: 0,
width: None,
kind: UnknownStartOfToken,
}
error_test! {
name: unterminated_string,
input: r#"a = ""#,
index: 3,
line: 0,
column: 3,
width: None,
kind: UnterminatedString,
}
error_test! {
name: unterminated_string_with_escapes,
input: r#"a = "\n\t\r\"\\"#,
index: 3,
line: 0,
column: 3,
width: None,
kind: UnterminatedString,
}
error_test! {
name: unterminated_raw_string,
input: "r a='asdf",
index: 4,
line: 0,
column: 4,
width: None,
kind: UnterminatedString,
}
error_test! {
name: mixed_leading_whitespace,
input: "a:\n\t echo hello",
index: 3,
line: 1,
column: 0,
width: None,
kind: MixedLeadingWhitespace{whitespace: "\t "},
}
#[test]
fn unterminated_string() {
let text = r#"a = ""#;
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: None,
kind: CompilationErrorKind::UnterminatedString,
});
}
#[test]
fn unterminated_string_with_escapes() {
let text = r#"a = "\n\t\r\"\\"#;
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: None,
kind: CompilationErrorKind::UnterminatedString,
});
}
#[test]
fn unterminated_raw_string() {
let text = "r a='asdf";
parse_error(text, CompilationError {
text: text,
index: 4,
line: 0,
column: 4,
width: None,
kind: CompilationErrorKind::UnterminatedString,
});
}
#[test]
fn mixed_leading_whitespace() {
let text = "a:\n\t echo hello";
parse_error(text, CompilationError {
text: text,
index: 3,
line: 1,
column: 0,
width: None,
kind: CompilationErrorKind::MixedLeadingWhitespace{whitespace: "\t "}
});
}
}