mirror of
https://gitlab.computer.surgery/matrix/grapevine.git
synced 2025-12-18 16:21:24 +01:00
Merge branch 'benjamin/complement' into 'main'
basic complement support See merge request matrix/grapevine!47
This commit is contained in:
commit
f3e0de22ad
17 changed files with 1903 additions and 26 deletions
2
.cargo/config.toml
Normal file
2
.cargo/config.toml
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
[alias]
|
||||||
|
xtask = "run --package xtask --"
|
||||||
228
Cargo.lock
generated
228
Cargo.lock
generated
|
|
@ -247,6 +247,15 @@ dependencies = [
|
||||||
"windows-targets 0.52.6",
|
"windows-targets 0.52.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "backtrace-ext"
|
||||||
|
version = "0.2.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50"
|
||||||
|
dependencies = [
|
||||||
|
"backtrace",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "base64"
|
name = "base64"
|
||||||
version = "0.21.7"
|
version = "0.21.7"
|
||||||
|
|
@ -382,6 +391,12 @@ version = "1.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cfg_aliases"
|
||||||
|
version = "0.1.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cfg_aliases"
|
name = "cfg_aliases"
|
||||||
version = "0.2.1"
|
version = "0.2.1"
|
||||||
|
|
@ -444,6 +459,19 @@ version = "1.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
|
checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "console"
|
||||||
|
version = "0.15.8"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
|
||||||
|
dependencies = [
|
||||||
|
"encode_unicode",
|
||||||
|
"lazy_static",
|
||||||
|
"libc",
|
||||||
|
"unicode-width",
|
||||||
|
"windows-sys 0.52.0",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "const-oid"
|
name = "const-oid"
|
||||||
version = "0.9.6"
|
version = "0.9.6"
|
||||||
|
|
@ -600,6 +628,12 @@ version = "1.13.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
|
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "encode_unicode"
|
||||||
|
version = "0.3.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "enum-as-inner"
|
name = "enum-as-inner"
|
||||||
version = "0.6.1"
|
version = "0.6.1"
|
||||||
|
|
@ -817,7 +851,7 @@ dependencies = [
|
||||||
"image",
|
"image",
|
||||||
"jsonwebtoken",
|
"jsonwebtoken",
|
||||||
"lru-cache",
|
"lru-cache",
|
||||||
"nix",
|
"nix 0.29.0",
|
||||||
"num_cpus",
|
"num_cpus",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"opentelemetry",
|
"opentelemetry",
|
||||||
|
|
@ -1150,6 +1184,28 @@ dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "indicatif"
|
||||||
|
version = "0.17.8"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
|
||||||
|
dependencies = [
|
||||||
|
"console",
|
||||||
|
"instant",
|
||||||
|
"number_prefix",
|
||||||
|
"portable-atomic",
|
||||||
|
"unicode-width",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "instant"
|
||||||
|
version = "0.1.13"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ipconfig"
|
name = "ipconfig"
|
||||||
version = "0.3.2"
|
version = "0.3.2"
|
||||||
|
|
@ -1168,6 +1224,12 @@ version = "2.10.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4"
|
checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "is_ci"
|
||||||
|
version = "1.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itertools"
|
name = "itertools"
|
||||||
version = "0.12.1"
|
version = "0.12.1"
|
||||||
|
|
@ -1393,6 +1455,37 @@ version = "2.7.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "miette"
|
||||||
|
version = "7.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4edc8853320c2a0dab800fbda86253c8938f6ea88510dc92c5f1ed20e794afc1"
|
||||||
|
dependencies = [
|
||||||
|
"backtrace",
|
||||||
|
"backtrace-ext",
|
||||||
|
"cfg-if",
|
||||||
|
"miette-derive",
|
||||||
|
"owo-colors",
|
||||||
|
"supports-color",
|
||||||
|
"supports-hyperlinks",
|
||||||
|
"supports-unicode",
|
||||||
|
"terminal_size",
|
||||||
|
"textwrap",
|
||||||
|
"thiserror",
|
||||||
|
"unicode-width",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "miette-derive"
|
||||||
|
version = "7.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mime"
|
name = "mime"
|
||||||
version = "0.3.17"
|
version = "0.3.17"
|
||||||
|
|
@ -1436,6 +1529,18 @@ dependencies = [
|
||||||
"windows-sys 0.52.0",
|
"windows-sys 0.52.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nix"
|
||||||
|
version = "0.28.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags 2.6.0",
|
||||||
|
"cfg-if",
|
||||||
|
"cfg_aliases 0.1.1",
|
||||||
|
"libc",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "nix"
|
name = "nix"
|
||||||
version = "0.29.0"
|
version = "0.29.0"
|
||||||
|
|
@ -1444,7 +1549,7 @@ checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags 2.6.0",
|
"bitflags 2.6.0",
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"cfg_aliases",
|
"cfg_aliases 0.2.1",
|
||||||
"libc",
|
"libc",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
@ -1512,6 +1617,12 @@ dependencies = [
|
||||||
"libc",
|
"libc",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "number_prefix"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "object"
|
name = "object"
|
||||||
version = "0.36.4"
|
version = "0.36.4"
|
||||||
|
|
@ -1626,6 +1737,12 @@ version = "0.1.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "owo-colors"
|
||||||
|
version = "4.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "fb37767f6569cd834a413442455e0f066d0d522de8630436e2a1761d9726ba56"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "parking_lot"
|
name = "parking_lot"
|
||||||
version = "0.12.3"
|
version = "0.12.3"
|
||||||
|
|
@ -1779,6 +1896,12 @@ dependencies = [
|
||||||
"miniz_oxide 0.7.4",
|
"miniz_oxide 0.7.4",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "portable-atomic"
|
||||||
|
version = "1.6.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "powerfmt"
|
name = "powerfmt"
|
||||||
version = "0.2.0"
|
version = "0.2.0"
|
||||||
|
|
@ -1812,6 +1935,16 @@ dependencies = [
|
||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "process-wrap"
|
||||||
|
version = "8.0.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "38ee68ae331824036479c84060534b18254c864fa73366c58d86db3b7b811619"
|
||||||
|
dependencies = [
|
||||||
|
"indexmap 2.5.0",
|
||||||
|
"nix 0.28.0",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prometheus"
|
name = "prometheus"
|
||||||
version = "0.13.4"
|
version = "0.13.4"
|
||||||
|
|
@ -2636,6 +2769,16 @@ version = "1.3.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "signal-hook"
|
||||||
|
version = "0.3.17"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
"signal-hook-registry",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "signal-hook-registry"
|
name = "signal-hook-registry"
|
||||||
version = "1.4.2"
|
version = "1.4.2"
|
||||||
|
|
@ -2693,6 +2836,12 @@ version = "1.13.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
|
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "smawk"
|
||||||
|
version = "0.3.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "socket2"
|
name = "socket2"
|
||||||
version = "0.5.7"
|
version = "0.5.7"
|
||||||
|
|
@ -2756,6 +2905,27 @@ version = "2.6.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "supports-color"
|
||||||
|
version = "3.0.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8775305acf21c96926c900ad056abeef436701108518cf890020387236ac5a77"
|
||||||
|
dependencies = [
|
||||||
|
"is_ci",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "supports-hyperlinks"
|
||||||
|
version = "3.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2c0a1e5168041f5f3ff68ff7d95dcb9c8749df29f6e7e89ada40dd4c9de404ee"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "supports-unicode"
|
||||||
|
version = "3.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "2.0.77"
|
version = "2.0.77"
|
||||||
|
|
@ -2792,6 +2962,17 @@ dependencies = [
|
||||||
"windows-sys 0.48.0",
|
"windows-sys 0.48.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "textwrap"
|
||||||
|
version = "0.16.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9"
|
||||||
|
dependencies = [
|
||||||
|
"smawk",
|
||||||
|
"unicode-linebreak",
|
||||||
|
"unicode-width",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thiserror"
|
name = "thiserror"
|
||||||
version = "1.0.64"
|
version = "1.0.64"
|
||||||
|
|
@ -3279,6 +3460,12 @@ version = "1.0.13"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
|
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-linebreak"
|
||||||
|
version = "0.1.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-normalization"
|
name = "unicode-normalization"
|
||||||
version = "0.1.24"
|
version = "0.1.24"
|
||||||
|
|
@ -3288,6 +3475,12 @@ dependencies = [
|
||||||
"tinyvec",
|
"tinyvec",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-width"
|
||||||
|
version = "0.1.14"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unsafe-libyaml"
|
name = "unsafe-libyaml"
|
||||||
version = "0.2.11"
|
version = "0.2.11"
|
||||||
|
|
@ -3690,6 +3883,37 @@ version = "2.5.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "213b7324336b53d2414b2db8537e56544d981803139155afa84f76eeebb7a546"
|
checksum = "213b7324336b53d2414b2db8537e56544d981803139155afa84f76eeebb7a546"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "xshell"
|
||||||
|
version = "0.2.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6db0ab86eae739efd1b054a8d3d16041914030ac4e01cd1dca0cf252fd8b6437"
|
||||||
|
dependencies = [
|
||||||
|
"xshell-macros",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "xshell-macros"
|
||||||
|
version = "0.2.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9d422e8e38ec76e2f06ee439ccc765e9c6a9638b9e7c9f2e8255e4d41e8bd852"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "xtask"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"clap",
|
||||||
|
"indicatif",
|
||||||
|
"miette",
|
||||||
|
"process-wrap",
|
||||||
|
"rand",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"signal-hook",
|
||||||
|
"strum",
|
||||||
|
"xshell",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "zerocopy"
|
name = "zerocopy"
|
||||||
version = "0.7.35"
|
version = "0.7.35"
|
||||||
|
|
|
||||||
113
Cargo.toml
113
Cargo.toml
|
|
@ -1,3 +1,11 @@
|
||||||
|
[workspace]
|
||||||
|
members = ["xtask"]
|
||||||
|
[workspace.package]
|
||||||
|
license = "Apache-2.0"
|
||||||
|
|
||||||
|
# See also `rust-toolchain.toml`
|
||||||
|
rust-version = "1.81.0"
|
||||||
|
|
||||||
# Keep alphabetically sorted
|
# Keep alphabetically sorted
|
||||||
[workspace.lints.rust]
|
[workspace.lints.rust]
|
||||||
elided_lifetimes_in_paths = "warn"
|
elided_lifetimes_in_paths = "warn"
|
||||||
|
|
@ -71,21 +79,8 @@ wildcard_dependencies = "warn"
|
||||||
missing_errors_doc = "allow"
|
missing_errors_doc = "allow"
|
||||||
missing_panics_doc = "allow"
|
missing_panics_doc = "allow"
|
||||||
|
|
||||||
[package]
|
|
||||||
name = "grapevine"
|
|
||||||
description = "A Matrix homeserver written in Rust"
|
|
||||||
license = "Apache-2.0"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
|
|
||||||
# See also `rust-toolchain.toml`
|
|
||||||
rust-version = "1.81.0"
|
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
||||||
# Keep sorted
|
# Keep sorted
|
||||||
[dependencies]
|
[workspace.dependencies]
|
||||||
argon2 = "0.5.3"
|
argon2 = "0.5.3"
|
||||||
async-trait = "0.1.82"
|
async-trait = "0.1.82"
|
||||||
axum = { version = "0.7.6", default-features = false, features = ["form", "http1", "http2", "json", "matched-path", "tracing"] }
|
axum = { version = "0.7.6", default-features = false, features = ["form", "http1", "http2", "json", "matched-path", "tracing"] }
|
||||||
|
|
@ -102,8 +97,11 @@ http-body-util = "0.1.2"
|
||||||
hyper = "1.4.1"
|
hyper = "1.4.1"
|
||||||
hyper-util = { version = "0.1.8", features = ["client", "client-legacy", "service"] }
|
hyper-util = { version = "0.1.8", features = ["client", "client-legacy", "service"] }
|
||||||
image = { version = "0.25.2", default-features = false, features = ["jpeg", "png", "gif"] }
|
image = { version = "0.25.2", default-features = false, features = ["jpeg", "png", "gif"] }
|
||||||
|
indicatif = "0.17.8"
|
||||||
jsonwebtoken = "9.3.0"
|
jsonwebtoken = "9.3.0"
|
||||||
lru-cache = "0.1.2"
|
lru-cache = "0.1.2"
|
||||||
|
miette = { version = "7.2.0", features = ["fancy"] }
|
||||||
|
nix = { version = "0.29", features = ["resource"] }
|
||||||
num_cpus = "1.16.0"
|
num_cpus = "1.16.0"
|
||||||
once_cell = "1.19.0"
|
once_cell = "1.19.0"
|
||||||
opentelemetry = "0.24.0"
|
opentelemetry = "0.24.0"
|
||||||
|
|
@ -111,27 +109,29 @@ opentelemetry-jaeger-propagator = "0.3.0"
|
||||||
opentelemetry-otlp = "0.17.0"
|
opentelemetry-otlp = "0.17.0"
|
||||||
opentelemetry-prometheus = "0.17.0"
|
opentelemetry-prometheus = "0.17.0"
|
||||||
opentelemetry_sdk = { version = "0.24.0", features = ["rt-tokio"] }
|
opentelemetry_sdk = { version = "0.24.0", features = ["rt-tokio"] }
|
||||||
parking_lot = { version = "0.12.3", optional = true }
|
parking_lot = "0.12.3"
|
||||||
phf = { version = "0.11.2", features = ["macros"] }
|
phf = { version = "0.11.2", features = ["macros"] }
|
||||||
|
process-wrap = { version = "8.0.2", default-features = false, features = ["std", "process-group"] }
|
||||||
prometheus = "0.13.4"
|
prometheus = "0.13.4"
|
||||||
rand = "0.8.5"
|
rand = "0.8.5"
|
||||||
regex = "1.10.6"
|
regex = "1.10.6"
|
||||||
reqwest = { version = "0.12.7", default-features = false, features = ["http2", "rustls-tls-native-roots", "socks"] }
|
reqwest = { version = "0.12.7", default-features = false, features = ["http2", "rustls-tls-native-roots", "socks"] }
|
||||||
ring = "0.17.8"
|
ring = "0.17.8"
|
||||||
rocksdb = { package = "rust-rocksdb", version = "0.26.0", features = ["lz4", "multi-threaded-cf", "zstd"], optional = true }
|
rocksdb = { package = "rust-rocksdb", version = "0.26.0", features = ["lz4", "multi-threaded-cf", "zstd"] }
|
||||||
ruma = { git = "https://github.com/ruma/ruma", branch = "main", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "server-util", "state-res", "unstable-msc2448", "unstable-msc3575", "unstable-exhaustive-types", "ring-compat", "unstable-unspecified" ] }
|
ruma = { git = "https://github.com/ruma/ruma", branch = "main", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "server-util", "state-res", "unstable-msc2448", "unstable-msc3575", "unstable-exhaustive-types", "ring-compat", "unstable-unspecified" ] }
|
||||||
rusqlite = { version = "0.32.1", optional = true, features = ["bundled"] }
|
rusqlite = { version = "0.32.1", features = ["bundled"] }
|
||||||
rustls = { version = "0.23.13", default-features = false, features = ["ring", "log", "logging", "std", "tls12"] }
|
rustls = { version = "0.23.13", default-features = false, features = ["ring", "log", "logging", "std", "tls12"] }
|
||||||
sd-notify = { version = "0.4.2", optional = true }
|
sd-notify = { version = "0.4.2" }
|
||||||
serde = { version = "1.0.210", features = ["rc"] }
|
serde = { version = "1.0.210", features = ["rc", "derive"] }
|
||||||
serde_html_form = "0.2.6"
|
serde_html_form = "0.2.6"
|
||||||
serde_json = { version = "1.0.128", features = ["raw_value"] }
|
serde_json = { version = "1.0.128", features = ["raw_value"] }
|
||||||
serde_yaml = "0.9.34"
|
serde_yaml = "0.9.34"
|
||||||
sha-1 = "0.10.1"
|
sha-1 = "0.10.1"
|
||||||
|
signal-hook = "0.3.17"
|
||||||
strum = { version = "0.26.3", features = ["derive"] }
|
strum = { version = "0.26.3", features = ["derive"] }
|
||||||
thiserror = "1.0.64"
|
thiserror = "1.0.64"
|
||||||
thread_local = "1.1.8"
|
thread_local = "1.1.8"
|
||||||
tikv-jemallocator = { version = "0.6.0", features = ["unprefixed_malloc_on_supported_platforms"], optional = true }
|
tikv-jemallocator = { version = "0.6.0", features = ["unprefixed_malloc_on_supported_platforms"] }
|
||||||
tokio = { version = "1.40.0", features = ["fs", "macros", "signal", "sync"] }
|
tokio = { version = "1.40.0", features = ["fs", "macros", "signal", "sync"] }
|
||||||
toml = "0.8.19"
|
toml = "0.8.19"
|
||||||
tower = { version = "0.5.1", features = ["util"] }
|
tower = { version = "0.5.1", features = ["util"] }
|
||||||
|
|
@ -142,9 +142,80 @@ tracing-opentelemetry = "0.25.0"
|
||||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"] }
|
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"] }
|
||||||
trust-dns-resolver = "0.23.2"
|
trust-dns-resolver = "0.23.2"
|
||||||
xdg = "2.5.2"
|
xdg = "2.5.2"
|
||||||
|
xshell = "0.2.6"
|
||||||
|
|
||||||
|
[package]
|
||||||
|
name = "grapevine"
|
||||||
|
description = "A Matrix homeserver written in Rust"
|
||||||
|
license.workspace = true
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
rust-version.workspace = true
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
# Keep sorted
|
||||||
|
[dependencies]
|
||||||
|
argon2.workspace = true
|
||||||
|
async-trait.workspace = true
|
||||||
|
axum.workspace = true
|
||||||
|
axum-extra.workspace = true
|
||||||
|
axum-server.workspace = true
|
||||||
|
base64.workspace = true
|
||||||
|
bytes.workspace = true
|
||||||
|
clap.workspace = true
|
||||||
|
futures-util.workspace = true
|
||||||
|
hmac.workspace = true
|
||||||
|
html-escape.workspace = true
|
||||||
|
http.workspace = true
|
||||||
|
http-body-util.workspace = true
|
||||||
|
hyper.workspace = true
|
||||||
|
hyper-util.workspace = true
|
||||||
|
image.workspace = true
|
||||||
|
jsonwebtoken.workspace = true
|
||||||
|
lru-cache.workspace = true
|
||||||
|
num_cpus.workspace = true
|
||||||
|
once_cell.workspace = true
|
||||||
|
opentelemetry.workspace = true
|
||||||
|
opentelemetry-jaeger-propagator.workspace = true
|
||||||
|
opentelemetry-otlp.workspace = true
|
||||||
|
opentelemetry-prometheus.workspace = true
|
||||||
|
opentelemetry_sdk.workspace = true
|
||||||
|
parking_lot = { workspace = true, optional = true }
|
||||||
|
phf.workspace = true
|
||||||
|
prometheus.workspace = true
|
||||||
|
rand.workspace = true
|
||||||
|
regex.workspace = true
|
||||||
|
reqwest.workspace = true
|
||||||
|
ring.workspace = true
|
||||||
|
rocksdb = { workspace = true, optional = true }
|
||||||
|
ruma.workspace = true
|
||||||
|
rustls.workspace = true
|
||||||
|
rusqlite = { workspace = true, optional = true }
|
||||||
|
sd-notify = { workspace = true, optional = true }
|
||||||
|
serde.workspace = true
|
||||||
|
serde_html_form.workspace = true
|
||||||
|
serde_json.workspace = true
|
||||||
|
serde_yaml.workspace = true
|
||||||
|
sha-1.workspace = true
|
||||||
|
strum.workspace = true
|
||||||
|
thiserror.workspace = true
|
||||||
|
thread_local.workspace = true
|
||||||
|
tikv-jemallocator = { workspace = true, optional = true }
|
||||||
|
tokio.workspace = true
|
||||||
|
toml.workspace = true
|
||||||
|
tower.workspace = true
|
||||||
|
tower-http.workspace = true
|
||||||
|
tracing.workspace = true
|
||||||
|
tracing-flame.workspace = true
|
||||||
|
tracing-opentelemetry.workspace = true
|
||||||
|
tracing-subscriber.workspace = true
|
||||||
|
trust-dns-resolver.workspace = true
|
||||||
|
xdg.workspace = true
|
||||||
|
|
||||||
[target.'cfg(unix)'.dependencies]
|
[target.'cfg(unix)'.dependencies]
|
||||||
nix = { version = "0.29", features = ["resource"] }
|
nix.workspace = true
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["rocksdb", "sqlite", "systemd"]
|
default = ["rocksdb", "sqlite", "systemd"]
|
||||||
|
|
|
||||||
|
|
@ -5,11 +5,33 @@ set -euo pipefail
|
||||||
toplevel="$(git rev-parse --show-toplevel)"
|
toplevel="$(git rev-parse --show-toplevel)"
|
||||||
|
|
||||||
# Build and cache the specified arguments
|
# Build and cache the specified arguments
|
||||||
|
#
|
||||||
|
# Usage: nix-build-and-cache just [args...] -- [build-only-args...]
|
||||||
|
#
|
||||||
|
# Arguments after the '--' (build-only-args) will only be passed to the
|
||||||
|
# 'nix build' command, while arguments before '--' will also be passed to any
|
||||||
|
# other nix command using the installable list. This is useful for things like
|
||||||
|
# '--no-link', which are only relevant to 'nix build'.
|
||||||
just() {
|
just() {
|
||||||
|
build_args=()
|
||||||
|
args=()
|
||||||
|
build_only_mode=0
|
||||||
|
for arg in "$@"; do
|
||||||
|
if [ "$build_only_mode" = 1 ]; then
|
||||||
|
build_args+=( "$arg" )
|
||||||
|
elif [ "$arg" = "--" ]; then
|
||||||
|
# Everything after the -- is passed only to 'nix build'
|
||||||
|
build_only_mode=1
|
||||||
|
else
|
||||||
|
args+=( "$arg" )
|
||||||
|
build_args+=( "$arg" )
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
if command -v nom &> /dev/null; then
|
if command -v nom &> /dev/null; then
|
||||||
nom build "$@"
|
nom build "${build_args[@]}"
|
||||||
else
|
else
|
||||||
nix build "$@"
|
nix build "${build_args[@]}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -z ${ATTIC_TOKEN+x} ]; then
|
if [ -z ${ATTIC_TOKEN+x} ]; then
|
||||||
|
|
@ -24,7 +46,7 @@ just() {
|
||||||
"$ATTIC_TOKEN"
|
"$ATTIC_TOKEN"
|
||||||
|
|
||||||
# Find all output paths of the installables and their build dependencies
|
# Find all output paths of the installables and their build dependencies
|
||||||
readarray -t derivations < <(nix path-info --derivation "$@")
|
readarray -t derivations < <(nix path-info --derivation "${args[@]}")
|
||||||
cache=()
|
cache=()
|
||||||
for derivation in "${derivations[@]}"; do
|
for derivation in "${derivations[@]}"; do
|
||||||
cache+=(
|
cache+=(
|
||||||
|
|
|
||||||
220
complement-baseline.tsv
Normal file
220
complement-baseline.tsv
Normal file
|
|
@ -0,0 +1,220 @@
|
||||||
|
test result
|
||||||
|
GLOBAL FAIL
|
||||||
|
TestACLs PASS
|
||||||
|
TestBannedUserCannotSendJoin FAIL
|
||||||
|
TestCannotSendKnockViaSendKnockInMSC3787Room FAIL
|
||||||
|
TestCannotSendKnockViaSendKnockInMSC3787Room/event_with_mismatched_state_key FAIL
|
||||||
|
TestCannotSendKnockViaSendKnockInMSC3787Room/invite_event FAIL
|
||||||
|
TestCannotSendKnockViaSendKnockInMSC3787Room/join_event FAIL
|
||||||
|
TestCannotSendKnockViaSendKnockInMSC3787Room/leave_event FAIL
|
||||||
|
TestCannotSendKnockViaSendKnockInMSC3787Room/non-state_membership_event FAIL
|
||||||
|
TestCannotSendKnockViaSendKnockInMSC3787Room/regular_event FAIL
|
||||||
|
TestCannotSendNonJoinViaSendJoinV1 FAIL
|
||||||
|
TestCannotSendNonJoinViaSendJoinV1/event_with_mismatched_state_key PASS
|
||||||
|
TestCannotSendNonJoinViaSendJoinV1/invite_event PASS
|
||||||
|
TestCannotSendNonJoinViaSendJoinV1/knock_event PASS
|
||||||
|
TestCannotSendNonJoinViaSendJoinV1/leave_event FAIL
|
||||||
|
TestCannotSendNonJoinViaSendJoinV1/non-state_membership_event PASS
|
||||||
|
TestCannotSendNonJoinViaSendJoinV1/regular_event FAIL
|
||||||
|
TestCannotSendNonJoinViaSendJoinV2 FAIL
|
||||||
|
TestCannotSendNonJoinViaSendJoinV2/event_with_mismatched_state_key PASS
|
||||||
|
TestCannotSendNonJoinViaSendJoinV2/invite_event PASS
|
||||||
|
TestCannotSendNonJoinViaSendJoinV2/knock_event PASS
|
||||||
|
TestCannotSendNonJoinViaSendJoinV2/leave_event FAIL
|
||||||
|
TestCannotSendNonJoinViaSendJoinV2/non-state_membership_event PASS
|
||||||
|
TestCannotSendNonJoinViaSendJoinV2/regular_event FAIL
|
||||||
|
TestCannotSendNonKnockViaSendKnock FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV1 FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV1/event_with_mismatched_state_key FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV1/invite_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV1/join_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV1/knock_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV1/non-state_membership_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV1/regular_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV2 FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV2/event_with_mismatched_state_key FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV2/invite_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV2/join_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV2/knock_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV2/non-state_membership_event FAIL
|
||||||
|
TestCannotSendNonLeaveViaSendLeaveV2/regular_event FAIL
|
||||||
|
TestClientSpacesSummary FAIL
|
||||||
|
TestClientSpacesSummary/max_depth FAIL
|
||||||
|
TestClientSpacesSummary/pagination FAIL
|
||||||
|
TestClientSpacesSummary/query_whole_graph FAIL
|
||||||
|
TestClientSpacesSummary/redact_link FAIL
|
||||||
|
TestClientSpacesSummary/suggested_only FAIL
|
||||||
|
TestClientSpacesSummaryJoinRules FAIL
|
||||||
|
TestContentMediaV1 PASS
|
||||||
|
TestDeviceListsUpdateOverFederation FAIL
|
||||||
|
TestDeviceListsUpdateOverFederation/good_connectivity FAIL
|
||||||
|
TestDeviceListsUpdateOverFederation/interrupted_connectivity FAIL
|
||||||
|
TestDeviceListsUpdateOverFederation/stopped_server FAIL
|
||||||
|
TestDeviceListsUpdateOverFederationOnRoomJoin FAIL
|
||||||
|
TestEventAuth FAIL
|
||||||
|
TestFederatedClientSpaces FAIL
|
||||||
|
TestFederationKeyUploadQuery FAIL
|
||||||
|
TestFederationKeyUploadQuery/Can_claim_remote_one_time_key_using_POST FAIL
|
||||||
|
TestFederationKeyUploadQuery/Can_query_remote_device_keys_using_POST FAIL
|
||||||
|
TestFederationRedactSendsWithoutEvent PASS
|
||||||
|
TestFederationRejectInvite FAIL
|
||||||
|
TestFederationRoomsInvite FAIL
|
||||||
|
TestFederationRoomsInvite/Parallel FAIL
|
||||||
|
TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation FAIL
|
||||||
|
TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation_for_empty_room PASS
|
||||||
|
TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation_several_times FAIL
|
||||||
|
TestFederationRoomsInvite/Parallel/Invited_user_has_'is_direct'_flag_in_prev_content_after_joining PASS
|
||||||
|
TestFederationRoomsInvite/Parallel/Remote_invited_user_can_see_room_metadata PASS
|
||||||
|
TestFederationThumbnail PASS
|
||||||
|
TestGetMissingEventsGapFilling FAIL
|
||||||
|
TestInboundCanReturnMissingEvents FAIL
|
||||||
|
TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_invited_visibility FAIL
|
||||||
|
TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_joined_visibility FAIL
|
||||||
|
TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_shared_visibility FAIL
|
||||||
|
TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_world_readable_visibility FAIL
|
||||||
|
TestInboundFederationKeys PASS
|
||||||
|
TestInboundFederationProfile PASS
|
||||||
|
TestInboundFederationProfile/Inbound_federation_can_query_profile_data PASS
|
||||||
|
TestInboundFederationProfile/Non-numeric_ports_in_server_names_are_rejected PASS
|
||||||
|
TestInboundFederationRejectsEventsWithRejectedAuthEvents FAIL
|
||||||
|
TestIsDirectFlagFederation PASS
|
||||||
|
TestIsDirectFlagLocal PASS
|
||||||
|
TestJoinFederatedRoomFailOver PASS
|
||||||
|
TestJoinFederatedRoomFromApplicationServiceBridgeUser FAIL
|
||||||
|
TestJoinFederatedRoomFromApplicationServiceBridgeUser/join_remote_federated_room_as_application_service_user FAIL
|
||||||
|
TestJoinFederatedRoomWithUnverifiableEvents PASS
|
||||||
|
TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_missing_signatures_shouldn't_block_room_join PASS
|
||||||
|
TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_bad_signatures_shouldn't_block_room_join PASS
|
||||||
|
TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_state_with_unverifiable_auth_events_shouldn't_block_room_join PASS
|
||||||
|
TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_unobtainable_keys_shouldn't_block_room_join PASS
|
||||||
|
TestJoinViaRoomIDAndServerName PASS
|
||||||
|
TestJumpToDateEndpoint FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/federation FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/federation/can_paginate_after_getting_remote_event_from_timestamp_to_event_endpoint FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/federation/looking_backwards,_should_be_able_to_find_event_that_was_sent_before_we_joined FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/federation/looking_forwards,_should_be_able_to_find_event_that_was_sent_before_we_joined FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/federation/when_looking_backwards_before_the_room_was_created,_should_be_able_to_find_event_that_was_imported FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/should_find_event_after_given_timestmap FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/should_find_event_before_given_timestmap FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/should_find_next_event_topologically_after_given_timestmap_when_all_message_timestamps_are_the_same FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/should_find_next_event_topologically_before_given_timestamp_when_all_message_timestamps_are_the_same FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/should_find_nothing_after_the_latest_timestmap PASS
|
||||||
|
TestJumpToDateEndpoint/parallel/should_find_nothing_before_the_earliest_timestmap PASS
|
||||||
|
TestJumpToDateEndpoint/parallel/should_not_be_able_to_query_a_private_room_you_are_not_a_member_of FAIL
|
||||||
|
TestJumpToDateEndpoint/parallel/should_not_be_able_to_query_a_public_room_you_are_not_a_member_of FAIL
|
||||||
|
TestKnockRoomsInPublicRoomsDirectory FAIL
|
||||||
|
TestKnockRoomsInPublicRoomsDirectoryInMSC3787Room FAIL
|
||||||
|
TestKnocking FAIL
|
||||||
|
TestKnockingInMSC3787Room FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_can_knock_on_a_room_without_a_reason FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_can_knock_on_a_room_without_a_reason#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_cannot_knock_on_a_room_they_are_already_in FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_cannot_knock_on_a_room_they_are_already_in#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_cannot_knock_on_a_room_they_are_already_invited_to FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_cannot_knock_on_a_room_they_are_already_invited_to#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_in_the_room_can_accept_a_knock PASS
|
||||||
|
TestKnockingInMSC3787Room/A_user_in_the_room_can_accept_a_knock#01 PASS
|
||||||
|
TestKnockingInMSC3787Room/A_user_in_the_room_can_reject_a_knock FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_in_the_room_can_reject_a_knock#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_that_has_already_knocked_is_allowed_to_knock_again_on_the_same_room FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_that_has_already_knocked_is_allowed_to_knock_again_on_the_same_room#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_that_has_knocked_on_a_local_room_can_rescind_their_knock_and_then_knock_again FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_that_is_banned_from_a_room_cannot_knock_on_it FAIL
|
||||||
|
TestKnockingInMSC3787Room/A_user_that_is_banned_from_a_room_cannot_knock_on_it#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/Attempting_to_join_a_room_with_join_rule_'knock'_without_an_invite_should_fail FAIL
|
||||||
|
TestKnockingInMSC3787Room/Attempting_to_join_a_room_with_join_rule_'knock'_without_an_invite_should_fail#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/Change_the_join_rule_of_a_room_from_'invite'_to_'knock' PASS
|
||||||
|
TestKnockingInMSC3787Room/Change_the_join_rule_of_a_room_from_'invite'_to_'knock'#01 PASS
|
||||||
|
TestKnockingInMSC3787Room/Knocking_on_a_room_with_a_join_rule_other_than_'knock'_should_fail FAIL
|
||||||
|
TestKnockingInMSC3787Room/Knocking_on_a_room_with_a_join_rule_other_than_'knock'_should_fail#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/Knocking_on_a_room_with_join_rule_'knock'_should_succeed FAIL
|
||||||
|
TestKnockingInMSC3787Room/Knocking_on_a_room_with_join_rule_'knock'_should_succeed#01 FAIL
|
||||||
|
TestKnockingInMSC3787Room/Users_in_the_room_see_a_user's_membership_update_when_they_knock FAIL
|
||||||
|
TestKnockingInMSC3787Room/Users_in_the_room_see_a_user's_membership_update_when_they_knock#01 FAIL
|
||||||
|
TestLocalPngThumbnail PASS
|
||||||
|
TestLocalPngThumbnail/test_/_matrix/client/v1/media_endpoint PASS
|
||||||
|
TestMediaFilenames FAIL
|
||||||
|
TestMediaFilenames/Parallel FAIL
|
||||||
|
TestMediaFilenames/Parallel/ASCII FAIL
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_download_file_'ascii' FAIL
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_download_file_'ascii'_over_/_matrix/client/v1/media/download FAIL
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_download_file_'name;with;semicolons' FAIL
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_download_file_'name;with;semicolons'_over_/_matrix/client/v1/media/download FAIL
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_download_file_'name_with_spaces' FAIL
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_download_file_'name_with_spaces'_over_/_matrix/client/v1/media/download FAIL
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_download_specifying_a_different_ASCII_file_name PASS
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_download_specifying_a_different_ASCII_file_name_over__matrix/client/v1/media/download PASS
|
||||||
|
TestMediaFilenames/Parallel/ASCII/Can_upload_with_ASCII_file_name PASS
|
||||||
|
TestMediaFilenames/Parallel/Unicode FAIL
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Can_download_specifying_a_different_Unicode_file_name PASS
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Can_download_specifying_a_different_Unicode_file_name_over__matrix/client/v1/media/download PASS
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_locally FAIL
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_locally_over__matrix/client/v1/media/download FAIL
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_over_federation FAIL
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_over_federation_via__matrix/client/v1/media/download FAIL
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Can_upload_with_Unicode_file_name PASS
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_as_inline SKIP
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_as_inline_via__matrix/client/v1/media/download SKIP
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_with_parameters_as_inline SKIP
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_with_parameters_as_inline_via__matrix/client/v1/media/download SKIP
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Will_serve_unsafe_media_types_as_attachments SKIP
|
||||||
|
TestMediaFilenames/Parallel/Unicode/Will_serve_unsafe_media_types_as_attachments_via__matrix/client/v1/media/download SKIP
|
||||||
|
TestMediaWithoutFileName FAIL
|
||||||
|
TestMediaWithoutFileName/parallel FAIL
|
||||||
|
TestMediaWithoutFileName/parallel/Can_download_without_a_file_name_locally PASS
|
||||||
|
TestMediaWithoutFileName/parallel/Can_download_without_a_file_name_over_federation FAIL
|
||||||
|
TestMediaWithoutFileName/parallel/Can_upload_without_a_file_name PASS
|
||||||
|
TestMediaWithoutFileNameCSMediaV1 PASS
|
||||||
|
TestMediaWithoutFileNameCSMediaV1/parallel PASS
|
||||||
|
TestMediaWithoutFileNameCSMediaV1/parallel/Can_download_without_a_file_name_locally PASS
|
||||||
|
TestMediaWithoutFileNameCSMediaV1/parallel/Can_download_without_a_file_name_over_federation PASS
|
||||||
|
TestMediaWithoutFileNameCSMediaV1/parallel/Can_upload_without_a_file_name PASS
|
||||||
|
TestNetworkPartitionOrdering FAIL
|
||||||
|
TestOutboundFederationIgnoresMissingEventWithBadJSONForRoomVersion6 FAIL
|
||||||
|
TestOutboundFederationProfile PASS
|
||||||
|
TestOutboundFederationProfile/Outbound_federation_can_query_profile_data PASS
|
||||||
|
TestOutboundFederationSend PASS
|
||||||
|
TestRemoteAliasRequestsUnderstandUnicode PASS
|
||||||
|
TestRemotePngThumbnail PASS
|
||||||
|
TestRemotePngThumbnail/test_/_matrix/client/v1/media_endpoint PASS
|
||||||
|
TestRemotePresence FAIL
|
||||||
|
TestRemotePresence/Presence_changes_are_also_reported_to_remote_room_members FAIL
|
||||||
|
TestRemotePresence/Presence_changes_to_UNAVAILABLE_are_reported_to_remote_room_members FAIL
|
||||||
|
TestRemoteTyping FAIL
|
||||||
|
TestRestrictedRoomsLocalJoin FAIL
|
||||||
|
TestRestrictedRoomsLocalJoinInMSC3787Room FAIL
|
||||||
|
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_initially PASS
|
||||||
|
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_when_left_allowed_room FAIL
|
||||||
|
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_with_mangled_join_rules PASS
|
||||||
|
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_succeed_when_invited FAIL
|
||||||
|
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_succeed_when_joined_to_allowed_room PASS
|
||||||
|
TestRestrictedRoomsRemoteJoin FAIL
|
||||||
|
TestRestrictedRoomsRemoteJoinFailOver FAIL
|
||||||
|
TestRestrictedRoomsRemoteJoinFailOverInMSC3787Room FAIL
|
||||||
|
TestRestrictedRoomsRemoteJoinInMSC3787Room FAIL
|
||||||
|
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_initially PASS
|
||||||
|
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_when_left_allowed_room PASS
|
||||||
|
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_with_mangled_join_rules PASS
|
||||||
|
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_succeed_when_invited PASS
|
||||||
|
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_succeed_when_joined_to_allowed_room FAIL
|
||||||
|
TestRestrictedRoomsRemoteJoinLocalUser FAIL
|
||||||
|
TestRestrictedRoomsRemoteJoinLocalUserInMSC3787Room FAIL
|
||||||
|
TestRestrictedRoomsSpacesSummaryFederation FAIL
|
||||||
|
TestRestrictedRoomsSpacesSummaryLocal FAIL
|
||||||
|
TestSendJoinPartialStateResponse SKIP
|
||||||
|
TestSyncOmitsStateChangeOnFilteredEvents PASS
|
||||||
|
TestToDeviceMessagesOverFederation FAIL
|
||||||
|
TestToDeviceMessagesOverFederation/good_connectivity PASS
|
||||||
|
TestToDeviceMessagesOverFederation/interrupted_connectivity FAIL
|
||||||
|
TestToDeviceMessagesOverFederation/stopped_server FAIL
|
||||||
|
TestUnbanViaInvite FAIL
|
||||||
|
TestUnknownEndpoints FAIL
|
||||||
|
TestUnknownEndpoints/Client-server_endpoints PASS
|
||||||
|
TestUnknownEndpoints/Key_endpoints FAIL
|
||||||
|
TestUnknownEndpoints/Media_endpoints PASS
|
||||||
|
TestUnknownEndpoints/Server-server_endpoints PASS
|
||||||
|
TestUnknownEndpoints/Unknown_prefix PASS
|
||||||
|
TestUnrejectRejectedEvents FAIL
|
||||||
|
TestUserAppearsInChangedDeviceListOnJoinOverFederation PASS
|
||||||
|
TestWriteMDirectAccountData PASS
|
||||||
|
|
|
@ -20,6 +20,11 @@
|
||||||
let
|
let
|
||||||
# Keep sorted
|
# Keep sorted
|
||||||
mkScope = pkgs: pkgs.lib.makeScope pkgs.newScope (self: {
|
mkScope = pkgs: pkgs.lib.makeScope pkgs.newScope (self: {
|
||||||
|
complement = self.callPackage ./nix/pkgs/complement {};
|
||||||
|
|
||||||
|
complement-grapevine-oci-image =
|
||||||
|
self.callPackage ./nix/pkgs/complement-grapevine-oci-image { };
|
||||||
|
|
||||||
craneLib =
|
craneLib =
|
||||||
(inputs.crane.mkLib pkgs).overrideToolchain self.toolchain;
|
(inputs.crane.mkLib pkgs).overrideToolchain self.toolchain;
|
||||||
|
|
||||||
|
|
@ -72,6 +77,9 @@
|
||||||
{
|
{
|
||||||
packages = {
|
packages = {
|
||||||
default = (mkScope pkgs).default;
|
default = (mkScope pkgs).default;
|
||||||
|
complement = (mkScope pkgs).complement;
|
||||||
|
complement-grapevine-oci-image =
|
||||||
|
(mkScope pkgs).complement-grapevine-oci-image;
|
||||||
}
|
}
|
||||||
//
|
//
|
||||||
builtins.listToAttrs
|
builtins.listToAttrs
|
||||||
|
|
|
||||||
38
nix/pkgs/complement-grapevine-oci-image/config.toml
Normal file
38
nix/pkgs/complement-grapevine-oci-image/config.toml
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
# this config file is processed with envsubst before being loaded
|
||||||
|
|
||||||
|
server_name = "$SERVER_NAME"
|
||||||
|
|
||||||
|
allow_registration = true
|
||||||
|
|
||||||
|
# complement tests the unauthenticated media endpoints
|
||||||
|
serve_media_unauthenticated = true
|
||||||
|
|
||||||
|
[server_discovery.client]
|
||||||
|
base_url = "https://$SERVER_NAME"
|
||||||
|
|
||||||
|
[federation]
|
||||||
|
trusted_servers = []
|
||||||
|
|
||||||
|
[database]
|
||||||
|
backend = "rocksdb"
|
||||||
|
path = "/app/db"
|
||||||
|
|
||||||
|
[observability.logs]
|
||||||
|
filter = "debug,h2=warn,hyper=warn"
|
||||||
|
# ansi escapes can make it hard to read the log files in an editor
|
||||||
|
colors = false
|
||||||
|
|
||||||
|
[tls]
|
||||||
|
certs = "/app/grapevine.crt"
|
||||||
|
key = "/app/grapevine.key"
|
||||||
|
|
||||||
|
[[listen]]
|
||||||
|
type = "tcp"
|
||||||
|
address = "0.0.0.0"
|
||||||
|
port = 8008
|
||||||
|
|
||||||
|
[[listen]]
|
||||||
|
type = "tcp"
|
||||||
|
address = "0.0.0.0"
|
||||||
|
port = 8448
|
||||||
|
tls = true
|
||||||
78
nix/pkgs/complement-grapevine-oci-image/default.nix
Normal file
78
nix/pkgs/complement-grapevine-oci-image/default.nix
Normal file
|
|
@ -0,0 +1,78 @@
|
||||||
|
# Keep sorted
|
||||||
|
{ buildEnv
|
||||||
|
, coreutils
|
||||||
|
, default
|
||||||
|
, dockerTools
|
||||||
|
, envsubst
|
||||||
|
, moreutils
|
||||||
|
, openssl
|
||||||
|
, writeShellScript
|
||||||
|
, writeTextDir
|
||||||
|
}:
|
||||||
|
|
||||||
|
dockerTools.buildImage {
|
||||||
|
name = "complement-grapevine";
|
||||||
|
|
||||||
|
copyToRoot = buildEnv {
|
||||||
|
name = "image-root";
|
||||||
|
paths = [
|
||||||
|
(writeTextDir "app/config.toml" (builtins.readFile ./config.toml))
|
||||||
|
coreutils
|
||||||
|
default
|
||||||
|
moreutils
|
||||||
|
envsubst
|
||||||
|
openssl
|
||||||
|
];
|
||||||
|
pathsToLink = [ "/bin" "/app" ];
|
||||||
|
};
|
||||||
|
|
||||||
|
config = {
|
||||||
|
ExposedPorts = {
|
||||||
|
"8008/tcp" = {};
|
||||||
|
"8448/tcp" = {};
|
||||||
|
};
|
||||||
|
Cmd = [
|
||||||
|
(writeShellScript "docker-entrypoint.sh" ''
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
mkdir -p /tmp
|
||||||
|
|
||||||
|
# trust certs signed by the complement test CA
|
||||||
|
mkdir -p /etc/ssl/certs
|
||||||
|
# we don't have any other trusted certs, so just replace this file
|
||||||
|
# entirely
|
||||||
|
cp /complement/ca/ca.crt /etc/ssl/certs/ca-certificates.crt
|
||||||
|
|
||||||
|
# sign our TLS cert with the complement test CA
|
||||||
|
cat > /app/v3.ext <<EOF
|
||||||
|
authorityKeyIdentifier=keyid,issuer
|
||||||
|
basicConstraints=CA:FALSE
|
||||||
|
keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
|
||||||
|
subjectAltName=DNS:$SERVER_NAME
|
||||||
|
EOF
|
||||||
|
openssl genrsa \
|
||||||
|
-out /app/grapevine.key \
|
||||||
|
2048
|
||||||
|
openssl req -new \
|
||||||
|
-sha256 \
|
||||||
|
-key /app/grapevine.key \
|
||||||
|
-subj "/CN=$SERVER_NAME" \
|
||||||
|
-out /app/grapevine.csr
|
||||||
|
openssl x509 -req \
|
||||||
|
-in /app/grapevine.csr \
|
||||||
|
-CA /complement/ca/ca.crt \
|
||||||
|
-CAkey /complement/ca/ca.key \
|
||||||
|
-CAcreateserial \
|
||||||
|
-out /app/grapevine.crt \
|
||||||
|
-extfile /app/v3.ext \
|
||||||
|
-days 365 \
|
||||||
|
-sha256
|
||||||
|
|
||||||
|
envsubst --no-unset < /app/config.toml | sponge /app/config.toml
|
||||||
|
|
||||||
|
export RUST_BACKTRACE=full
|
||||||
|
grapevine serve --config /app/config.toml
|
||||||
|
'')
|
||||||
|
];
|
||||||
|
};
|
||||||
|
}
|
||||||
33
nix/pkgs/complement/default.nix
Normal file
33
nix/pkgs/complement/default.nix
Normal file
|
|
@ -0,0 +1,33 @@
|
||||||
|
# Dependencies (keep sorted)
|
||||||
|
{ buildGoModule
|
||||||
|
, fetchFromGitHub
|
||||||
|
, lib
|
||||||
|
, olm
|
||||||
|
}:
|
||||||
|
|
||||||
|
buildGoModule {
|
||||||
|
name = "complement";
|
||||||
|
|
||||||
|
src = fetchFromGitHub {
|
||||||
|
owner = "matrix-org";
|
||||||
|
repo = "complement";
|
||||||
|
rev = "5b72981ea3de814a2c41e010dbc0a2cffcbd2e79";
|
||||||
|
hash = "sha256-kSZcuXHHRsq6Vfv2rDBq682VdUkeRWse944fR25Zc0A=";
|
||||||
|
};
|
||||||
|
|
||||||
|
vendorHash = "sha256-mNt6lI4ppVjZC5UMsNqEDHQ0kjXutEFZTCMvXwp24ew=";
|
||||||
|
|
||||||
|
buildInputs = [ olm ];
|
||||||
|
|
||||||
|
doCheck = false;
|
||||||
|
postBuild = ''
|
||||||
|
# compiles the tests into a binary
|
||||||
|
go test -c ./tests -o "$GOPATH/bin/complement.test"
|
||||||
|
'';
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
description = "Matrix compliance test suite";
|
||||||
|
homepage = "https://github.com/matrix-org/complement";
|
||||||
|
license = lib.licenses.asl20;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
@ -84,6 +84,7 @@ let
|
||||||
"Cargo.lock"
|
"Cargo.lock"
|
||||||
"Cargo.toml"
|
"Cargo.toml"
|
||||||
"src"
|
"src"
|
||||||
|
"xtask"
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -2,8 +2,11 @@
|
||||||
{ buildPlatform
|
{ buildPlatform
|
||||||
, default
|
, default
|
||||||
, engage
|
, engage
|
||||||
|
, complement
|
||||||
|
, go
|
||||||
, inputs
|
, inputs
|
||||||
, jq
|
, jq
|
||||||
|
, lib
|
||||||
, lychee
|
, lychee
|
||||||
, markdownlint-cli
|
, markdownlint-cli
|
||||||
, mdbook
|
, mdbook
|
||||||
|
|
@ -17,6 +20,13 @@ mkShell {
|
||||||
# sources, and it can read this environment variable to do so. The
|
# sources, and it can read this environment variable to do so. The
|
||||||
# `rust-src` component is required in order for this to work.
|
# `rust-src` component is required in order for this to work.
|
||||||
RUST_SRC_PATH = "${toolchain}/lib/rustlib/src/rust/library";
|
RUST_SRC_PATH = "${toolchain}/lib/rustlib/src/rust/library";
|
||||||
|
|
||||||
|
# See the doc comment on `use_xtask_path` in `xtask/src/main.rs`.
|
||||||
|
GRAPEVINE_XTASK_PATH = lib.makeBinPath [
|
||||||
|
# Keep sorted
|
||||||
|
complement
|
||||||
|
go
|
||||||
|
];
|
||||||
};
|
};
|
||||||
|
|
||||||
# Development tools
|
# Development tools
|
||||||
|
|
|
||||||
21
xtask/Cargo.toml
Normal file
21
xtask/Cargo.toml
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
[package]
|
||||||
|
name = "xtask"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
license.workspace = true
|
||||||
|
rust-version.workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
clap.workspace = true
|
||||||
|
miette.workspace = true
|
||||||
|
process-wrap.workspace = true
|
||||||
|
indicatif.workspace = true
|
||||||
|
rand.workspace = true
|
||||||
|
serde.workspace = true
|
||||||
|
serde_json.workspace = true
|
||||||
|
signal-hook.workspace = true
|
||||||
|
strum.workspace = true
|
||||||
|
xshell.workspace = true
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
142
xtask/src/complement.rs
Normal file
142
xtask/src/complement.rs
Normal file
|
|
@ -0,0 +1,142 @@
|
||||||
|
use std::{
|
||||||
|
fs::{self},
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
};
|
||||||
|
|
||||||
|
use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr};
|
||||||
|
use serde::Deserialize;
|
||||||
|
use xshell::{cmd, Shell};
|
||||||
|
|
||||||
|
mod docker;
|
||||||
|
mod summary;
|
||||||
|
mod test2json;
|
||||||
|
|
||||||
|
use self::{
|
||||||
|
docker::{load_docker_image, retag_docker_image},
|
||||||
|
summary::{compare_summary, read_summary},
|
||||||
|
test2json::{count_complement_tests, run_complement},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Runs complement tests, writes results to an output directory, and compares
|
||||||
|
/// results with a baseline.
|
||||||
|
///
|
||||||
|
/// The output directory structure is
|
||||||
|
///
|
||||||
|
/// - `$out/summary.tsv`: a TSV file with the pass/fail/skip result for each
|
||||||
|
/// test
|
||||||
|
///
|
||||||
|
/// - `$out/raw-log.jsonl`: raw output of the go test2json tool
|
||||||
|
///
|
||||||
|
/// - `$out/logs/...`: a text file named `$test.txt` for each test, containing
|
||||||
|
/// the test logs.
|
||||||
|
///
|
||||||
|
/// These files will be updated incrementally during the test run. When the run
|
||||||
|
/// the complete, the wrapper compares the results in `$out/summary.tsv`
|
||||||
|
/// against the baseline result. If there are any differences, it exits with an
|
||||||
|
/// error.
|
||||||
|
///
|
||||||
|
/// The expected workflow is to run this after making changes to Grapevine, to
|
||||||
|
/// look for regressions in tests that were previously passing. If you make
|
||||||
|
/// change that fix an existing failing test, you need to make sure that they
|
||||||
|
/// did not introduce any regressions, and then copy the `summary.tsv` file from
|
||||||
|
/// your test run over the existing `complement-baseline.tsv` file in the
|
||||||
|
/// repository root. The intent is that `complement-baseline.tsv` should always
|
||||||
|
/// be in sync with the expected results from a test run.
|
||||||
|
#[derive(clap::Args)]
|
||||||
|
pub(crate) struct Args {
|
||||||
|
/// Directory to write test results
|
||||||
|
///
|
||||||
|
/// This directory will be created automatically, but it must be empty.
|
||||||
|
/// If it exists and is not empty, an error will be returned.
|
||||||
|
#[clap(short, long)]
|
||||||
|
out: PathBuf,
|
||||||
|
|
||||||
|
/// Baseline test summary file to compare with
|
||||||
|
///
|
||||||
|
/// If unspecified, defaults to `$repo_root/complement-baseline.tsv`
|
||||||
|
#[clap(short, long)]
|
||||||
|
baseline: Option<PathBuf>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::needless_pass_by_value)]
|
||||||
|
pub(crate) fn main(args: Args, sh: &Shell) -> Result<()> {
|
||||||
|
let toplevel = get_toplevel_path(sh)
|
||||||
|
.wrap_err("failed to determine repository root directory")?;
|
||||||
|
let baseline_path = args
|
||||||
|
.baseline
|
||||||
|
.unwrap_or_else(|| toplevel.join("complement-baseline.tsv"));
|
||||||
|
let baseline = read_summary(&baseline_path).wrap_err_with(|| {
|
||||||
|
format!(
|
||||||
|
"failed to read baseline test result summary from \
|
||||||
|
{baseline_path:?}"
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
create_out_dir(&args.out).wrap_err_with(|| {
|
||||||
|
format!("error initializing output directory {:?}", args.out)
|
||||||
|
})?;
|
||||||
|
let docker_image = load_docker_image(sh, &toplevel).wrap_err(
|
||||||
|
"failed to build and load complement-grapevine docker image",
|
||||||
|
)?;
|
||||||
|
let docker_image = retag_docker_image(sh, &docker_image)
|
||||||
|
.wrap_err("failed to retag docker image")?;
|
||||||
|
let test_count = count_complement_tests(sh, &docker_image)
|
||||||
|
.wrap_err("failed to determine total complement test count")?;
|
||||||
|
let results = run_complement(sh, &args.out, &docker_image, test_count)
|
||||||
|
.wrap_err("failed to run complement tests")?;
|
||||||
|
let summary_path = args.out.join("summary.tsv");
|
||||||
|
compare_summary(&baseline, &results, &baseline_path, &summary_path)?;
|
||||||
|
println!("\nTest results were identical to baseline.");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Deserialize a single-line json string using [`serde_json::from_str`] and
|
||||||
|
/// convert the error to a miette diagnostic.
|
||||||
|
///
|
||||||
|
/// # Panics
|
||||||
|
/// Panics if `line` contains a newline.
|
||||||
|
fn from_json_line<'a, T: Deserialize<'a>>(line: &'a str) -> Result<T> {
|
||||||
|
assert!(
|
||||||
|
!line.contains('\n'),
|
||||||
|
"from_json_line requires single-line json source"
|
||||||
|
);
|
||||||
|
serde_json::from_str(line).map_err(|e| {
|
||||||
|
// Needs single-line input so that we don't have to deal with converting
|
||||||
|
// line/column to a span offset.
|
||||||
|
let offset = e.column() - 1;
|
||||||
|
let label = LabeledSpan::at_offset(offset, "error here");
|
||||||
|
miette!(labels = vec![label], "{e}").with_source_code(line.to_owned())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Ensures that output directory exists and is empty
|
||||||
|
///
|
||||||
|
/// If the directory does not exist, it will be created. If it is not empty, an
|
||||||
|
/// error will be returned.
|
||||||
|
///
|
||||||
|
/// We have no protection against concurrent programs modifying the contents of
|
||||||
|
/// the directory while the complement wrapper tool is running.
|
||||||
|
fn create_out_dir(out: &Path) -> Result<()> {
|
||||||
|
fs::create_dir_all(out)
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("error creating output directory")?;
|
||||||
|
let mut entries = fs::read_dir(out)
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("error checking current contents of output directory")?;
|
||||||
|
if entries.next().is_some() {
|
||||||
|
return Err(miette!(
|
||||||
|
"output directory is not empty. Refusing to run, instead of \
|
||||||
|
possibly overwriting existing files."
|
||||||
|
));
|
||||||
|
}
|
||||||
|
fs::create_dir(out.join("logs"))
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("error creating logs subdirectory in output directory")?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the path to the repository root
|
||||||
|
fn get_toplevel_path(sh: &Shell) -> Result<PathBuf> {
|
||||||
|
let path =
|
||||||
|
cmd!(sh, "git rev-parse --show-toplevel").read().into_diagnostic()?;
|
||||||
|
Ok(path.into())
|
||||||
|
}
|
||||||
133
xtask/src/complement/docker.rs
Normal file
133
xtask/src/complement/docker.rs
Normal file
|
|
@ -0,0 +1,133 @@
|
||||||
|
//! Functions for working with docker images and containers.
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr};
|
||||||
|
use rand::{distributions::Alphanumeric, thread_rng, Rng};
|
||||||
|
use serde::Deserialize;
|
||||||
|
use xshell::{cmd, Shell};
|
||||||
|
|
||||||
|
use super::from_json_line;
|
||||||
|
|
||||||
|
/// Build the 'grapevine-complement' OCI image and load it into the docker
/// daemon.
///
/// Returns the image reference (`repository:tag`) reported by
/// `docker image load`.
pub(crate) fn load_docker_image(sh: &Shell, toplevel: &Path) -> Result<String> {
    // > i would Not trust that parser as far as i can throw it
    // - @jade_:matrix.org, 2024-06-19
    //
    // So we're not even gonna try to escape the arbitrary top level path
    // correctly for a flake installable reference. Instead we're just gonna cd
    // into toplevel before running nix commands.
    let _pushd_guard = sh.push_dir(toplevel);

    let installable = ".#complement-grapevine-oci-image";
    cmd!(sh, "nix-build-and-cache just {installable} -- --no-link")
        .run()
        .into_diagnostic()
        .wrap_err("error building complement-grapevine-oci-image")?;
    // Ask nix where the build output landed, since we passed --no-link above.
    let oci_image_path = cmd!(sh, "nix path-info {installable}")
        .read()
        .into_diagnostic()
        .wrap_err(
            "error getting nix store path for complement-grapevine-oci-image",
        )?;

    // Instead of building the image with a fixed tag, we let nix choose the tag
    // based on the input hash, and then determine the image/tag it used by
    // parsing the 'docker load' output. This is to avoid a race condition
    // between multiple concurrent 'xtask complement' invocations, which might
    // otherwise assign the same tag to different images.
    let load_output = cmd!(sh, "docker image load --input {oci_image_path}")
        .read()
        .into_diagnostic()
        .wrap_err("error loading complement-grapevine docker image")?;
    let expected_prefix = "Loaded image: ";
    let docker_image = load_output
        .strip_prefix(expected_prefix)
        .ok_or_else(|| {
            // Miette doesn't support inclusive ranges.
            // <https://github.com/zkat/miette/pull/385>
            #[allow(clippy::range_plus_one)]
            // Label however much of the expected prefix fits in the actual
            // output, so the diagnostic points at the mismatch.
            let span = 0..(expected_prefix.len().min(load_output.len()) + 1);
            let label =
                LabeledSpan::at(span, format!("Expected {expected_prefix:?}"));
            miette!(
                labels = vec![label],
                "failed to parse 'docker image load' output"
            )
            .with_source_code(load_output.clone())
        })?
        .to_owned();
    Ok(docker_image)
}
|
||||||
|
|
||||||
|
/// Retags the docker image with a random tag. Returns the new image reference.
|
||||||
|
///
|
||||||
|
/// This is useful so that we can uniquely identify the set of docker containers
|
||||||
|
/// spawned by a complement run. Without using a unique tag, there is no way to
|
||||||
|
/// determine which docker containers to kill if a run is cancelled, since other
|
||||||
|
/// concurrent complement runs may have created containers with the same image.
|
||||||
|
pub(crate) fn retag_docker_image(sh: &Shell, image: &str) -> Result<String> {
|
||||||
|
let mut rng = thread_rng();
|
||||||
|
let new_tag: String =
|
||||||
|
(0..16).map(|_| char::from(rng.sample(Alphanumeric))).collect();
|
||||||
|
let (repo, _old_tag) = image.split_once(':').ok_or_else(|| {
|
||||||
|
miette!(
|
||||||
|
"Docker image reference was not in the expected format. Expected \
|
||||||
|
\"{{repository}}:{{tag}}\", got {image:?}"
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
let new_image = format!("{repo}:{new_tag}");
|
||||||
|
cmd!(sh, "docker image tag {image} {new_image}").run().into_diagnostic()?;
|
||||||
|
Ok(new_image)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Kills all docker containers using a particular image.
|
||||||
|
///
|
||||||
|
/// This can be used to clean up dangling docker images after a cancelled
|
||||||
|
/// complement run, but it's important that the image reference be unique. See
|
||||||
|
/// the [`retag_docker_image`] function for a discussion of this.
|
||||||
|
pub(crate) fn kill_docker_containers(sh: &Shell, image: &str) -> Result<()> {
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct ContainerInfo {
|
||||||
|
#[serde(rename = "ID")]
|
||||||
|
id: String,
|
||||||
|
#[serde(rename = "Image")]
|
||||||
|
image: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
// --filter ancestor={image} doesn't work here, because images with the same
|
||||||
|
// image id will be picked up even if their image reference (repo:tag) are
|
||||||
|
// different. We need to list all the containers and filter them ourselves.
|
||||||
|
let containers = cmd!(sh, "docker container ls --format json")
|
||||||
|
.read()
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("error listing running docker containers")?;
|
||||||
|
let containers = containers
|
||||||
|
.lines()
|
||||||
|
.map(from_json_line)
|
||||||
|
.collect::<Result<Vec<ContainerInfo>, _>>()
|
||||||
|
.wrap_err(
|
||||||
|
"error parsing docker container info from 'docker container ls' \
|
||||||
|
output",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let our_containers = containers
|
||||||
|
.into_iter()
|
||||||
|
.filter(|container| container.image == image)
|
||||||
|
.map(|container| container.id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
if !our_containers.is_empty() {
|
||||||
|
// Ignore non-zero exit status because 'docker kill' will fail if
|
||||||
|
// containers already exited before sending the signal, which is
|
||||||
|
// fine.
|
||||||
|
cmd!(sh, "docker kill --signal=SIGKILL {our_containers...}")
|
||||||
|
.ignore_status()
|
||||||
|
.run()
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("error killing docker containers")?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
286
xtask/src/complement/summary.rs
Normal file
286
xtask/src/complement/summary.rs
Normal file
|
|
@ -0,0 +1,286 @@
|
||||||
|
//! Functions for working with the `summary.tsv` files emitted by the complement
|
||||||
|
//! wrapper.
|
||||||
|
//!
|
||||||
|
//! This file is a TSV containing test names and results for each test in a
|
||||||
|
//! complement run.
|
||||||
|
|
||||||
|
use std::{
|
||||||
|
collections::BTreeMap,
|
||||||
|
fs,
|
||||||
|
io::{BufWriter, Write},
|
||||||
|
path::Path,
|
||||||
|
};
|
||||||
|
|
||||||
|
use miette::{
|
||||||
|
miette, IntoDiagnostic, LabeledSpan, NamedSource, Result, WrapErr,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::test2json::TestResult;
|
||||||
|
|
||||||
|
/// Map from full test name (including any `Parent/Sub` subtest path) to its
/// final result, kept sorted by name via `BTreeMap`.
pub(crate) type TestResults = BTreeMap<String, TestResult>;
|
||||||
|
|
||||||
|
/// Escape a string value for use in a TSV file.
///
/// According to the [tsv spec][1], the only characters that need to be escaped
/// are `\n`, `\t`, `\r`, and `\`.
///
/// [1]: https://www.loc.gov/preservation/digital/formats/fdd/fdd000533.shtml
fn escape_tsv_value(value: &str) -> String {
    // Single pass over the input, translating only the four characters the
    // spec requires us to escape.
    let mut escaped = String::with_capacity(value.len());
    for c in value.chars() {
        match c {
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '\t' => escaped.push_str("\\t"),
            '\r' => escaped.push_str("\\r"),
            other => escaped.push(other),
        }
    }
    escaped
}
|
||||||
|
|
||||||
|
/// Converts a string from a TSV value from to unescaped form.
///
/// Inverse of `escape_tsv_value`. Unrecognized escape sequences and a trailing
/// lone backslash are preserved verbatim rather than rejected.
fn unescape_tsv_value(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    let mut chars = value.chars();
    while let Some(c) = chars.next() {
        if c != '\\' {
            out.push(c);
            continue;
        }
        match chars.next() {
            Some('\\') => out.push('\\'),
            Some('n') => out.push('\n'),
            Some('t') => out.push('\t'),
            Some('r') => out.push('\r'),
            // Unknown escape: keep both characters as they appeared.
            Some(other) => {
                out.push('\\');
                out.push(other);
            }
            // Input ended on a backslash: keep it.
            None => out.push('\\'),
        }
    }
    out
}
|
||||||
|
|
||||||
|
/// Write a test result summary to a writer.
|
||||||
|
pub(crate) fn write_summary<W: Write>(
|
||||||
|
w: &mut BufWriter<W>,
|
||||||
|
summary: &TestResults,
|
||||||
|
) -> Result<()> {
|
||||||
|
// Write header line
|
||||||
|
writeln!(w, "test\tresult").into_diagnostic()?;
|
||||||
|
// Write rows
|
||||||
|
for (test, result) in summary {
|
||||||
|
writeln!(
|
||||||
|
w,
|
||||||
|
"{}\t{}",
|
||||||
|
escape_tsv_value(test),
|
||||||
|
escape_tsv_value(&result.to_string())
|
||||||
|
)
|
||||||
|
.into_diagnostic()?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reads test result summary from a TSV file written by a previous run of the
/// complement wrapper.
///
/// Validates the header row and that every data row has exactly two columns;
/// all parse failures are reported as miette diagnostics with spans pointing
/// into the file.
pub(crate) fn read_summary(
    path: &Path,
) -> Result<BTreeMap<String, TestResult>> {
    let contents = fs::read_to_string(path)
        .into_diagnostic()
        .wrap_err("failed to read summary file contents")?;
    // NamedSource lets diagnostics label spans inside the file by its path.
    let source = NamedSource::new(path.to_string_lossy(), contents);
    let contents = &source.inner();

    // Byte offset of the start of the current line within `contents`; used to
    // translate line-relative indices into absolute diagnostic spans.
    let mut offset = 0;
    // The TSV spec allows CRLF, but we never emit these ourselves
    let mut lines = contents.split('\n');

    let header_line = lines.next().ok_or_else(|| {
        miette!(
            labels = vec![LabeledSpan::at_offset(0, "expected header row")],
            "summary file missing header row",
        )
        .with_source_code(source.clone())
    })?;
    let expected_header_line = "test\tresult";
    if header_line != expected_header_line {
        return Err(miette!(
            labels = vec![LabeledSpan::at(
                0..header_line.len(),
                "unexpected header"
            )],
            "summary file header row has unexpected columns. Expecting \
             {expected_header_line:?}."
        )
        .with_source_code(source));
    }
    // +1 accounts for the '\n' consumed by split.
    offset += header_line.len() + 1;

    let mut results = BTreeMap::new();
    for line in lines {
        // Skip blank lines, including the empty tail after a trailing newline.
        if line.is_empty() {
            continue;
        }

        let tabs = line.match_indices('\t').collect::<Vec<_>>();
        let column_count = tabs.len() + 1;
        // Exactly one tab (two columns) is valid; too few or too many columns
        // get a diagnostic with a span at the offending position.
        let (result_span, test, result) = match tabs[..] {
            [(first_tab, _)] => {
                let result_span = offset + first_tab + 1..offset + line.len();
                let test = line.get(..first_tab).expect(
                    "index should be valid because it was returned from \
                     'match_indices'",
                );
                let result = line.get(first_tab + 1..).expect(
                    "index should be valid because it was returned from \
                     'match_indices'",
                );
                (result_span, test, result)
            }
            [] => {
                return Err(miette!(
                    labels = vec![LabeledSpan::at_offset(
                        offset + line.len(),
                        "expected more columns here"
                    )],
                    "each row in the summary file should have exactly two \
                     columns. This row only has {column_count} columns.",
                )
                .with_source_code(source))
            }
            [_, (first_bad_tab, _), ..] => {
                let span = offset + first_bad_tab..offset + line.len();
                return Err(miette!(
                    labels =
                        vec![LabeledSpan::at(span, "unexpected extra columns")],
                    "each row in the summary file should have exactly two \
                     columns. This row has {column_count} columns.",
                )
                .with_source_code(source));
            }
        };

        let test = unescape_tsv_value(test);
        let result = unescape_tsv_value(result);

        // Parses via TestResult's strum-derived FromStr ("PASS"/"FAIL"/"SKIP").
        let result = result.parse().map_err(|_| {
            miette!(
                labels =
                    vec![LabeledSpan::at(result_span, "invalid result value")],
                "test result value must be one of 'PASS', 'FAIL', or 'SKIP'."
            )
            .with_source_code(source.clone())
        })?;

        results.insert(test, result);
        offset += line.len() + 1;
    }
    Ok(results)
}
|
||||||
|
|
||||||
|
/// Print a bulleted list of test names, truncating if there are too many.
fn print_truncated_tests(tests: &[&str]) {
    let max = 5;
    // Show at most `max` entries…
    for test in tests.iter().take(max) {
        println!(" - {test}");
    }
    // …and summarize how many were omitted.
    if tests.len() > max {
        println!("  ... ({} more)", tests.len() - max);
    }
}
|
||||||
|
|
||||||
|
/// Compares new test results against older results, returning a error if they
/// differ.
///
/// A description of the differences will be logged separately from the returned
/// error.
///
/// `old_path`/`new_path` are only used for the error's help text, telling the
/// user which file to copy where if the differences are expected.
pub(crate) fn compare_summary(
    old: &TestResults,
    new: &TestResults,
    old_path: &Path,
    new_path: &Path,
) -> Result<()> {
    // Buckets for each kind of difference between the two runs.
    let mut unexpected_pass: Vec<&str> = Vec::new();
    let mut unexpected_fail: Vec<&str> = Vec::new();
    let mut unexpected_skip: Vec<&str> = Vec::new();
    let mut added: Vec<&str> = Vec::new();
    let mut removed: Vec<&str> = Vec::new();

    // Tests present in the new run: either changed result, or newly added.
    for (test, new_result) in new {
        if let Some(old_result) = old.get(test) {
            if old_result != new_result {
                match new_result {
                    TestResult::Pass => unexpected_pass.push(test),
                    TestResult::Fail => unexpected_fail.push(test),
                    TestResult::Skip => unexpected_skip.push(test),
                }
            }
        } else {
            added.push(test);
        }
    }
    // Tests present in the baseline but missing from the new run.
    for test in old.keys() {
        if !new.contains_key(test) {
            removed.push(test);
        }
    }

    let mut differences = false;
    if !added.is_empty() {
        differences = true;
        println!(
            "\n{} tests were added that were not present in the baseline:",
            added.len()
        );
        print_truncated_tests(&added);
    }
    if !removed.is_empty() {
        differences = true;
        println!(
            "\n{} tests present in the baseline were removed:",
            removed.len()
        );
        print_truncated_tests(&removed);
    }
    if !unexpected_pass.is_empty() {
        differences = true;
        println!(
            "\n{} tests passed that did not pass in the baseline:",
            unexpected_pass.len()
        );
        print_truncated_tests(&unexpected_pass);
    }
    if !unexpected_skip.is_empty() {
        differences = true;
        println!(
            "\n{} tests skipped that were not skipped in the baseline:",
            unexpected_skip.len()
        );
        print_truncated_tests(&unexpected_skip);
    }
    if !unexpected_fail.is_empty() {
        differences = true;
        println!(
            "\n{} tests failed that did not fail in the baseline (these are \
             likely regressions):",
            unexpected_fail.len()
        );
        print_truncated_tests(&unexpected_fail);
    }

    if differences {
        Err(miette!(
            help = format!(
                "Evaluate each of the differences to determine whether they \
                 are expected. If all differences are expected, copy the new \
                 summary file {new_path:?} to {old_path:?} and commit the \
                 change. If some differences are unexpected, fix them and try \
                 another test run.\n\nAn example of an expected change would \
                 be a test that is now passing after your changes fixed it. \
                 An example of an unexpected change would be an unrelated \
                 test that is now failing, which would be a regression."
            ),
            "Test results differed from baseline in {old_path:?}. The \
             differences are described above."
        ))
    } else {
        Ok(())
    }
}
|
||||||
514
xtask/src/complement/test2json.rs
Normal file
514
xtask/src/complement/test2json.rs
Normal file
|
|
@ -0,0 +1,514 @@
|
||||||
|
//! Functions for working with the go [`test2json`][test2json] tool.
|
||||||
|
//!
|
||||||
|
//! [test2json]: https://pkg.go.dev/cmd/test2json@go1.22.4
|
||||||
|
|
||||||
|
use std::{
|
||||||
|
collections::BTreeMap,
|
||||||
|
fs::{self, File},
|
||||||
|
io::{BufRead, BufReader, BufWriter, Seek, SeekFrom, Write},
|
||||||
|
mem, panic,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
process::{Command, Stdio},
|
||||||
|
sync::{
|
||||||
|
atomic::{AtomicBool, Ordering},
|
||||||
|
Arc, Mutex,
|
||||||
|
},
|
||||||
|
thread,
|
||||||
|
time::Duration,
|
||||||
|
};
|
||||||
|
|
||||||
|
use indicatif::{ProgressBar, ProgressStyle};
|
||||||
|
use miette::{miette, IntoDiagnostic, Result, WrapErr};
|
||||||
|
use process_wrap::std::{ProcessGroup, StdChildWrapper, StdCommandWrap};
|
||||||
|
use serde::Deserialize;
|
||||||
|
use signal_hook::{
|
||||||
|
consts::signal::{SIGINT, SIGQUIT, SIGTERM},
|
||||||
|
flag,
|
||||||
|
iterator::Signals,
|
||||||
|
};
|
||||||
|
use strum::{Display, EnumString};
|
||||||
|
use xshell::{cmd, Shell};
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
docker::kill_docker_containers,
|
||||||
|
from_json_line,
|
||||||
|
summary::{write_summary, TestResults},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Returns the total number of complement tests that will be run
|
||||||
|
///
|
||||||
|
/// This is only able to count toplevel tests, and will not included subtests
|
||||||
|
/// (`A/B`)
|
||||||
|
pub(crate) fn count_complement_tests(
|
||||||
|
sh: &Shell,
|
||||||
|
docker_image: &str,
|
||||||
|
) -> Result<u64> {
|
||||||
|
let test_list = cmd!(sh, "go tool test2json complement.test -test.list .*")
|
||||||
|
.env("COMPLEMENT_BASE_IMAGE", docker_image)
|
||||||
|
.read()
|
||||||
|
.into_diagnostic()?;
|
||||||
|
let test_count = u64::try_from(test_list.lines().count())
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("test count overflowed u64")?;
|
||||||
|
Ok(test_count)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run complement tests.
///
/// This function mostly deals with handling shutdown signals, while the actual
/// logic for running complement is in `run_complement_inner`, which is spawned
/// as a separate thread. This is necessary because the go `test2json` tool
/// ignores SIGTERM and SIGINT. Without signal handling on our end, terminating
/// the complement wrapper process would leave a dangling complement child
/// process running.
///
/// The reason that `test2json` does this is that it does not implement any kind
/// of test cleanup, and so the developers decided that ignoring termination
/// signals entirely was safer. Running go unit tests outside of `test2json`
/// (and so without machine-readable output) does not have this limitation.
/// Unfortunately neither of these are an option for us. We need
/// machine-readable output to compare against the baseline result. Complement
/// runs can take 40+ minutes, so being able to cancel them is a requirement.
///
/// Because we don't trigger any of the normal cleanup, we need to handle
/// dangling docker containers ourselves.
pub(crate) fn run_complement(
    sh: &Shell,
    out: &Path,
    docker_image: &str,
    test_count: u64,
) -> Result<TestResults> {
    let term_signals = [SIGTERM, SIGINT, SIGQUIT];

    let term_now = Arc::new(AtomicBool::new(false));
    for sig in &term_signals {
        // Terminate immediately if `term_now` is true and we receive a
        // terminating signal
        flag::register_conditional_shutdown(*sig, 1, Arc::clone(&term_now))
            .into_diagnostic()
            .wrap_err("error registering signal handler")?;
    }

    let mut signals = Signals::new(term_signals).unwrap();

    // Shared state machine guarding the child process handle; see
    // `ComplementRunnerState` for the transition rules.
    let state = Mutex::new(ComplementRunnerState::Startup);
    let signals_handle = signals.handle();

    // Scoped threads let the runner thread borrow `state`/`out` without
    // 'static bounds.
    let result = thread::scope(|s| {
        let state_ref = &state;
        let cloned_sh = sh.clone();
        let thread_handle = s.spawn(move || {
            let panic_result = panic::catch_unwind(|| {
                run_complement_inner(
                    &cloned_sh,
                    out,
                    docker_image,
                    test_count,
                    state_ref,
                )
            });
            // Stop the signal-handling loop, even if we panicked
            signals_handle.close();
            match panic_result {
                Ok(result) => result,
                Err(panic) => panic::resume_unwind(panic),
            }
        });

        // Block on the signal iterator: it yields a signal on cancellation, or
        // ends when the runner thread closes the handle (normal completion).
        let canceled = if let Some(signal) = signals.forever().next() {
            let description = match signal {
                SIGTERM => "SIGTERM",
                SIGINT => "ctrl+c",
                SIGQUIT => "SIGQUIT",
                _ => unreachable!(),
            };
            eprintln!(
                "Received {description}, stopping complement run. Send \
                 {description} a second time to terminate without cleaning \
                 up, which may leave dangling processes and docker containers"
            );
            // From now on a second signal kills us immediately (see the
            // conditional shutdown registered above).
            term_now.store(true, Ordering::Relaxed);

            {
                let mut state = state.lock().unwrap();
                let old_state =
                    mem::replace(&mut *state, ComplementRunnerState::Shutdown);
                match old_state {
                    ComplementRunnerState::Startup => (),
                    ComplementRunnerState::Shutdown => unreachable!(),
                    ComplementRunnerState::Running(mut child) => {
                        // Killing the child process should terminate the
                        // complement runner thread in a
                        // bounded amount of time, because it will cause the
                        // stdout reader to return EOF.
                        child.kill().unwrap();
                    }
                }
            }

            kill_docker_containers(sh, docker_image).wrap_err(
                "failed to kill dangling complement docker containers",
            )?;

            true
        } else {
            // hit this branch if the signal handler is closed by the complement
            // runner thread. This means the complement run finished
            // without being canceled.
            false
        };

        match thread_handle.join() {
            Ok(result) => {
                if canceled {
                    Err(miette!("complement run was canceled"))
                } else {
                    result
                }
            }
            Err(panic_value) => panic::resume_unwind(panic_value),
        }
    });

    // From this point on, terminate immediately when signalled
    term_now.store(true, Ordering::Relaxed);

    result
}
|
||||||
|
|
||||||
|
/// Possible states for the complement runner thread.
///
/// The current state should be protected by a mutex, where state changes are
/// only performed while the mutex is locked. This is to prevent a race
/// condition where the main thread handles a shutdown signal at the same time
/// that the complement runner thread is starting the child process, and so the
/// main thread fails to kill the child process.
///
/// Valid state transitions:
///
/// - `Startup` -> `Running`
/// - `Startup` -> `Shutdown`
/// - `Running` -> `Shutdown`
#[derive(Debug)]
enum ComplementRunnerState {
    /// The complement child process has not been started yet
    Startup,
    /// The complement child process is running, and we have not yet received
    /// a shutdown signal.
    ///
    /// Holds the (trait-object) handle to the child so the signal-handling
    /// thread can kill it.
    Running(Box<dyn StdChildWrapper>),
    /// We have received a shutdown signal.
    Shutdown,
}
|
||||||
|
|
||||||
|
/// Spawn complement child process and handle its output
///
/// This is the "complement runner" thread, spawned by the [`run_complement`]
/// function.
fn run_complement_inner(
    sh: &Shell,
    out: &Path,
    docker_image: &str,
    test_count: u64,
    state: &Mutex<ComplementRunnerState>,
) -> Result<TestResults> {
    let cmd = cmd!(sh, "go tool test2json complement.test -test.v=test2json")
        .env("COMPLEMENT_BASE_IMAGE", docker_image)
        .env("COMPLEMENT_SPAWN_HS_TIMEOUT", "5")
        .env("COMPLEMENT_ALWAYS_PRINT_SERVER_LOGS", "1");
    eprintln!("$ {cmd}");

    // Spawn the child while holding the state lock, so a concurrent shutdown
    // either sees the child handle or prevents the spawn entirely.
    let stdout = {
        let mut state = state.lock().unwrap();
        match &*state {
            ComplementRunnerState::Startup => (),
            ComplementRunnerState::Running(_) => unreachable!(),
            ComplementRunnerState::Shutdown => {
                return Err(miette!("complement run was canceled"))
            }
        }
        let mut cmd = Command::from(cmd);
        cmd.stdout(Stdio::piped());
        // Run the child as a process-group leader so killing it also kills
        // anything it spawned.
        let mut child = StdCommandWrap::from(cmd)
            .wrap(ProcessGroup::leader())
            .spawn()
            .into_diagnostic()
            .wrap_err("error spawning complement process")?;
        let stdout = child.stdout().take().expect(
            "child process spawned with piped stdout should have stdout",
        );
        *state = ComplementRunnerState::Running(child);
        stdout
    };
    let lines = BufReader::new(stdout).lines();

    // Process the test2json event stream line-by-line until EOF (child exit or
    // kill).
    let mut ctx = TestContext::new(out, test_count)?;
    for line in lines {
        let line = line
            .into_diagnostic()
            .wrap_err("error reading output from complement process")?;
        ctx.handle_line(&line)?;
    }
    ctx.finish()
}
|
||||||
|
|
||||||
|
/// Schema from <https://pkg.go.dev/cmd/test2json#hdr-Output_Format>
///
/// Only the fields that we need are included here.
///
/// `Test` is optional everywhere: events for the overall package run do not
/// set it.
#[derive(Deserialize)]
#[serde(
    rename_all = "snake_case",
    rename_all_fields = "PascalCase",
    tag = "Action"
)]
enum GoTestEvent {
    /// A test has started running.
    Run {
        test: Option<String>,
    },
    /// A test finished successfully.
    Pass {
        test: Option<String>,
    },
    /// A test finished with a failure.
    Fail {
        test: Option<String>,
    },
    /// A test was skipped.
    Skip {
        test: Option<String>,
    },
    /// A chunk of output text attributed to a test (or to the whole package).
    Output {
        test: Option<String>,
        output: String,
    },
    /// Any other action we don't care about (e.g. "start", "pause", "cont").
    #[serde(other)]
    OtherAction,
}
|
||||||
|
|
||||||
|
/// Final outcome of a single test.
///
/// The strum derives give `Display`/`FromStr` using the uppercase forms
/// "PASS"/"FAIL"/"SKIP", which is also the representation used in
/// `summary.tsv`.
#[derive(Copy, Clone, Display, EnumString, Eq, PartialEq, Debug)]
#[strum(serialize_all = "UPPERCASE")]
pub(crate) enum TestResult {
    Pass,
    Fail,
    Skip,
}
|
||||||
|
|
||||||
|
/// Mutable state accumulated while processing a single complement run's
/// test2json event stream.
struct TestContext {
    // Progress bar shown while tests run; position tracks toplevel tests only.
    pb: ProgressBar,
    // Counts of *toplevel* tests per outcome (subtests are excluded).
    pass_count: u64,
    fail_count: u64,
    skip_count: u64,
    // We do not need a specific method to flush this before dropping
    // `TestContext`, because the file is only written from the
    // `update_summary_file` method. This method always calls flush on
    // a non-error path, and the file is left in an inconsistent state on an
    // error anyway.
    summary_file: BufWriter<File>,
    // Verbatim copy of every test2json line, flushed in `finish`.
    raw_log_file: BufWriter<File>,
    // Directory where per-test output logs are written ($out/logs).
    log_dir: PathBuf,
    // Result of every test seen so far, keyed by full test name.
    results: TestResults,
}
|
||||||
|
|
||||||
|
/// Returns a string to use for displaying a test name
///
/// From the test2json docs:
///
/// > The Test field, if present, specifies the test, example, or benchmark
/// > function that caused the event. Events for the overall package test do not
/// > set Test.
///
/// For events that do not have a `Test` field, we display their test name as
/// `"GLOBAL"` instead.
fn test_str(test: &Option<String>) -> &str {
    // `as_deref` borrows the inner String as &str; package-level events (no
    // Test field) fall back to the placeholder.
    test.as_deref().unwrap_or("GLOBAL")
}
|
||||||
|
|
||||||
|
/// Returns whether a test name is a toplevel test (as opposed to a subtest)
///
/// Subtests are named `Parent/Sub`, so any '/' marks a subtest.
fn test_is_toplevel(test: &str) -> bool {
    test.find('/').is_none()
}
|
||||||
|
|
||||||
|
impl TestContext {
|
||||||
|
fn new(out: &Path, test_count: u64) -> Result<TestContext> {
|
||||||
|
// TODO: figure out how to display ETA without it fluctuating wildly.
|
||||||
|
let style = ProgressStyle::with_template(
|
||||||
|
"({msg}) {pos}/{len} [{elapsed}] {wide_bar}",
|
||||||
|
)
|
||||||
|
.expect("static progress bar template should be valid")
|
||||||
|
.progress_chars("##-");
|
||||||
|
let pb = ProgressBar::new(test_count).with_style(style);
|
||||||
|
pb.enable_steady_tick(Duration::from_secs(1));
|
||||||
|
|
||||||
|
let summary_file = File::create(out.join("summary.tsv"))
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("failed to create summary file in output dir")?;
|
||||||
|
let summary_file = BufWriter::new(summary_file);
|
||||||
|
|
||||||
|
let raw_log_file = File::create(out.join("raw-log.jsonl"))
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("failed to create raw log file in output dir")?;
|
||||||
|
let raw_log_file = BufWriter::new(raw_log_file);
|
||||||
|
|
||||||
|
let log_dir = out.join("logs");
|
||||||
|
|
||||||
|
let ctx = TestContext {
|
||||||
|
pb,
|
||||||
|
pass_count: 0,
|
||||||
|
fail_count: 0,
|
||||||
|
skip_count: 0,
|
||||||
|
log_dir,
|
||||||
|
summary_file,
|
||||||
|
raw_log_file,
|
||||||
|
results: BTreeMap::new(),
|
||||||
|
};
|
||||||
|
|
||||||
|
ctx.update_progress();
|
||||||
|
Ok(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn finish(mut self) -> Result<TestResults> {
|
||||||
|
self.raw_log_file
|
||||||
|
.flush()
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("error flushing writes to raw log file")?;
|
||||||
|
Ok(self.results)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_raw_log_line(&mut self, line: &str) -> Result<()> {
|
||||||
|
self.raw_log_file
|
||||||
|
.write_all(line.as_bytes())
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("error writing line to raw log file")?;
|
||||||
|
self.raw_log_file
|
||||||
|
.write_all(b"\n")
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("error writing line to raw log file")?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_progress(&self) {
|
||||||
|
self.pb
|
||||||
|
.set_position(self.pass_count + self.fail_count + self.skip_count);
|
||||||
|
self.pb.set_message(format!(
|
||||||
|
"PASS {}, FAIL {}, SKIP {}",
|
||||||
|
self.pass_count, self.fail_count, self.skip_count
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_summary_file(&mut self) -> Result<()> {
|
||||||
|
// Truncate the file to clear existing contents
|
||||||
|
self.summary_file
|
||||||
|
.get_mut()
|
||||||
|
.seek(SeekFrom::Start(0))
|
||||||
|
.into_diagnostic()?;
|
||||||
|
self.summary_file.get_mut().set_len(0).into_diagnostic()?;
|
||||||
|
write_summary(&mut self.summary_file, &self.results)?;
|
||||||
|
self.summary_file.flush().into_diagnostic()?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_test_result(
|
||||||
|
&mut self,
|
||||||
|
test: &str,
|
||||||
|
result: TestResult,
|
||||||
|
) -> Result<()> {
|
||||||
|
self.pb.println(format!("=== {result}\t{test}"));
|
||||||
|
self.results.insert(test.to_owned(), result);
|
||||||
|
// 'complement.test -test.list' is only able to count toplevel tests
|
||||||
|
// ahead-of-time, so we don't include subtests in the pass/fail/skip
|
||||||
|
// counts.
|
||||||
|
if test_is_toplevel(test) {
|
||||||
|
match result {
|
||||||
|
TestResult::Pass => self.pass_count += 1,
|
||||||
|
TestResult::Fail => self.fail_count += 1,
|
||||||
|
TestResult::Skip => self.skip_count += 1,
|
||||||
|
}
|
||||||
|
self.update_progress();
|
||||||
|
}
|
||||||
|
self.update_summary_file().wrap_err("error writing summary file")?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_test_output(&mut self, test: &str, output: &str) -> Result<()> {
|
||||||
|
let path = self.log_dir.join(test).with_extension("txt");
|
||||||
|
|
||||||
|
// Some tests have a '/' in their name, so create the extra dirs if they
|
||||||
|
// don't already exist.
|
||||||
|
let parent_dir = path.parent().expect(
|
||||||
|
"log file path should have parent. At worst, the toplevel dir is \
|
||||||
|
$out/logs/.",
|
||||||
|
);
|
||||||
|
fs::create_dir_all(parent_dir).into_diagnostic().wrap_err_with(
|
||||||
|
|| {
|
||||||
|
format!(
|
||||||
|
"error creating directory at {parent_dir:?} for log file \
|
||||||
|
{path:?}"
|
||||||
|
)
|
||||||
|
},
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let mut log_file = File::options()
|
||||||
|
.create(true)
|
||||||
|
.append(true)
|
||||||
|
.open(&path)
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err_with(|| format!("error creating log file at {path:?}"))?;
|
||||||
|
log_file.write_all(output.as_bytes()).into_diagnostic().wrap_err_with(
|
||||||
|
|| format!("error writing to log file at {path:?}"),
|
||||||
|
)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_event(&mut self, event: GoTestEvent) -> Result<()> {
|
||||||
|
match event {
|
||||||
|
GoTestEvent::OtherAction => (),
|
||||||
|
GoTestEvent::Run {
|
||||||
|
test,
|
||||||
|
} => {
|
||||||
|
self.pb.println(format!("=== RUN \t{}", test_str(&test)));
|
||||||
|
}
|
||||||
|
GoTestEvent::Pass {
|
||||||
|
test,
|
||||||
|
} => {
|
||||||
|
self.handle_test_result(test_str(&test), TestResult::Pass)?;
|
||||||
|
}
|
||||||
|
GoTestEvent::Fail {
|
||||||
|
test,
|
||||||
|
} => {
|
||||||
|
self.handle_test_result(test_str(&test), TestResult::Fail)?;
|
||||||
|
}
|
||||||
|
GoTestEvent::Skip {
|
||||||
|
test,
|
||||||
|
} => {
|
||||||
|
self.handle_test_result(test_str(&test), TestResult::Skip)?;
|
||||||
|
}
|
||||||
|
GoTestEvent::Output {
|
||||||
|
test,
|
||||||
|
output,
|
||||||
|
} => {
|
||||||
|
let test = test_str(&test);
|
||||||
|
self.handle_test_output(test, &output).wrap_err_with(|| {
|
||||||
|
format!(
|
||||||
|
"failed to write test output to a log file for test \
|
||||||
|
{test:?}"
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Processes a line of output from `test2json`
|
||||||
|
fn handle_line(&mut self, line: &str) -> Result<()> {
|
||||||
|
self.write_raw_log_line(line)?;
|
||||||
|
let result = from_json_line(line).wrap_err(
|
||||||
|
"failed to parse go test2json event from complement tests. \
|
||||||
|
Ignoring this event",
|
||||||
|
);
|
||||||
|
match result {
|
||||||
|
Ok(event) => self.handle_event(event)?,
|
||||||
|
Err(e) => eprintln!("{e:?}"),
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
74
xtask/src/main.rs
Normal file
74
xtask/src/main.rs
Normal file
|
|
@ -0,0 +1,74 @@
|
||||||
|
mod complement;
|
||||||
|
|
||||||
|
use std::{env, ffi::OsString, process::ExitCode};
|
||||||
|
|
||||||
|
use clap::{Parser, Subcommand};
|
||||||
|
use miette::{miette, IntoDiagnostic, Result, WrapErr};
|
||||||
|
use xshell::Shell;
|
||||||
|
|
||||||
|
// Top-level CLI arguments for the xtask runner.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on
// clap-derived items would change the generated --help text.
#[derive(Parser)]
struct Args {
    // Which xtask subcommand to run.
    #[clap(subcommand)]
    command: Command,
}
|
||||||
|
|
||||||
|
// The set of available xtask subcommands.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on
// clap-derived variants would change the generated --help text.
#[derive(Subcommand)]
enum Command {
    // Runs the Complement integration test suite; arguments are defined
    // in the `complement` module.
    Complement(complement::Args),
}
|
||||||
|
|
||||||
|
fn main() -> ExitCode {
|
||||||
|
let Err(e) = try_main() else {
|
||||||
|
return ExitCode::SUCCESS;
|
||||||
|
};
|
||||||
|
// Include a leading newline because sometimes an error will occur in
|
||||||
|
// the middle of displaying a progress indicator.
|
||||||
|
eprintln!("\n{e:?}");
|
||||||
|
ExitCode::FAILURE
|
||||||
|
}
|
||||||
|
|
||||||
|
fn try_main() -> Result<()> {
|
||||||
|
let args = Args::parse();
|
||||||
|
let sh = new_shell()?;
|
||||||
|
match args.command {
|
||||||
|
Command::Complement(args) => complement::main(args, &sh),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new_shell() -> Result<Shell> {
|
||||||
|
let path = get_shell_path()?;
|
||||||
|
let sh = Shell::new()
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err("failed to initialize internal xshell::Shell wrapper")?;
|
||||||
|
sh.set_var("PATH", path);
|
||||||
|
Ok(sh)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the value to set the `PATH` environment variable to in
|
||||||
|
/// [`xshell::Shell`] instances.
|
||||||
|
///
|
||||||
|
/// This function appends the paths from the `GRAPEVINE_XTASK_PATH` environment
|
||||||
|
/// variable to the existing value of `PATH` set in the xtask process.
|
||||||
|
///
|
||||||
|
/// Executable dependencies that are only called by commands in xtask should be
|
||||||
|
/// added to `GRAPEVINE_XTASK_PATH` instead of `PATH` in the devshell, to avoid
|
||||||
|
/// polluting the devshell path with extra entries.
|
||||||
|
fn get_shell_path() -> Result<OsString> {
|
||||||
|
let xtask_path = env::var_os("GRAPEVINE_XTASK_PATH").ok_or(miette!(
|
||||||
|
help = "This tool must be run from inside the Grapevine devshell. \
|
||||||
|
Make sure you didn't interrupt direnv or something similar.",
|
||||||
|
"GRAPEVINE_XTASK_PATH environment variable is unset"
|
||||||
|
))?;
|
||||||
|
if let Some(path) = env::var_os("PATH") {
|
||||||
|
let old_paths = env::split_paths(&path);
|
||||||
|
let xtask_paths = env::split_paths(&xtask_path);
|
||||||
|
env::join_paths(old_paths.chain(xtask_paths))
|
||||||
|
.into_diagnostic()
|
||||||
|
.wrap_err(
|
||||||
|
"error constructing new PATH value to include the paths from \
|
||||||
|
GRAPEVINE_XTASK_PATH",
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
Ok(xtask_path)
|
||||||
|
}
|
||||||
|
}
|
||||||
Loading…
Add table
Add a link
Reference in a new issue