From d20e217b1c8af0b6bdb43a02e1a99419168ed9ba Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Thu, 6 Jun 2024 21:34:23 -0700 Subject: [PATCH 01/14] add nix derivation for complement Alternative to this would be just running 'go test' and pointing it at the complement source code when we want to do a test run. This would mean that we can't cache the unit test build, and would have to include the 'olm' input in the devshell. --- flake.nix | 3 +++ nix/pkgs/complement/default.nix | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 nix/pkgs/complement/default.nix diff --git a/flake.nix b/flake.nix index 50fe2135..9cdd3ceb 100644 --- a/flake.nix +++ b/flake.nix @@ -20,6 +20,8 @@ let # Keep sorted mkScope = pkgs: pkgs.lib.makeScope pkgs.newScope (self: { + complement = self.callPackage ./nix/pkgs/complement {}; + craneLib = (inputs.crane.mkLib pkgs).overrideToolchain self.toolchain; @@ -72,6 +74,7 @@ { packages = { default = (mkScope pkgs).default; + complement = (mkScope pkgs).complement; } // builtins.listToAttrs diff --git a/nix/pkgs/complement/default.nix b/nix/pkgs/complement/default.nix new file mode 100644 index 00000000..2494f064 --- /dev/null +++ b/nix/pkgs/complement/default.nix @@ -0,0 +1,33 @@ +# Dependencies (keep sorted) +{ buildGoModule +, fetchFromGitHub +, lib +, olm +}: + +buildGoModule { + name = "complement"; + + src = fetchFromGitHub { + owner = "matrix-org"; + repo = "complement"; + rev = "5b72981ea3de814a2c41e010dbc0a2cffcbd2e79"; + hash = "sha256-kSZcuXHHRsq6Vfv2rDBq682VdUkeRWse944fR25Zc0A="; + }; + + vendorHash = "sha256-mNt6lI4ppVjZC5UMsNqEDHQ0kjXutEFZTCMvXwp24ew="; + + buildInputs = [ olm ]; + + doCheck = false; + postBuild = '' + # compiles the tests into a binary + go test -c ./tests -o "$GOPATH/bin/complement.test" + ''; + + meta = { + description = "Matrix compliance test suite"; + homepage = "https://github.com/matrix-org/complement"; + license = lib.licenses.asl20; + }; +} From 
cd439af2c4d45f568de0d5d1fbd2934b31162325 Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Thu, 13 Jun 2024 01:09:43 -0700 Subject: [PATCH 02/14] add test image for complement This image should satisfy the requirements described in [1]. openssl commands were copied from [2]. [1]: https://github.com/matrix-org/complement?tab=readme-ov-file#image-requirements [2]: https://github.com/matrix-org/complement?tab=readme-ov-file#complement-pki --- flake.nix | 5 ++ .../config.toml | 38 +++++++++ .../default.nix | 78 +++++++++++++++++++ 3 files changed, 121 insertions(+) create mode 100644 nix/pkgs/complement-grapevine-oci-image/config.toml create mode 100644 nix/pkgs/complement-grapevine-oci-image/default.nix diff --git a/flake.nix b/flake.nix index 9cdd3ceb..8b47fa30 100644 --- a/flake.nix +++ b/flake.nix @@ -22,6 +22,9 @@ mkScope = pkgs: pkgs.lib.makeScope pkgs.newScope (self: { complement = self.callPackage ./nix/pkgs/complement {}; + complement-grapevine-oci-image = + self.callPackage ./nix/pkgs/complement-grapevine-oci-image { }; + craneLib = (inputs.crane.mkLib pkgs).overrideToolchain self.toolchain; @@ -75,6 +78,8 @@ packages = { default = (mkScope pkgs).default; complement = (mkScope pkgs).complement; + complement-grapevine-oci-image = + (mkScope pkgs).complement-grapevine-oci-image; } // builtins.listToAttrs diff --git a/nix/pkgs/complement-grapevine-oci-image/config.toml b/nix/pkgs/complement-grapevine-oci-image/config.toml new file mode 100644 index 00000000..5c110173 --- /dev/null +++ b/nix/pkgs/complement-grapevine-oci-image/config.toml @@ -0,0 +1,38 @@ +# this config file is processed with envsubst before being loaded + +server_name = "$SERVER_NAME" + +allow_registration = true + +# complement tests the unauthenticated media endpoints +serve_media_unauthenticated = true + +[server_discovery.client] +base_url = "https://$SERVER_NAME" + +[federation] +trusted_servers = [] + +[database] +backend = "rocksdb" +path = "/app/db" + +[observability.logs] +filter = 
"debug,h2=warn,hyper=warn" +# ansi escapes can make it hard to read the log files in an editor +colors = false + +[tls] +certs = "/app/grapevine.crt" +key = "/app/grapevine.key" + +[[listen]] +type = "tcp" +address = "0.0.0.0" +port = 8008 + +[[listen]] +type = "tcp" +address = "0.0.0.0" +port = 8448 +tls = true diff --git a/nix/pkgs/complement-grapevine-oci-image/default.nix b/nix/pkgs/complement-grapevine-oci-image/default.nix new file mode 100644 index 00000000..7cf379e7 --- /dev/null +++ b/nix/pkgs/complement-grapevine-oci-image/default.nix @@ -0,0 +1,78 @@ +# Keep sorted +{ buildEnv +, coreutils +, default +, dockerTools +, envsubst +, moreutils +, openssl +, writeShellScript +, writeTextDir +}: + +dockerTools.buildImage { + name = "complement-grapevine"; + + copyToRoot = buildEnv { + name = "image-root"; + paths = [ + (writeTextDir "app/config.toml" (builtins.readFile ./config.toml)) + coreutils + default + moreutils + envsubst + openssl + ]; + pathsToLink = [ "/bin" "/app" ]; + }; + + config = { + ExposedPorts = { + "8008/tcp" = {}; + "8448/tcp" = {}; + }; + Cmd = [ + (writeShellScript "docker-entrypoint.sh" '' + set -euo pipefail + + mkdir -p /tmp + + # trust certs signed by the complement test CA + mkdir -p /etc/ssl/certs + # we don't have any other trusted certs, so just replace this file + # entirely + cp /complement/ca/ca.crt /etc/ssl/certs/ca-certificates.crt + + # sign our TLS cert with the complement test CA + cat > /app/v3.ext < Date: Wed, 19 Jun 2024 19:38:31 -0700 Subject: [PATCH 03/14] move dependencies and some package attrs to workspace This is in preparation for creating a separate `xtask` package. 
--- Cargo.toml | 104 ++++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 84 insertions(+), 20 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 7904770e..f5c132a8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,3 +1,9 @@ +[workspace.package] +license = "Apache-2.0" + +# See also `rust-toolchain.toml` +rust-version = "1.81.0" + # Keep alphabetically sorted [workspace.lints.rust] elided_lifetimes_in_paths = "warn" @@ -71,21 +77,8 @@ wildcard_dependencies = "warn" missing_errors_doc = "allow" missing_panics_doc = "allow" -[package] -name = "grapevine" -description = "A Matrix homeserver written in Rust" -license = "Apache-2.0" -version = "0.1.0" -edition = "2021" - -# See also `rust-toolchain.toml` -rust-version = "1.81.0" - -[lints] -workspace = true - # Keep sorted -[dependencies] +[workspace.dependencies] argon2 = "0.5.3" async-trait = "0.1.82" axum = { version = "0.7.6", default-features = false, features = ["form", "http1", "http2", "json", "matched-path", "tracing"] } @@ -104,6 +97,7 @@ hyper-util = { version = "0.1.8", features = ["client", "client-legacy", "servic image = { version = "0.25.2", default-features = false, features = ["jpeg", "png", "gif"] } jsonwebtoken = "9.3.0" lru-cache = "0.1.2" +nix = { version = "0.29", features = ["resource"] } num_cpus = "1.16.0" once_cell = "1.19.0" opentelemetry = "0.24.0" @@ -111,18 +105,18 @@ opentelemetry-jaeger-propagator = "0.3.0" opentelemetry-otlp = "0.17.0" opentelemetry-prometheus = "0.17.0" opentelemetry_sdk = { version = "0.24.0", features = ["rt-tokio"] } -parking_lot = { version = "0.12.3", optional = true } +parking_lot = "0.12.3" phf = { version = "0.11.2", features = ["macros"] } prometheus = "0.13.4" rand = "0.8.5" regex = "1.10.6" reqwest = { version = "0.12.7", default-features = false, features = ["http2", "rustls-tls-native-roots", "socks"] } ring = "0.17.8" -rocksdb = { package = "rust-rocksdb", version = "0.26.0", features = ["lz4", "multi-threaded-cf", "zstd"], optional = 
true } +rocksdb = { package = "rust-rocksdb", version = "0.26.0", features = ["lz4", "multi-threaded-cf", "zstd"] } ruma = { git = "https://github.com/ruma/ruma", branch = "main", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "server-util", "state-res", "unstable-msc2448", "unstable-msc3575", "unstable-exhaustive-types", "ring-compat", "unstable-unspecified" ] } -rusqlite = { version = "0.32.1", optional = true, features = ["bundled"] } +rusqlite = { version = "0.32.1", features = ["bundled"] } rustls = { version = "0.23.13", default-features = false, features = ["ring", "log", "logging", "std", "tls12"] } -sd-notify = { version = "0.4.2", optional = true } +sd-notify = { version = "0.4.2" } serde = { version = "1.0.210", features = ["rc"] } serde_html_form = "0.2.6" serde_json = { version = "1.0.128", features = ["raw_value"] } @@ -131,7 +125,7 @@ sha-1 = "0.10.1" strum = { version = "0.26.3", features = ["derive"] } thiserror = "1.0.64" thread_local = "1.1.8" -tikv-jemallocator = { version = "0.6.0", features = ["unprefixed_malloc_on_supported_platforms"], optional = true } +tikv-jemallocator = { version = "0.6.0", features = ["unprefixed_malloc_on_supported_platforms"] } tokio = { version = "1.40.0", features = ["fs", "macros", "signal", "sync"] } toml = "0.8.19" tower = { version = "0.5.1", features = ["util"] } @@ -143,8 +137,78 @@ tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"] } trust-dns-resolver = "0.23.2" xdg = "2.5.2" +[package] +name = "grapevine" +description = "A Matrix homeserver written in Rust" +license.workspace = true +version = "0.1.0" +edition = "2021" +rust-version.workspace = true + +[lints] +workspace = true + +# Keep sorted +[dependencies] +argon2.workspace = true +async-trait.workspace = true +axum.workspace = true +axum-extra.workspace = true +axum-server.workspace = true +base64.workspace = true +bytes.workspace = true +clap.workspace = true 
+futures-util.workspace = true +hmac.workspace = true +html-escape.workspace = true +http.workspace = true +http-body-util.workspace = true +hyper.workspace = true +hyper-util.workspace = true +image.workspace = true +jsonwebtoken.workspace = true +lru-cache.workspace = true +num_cpus.workspace = true +once_cell.workspace = true +opentelemetry.workspace = true +opentelemetry-jaeger-propagator.workspace = true +opentelemetry-otlp.workspace = true +opentelemetry-prometheus.workspace = true +opentelemetry_sdk.workspace = true +parking_lot = { workspace = true, optional = true } +phf.workspace = true +prometheus.workspace = true +rand.workspace = true +regex.workspace = true +reqwest.workspace = true +ring.workspace = true +rocksdb = { workspace = true, optional = true } +ruma.workspace = true +rustls.workspace = true +rusqlite = { workspace = true, optional = true } +sd-notify = { workspace = true, optional = true } +serde.workspace = true +serde_html_form.workspace = true +serde_json.workspace = true +serde_yaml.workspace = true +sha-1.workspace = true +strum.workspace = true +thiserror.workspace = true +thread_local.workspace = true +tikv-jemallocator = { workspace = true, optional = true } +tokio.workspace = true +toml.workspace = true +tower.workspace = true +tower-http.workspace = true +tracing.workspace = true +tracing-flame.workspace = true +tracing-opentelemetry.workspace = true +tracing-subscriber.workspace = true +trust-dns-resolver.workspace = true +xdg.workspace = true + [target.'cfg(unix)'.dependencies] -nix = { version = "0.29", features = ["resource"] } +nix.workspace = true [features] default = ["rocksdb", "sqlite", "systemd"] From 3f89bc4a7c1486807481b41e8893d53c0ee3e601 Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Thu, 20 Jun 2024 00:40:44 -0700 Subject: [PATCH 04/14] allow 'nix build'-specific nix-build-and-cache args We need --no-link for the complement script. 
I picked the syntax for this as an analogy with 'cargo rustc --cargo-args -- --rustc-args', but not sure how I feel about it. --- bin/nix-build-and-cache | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/bin/nix-build-and-cache b/bin/nix-build-and-cache index ff06bc96..a9d097a6 100755 --- a/bin/nix-build-and-cache +++ b/bin/nix-build-and-cache @@ -5,11 +5,33 @@ set -euo pipefail toplevel="$(git rev-parse --show-toplevel)" # Build and cache the specified arguments +# +# Usage: nix-build-and-cache just [args...] -- [build-only-args...] +# +# Arguments after the '--' (build-only-args) will only be passed to the +# 'nix build' command, while arguments before '--' will also be passed to any +# other nix command using the installable list. This is useful for things like +# '--no-link', which are only relevant to 'nix build'. just() { + build_args=() + args=() + build_only_mode=0 + for arg in "$@"; do + if [ "$build_only_mode" = 1 ]; then + build_args+=( "$arg" ) + elif [ "$arg" = "--" ]; then + # Everything after the -- is passed only to 'nix build' + build_only_mode=1 + else + args+=( "$arg" ) + build_args+=( "$arg" ) + fi + done + if command -v nom &> /dev/null; then - nom build "$@" + nom build "${build_args[@]}" else - nix build "$@" + nix build "${build_args[@]}" fi if [ -z ${ATTIC_TOKEN+x} ]; then @@ -24,7 +46,7 @@ just() { "$ATTIC_TOKEN" # Find all output paths of the installables and their build dependencies - readarray -t derivations < <(nix path-info --derivation "$@") + readarray -t derivations < <(nix path-info --derivation "${args[@]}") cache=() for derivation in "${derivations[@]}"; do cache+=( From ef6eb27b9bd2caade89c570beca4f694bc5e75ce Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Thu, 20 Jun 2024 19:06:57 -0700 Subject: [PATCH 05/14] add complement wrapper xtask script --- .cargo/config.toml | 2 + Cargo.lock | 126 ++++++++++++++++++++++++++++++ Cargo.toml | 4 + nix/pkgs/default/default.nix | 1 + 
nix/shell.nix | 6 ++ xtask/Cargo.toml | 14 ++++ xtask/src/complement.rs | 32 ++++++++ xtask/src/complement/docker.rs | 58 ++++++++++++++ xtask/src/complement/test2json.rs | 19 +++++ xtask/src/main.rs | 30 +++++++ 10 files changed, 292 insertions(+) create mode 100644 .cargo/config.toml create mode 100644 xtask/Cargo.toml create mode 100644 xtask/src/complement.rs create mode 100644 xtask/src/complement/docker.rs create mode 100644 xtask/src/complement/test2json.rs create mode 100644 xtask/src/main.rs diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 00000000..35049cbc --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,2 @@ +[alias] +xtask = "run --package xtask --" diff --git a/Cargo.lock b/Cargo.lock index caff42d9..b161e921 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -247,6 +247,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "backtrace-ext" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" +dependencies = [ + "backtrace", +] + [[package]] name = "base64" version = "0.21.7" @@ -1168,6 +1177,12 @@ version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" +[[package]] +name = "is_ci" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" + [[package]] name = "itertools" version = "0.12.1" @@ -1393,6 +1408,37 @@ version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +[[package]] +name = "miette" +version = "7.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4edc8853320c2a0dab800fbda86253c8938f6ea88510dc92c5f1ed20e794afc1" +dependencies = [ + "backtrace", 
+ "backtrace-ext", + "cfg-if", + "miette-derive", + "owo-colors", + "supports-color", + "supports-hyperlinks", + "supports-unicode", + "terminal_size", + "textwrap", + "thiserror", + "unicode-width", +] + +[[package]] +name = "miette-derive" +version = "7.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "mime" version = "0.3.17" @@ -1626,6 +1672,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" +[[package]] +name = "owo-colors" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb37767f6569cd834a413442455e0f066d0d522de8630436e2a1761d9726ba56" + [[package]] name = "parking_lot" version = "0.12.3" @@ -2693,6 +2745,12 @@ version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +[[package]] +name = "smawk" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" + [[package]] name = "socket2" version = "0.5.7" @@ -2756,6 +2814,27 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "supports-color" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8775305acf21c96926c900ad056abeef436701108518cf890020387236ac5a77" +dependencies = [ + "is_ci", +] + +[[package]] +name = "supports-hyperlinks" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2c0a1e5168041f5f3ff68ff7d95dcb9c8749df29f6e7e89ada40dd4c9de404ee" + +[[package]] +name = "supports-unicode" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" + [[package]] name = "syn" version = "2.0.77" @@ -2792,6 +2871,17 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "textwrap" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" +dependencies = [ + "smawk", + "unicode-linebreak", + "unicode-width", +] + [[package]] name = "thiserror" version = "1.0.64" @@ -3279,6 +3369,12 @@ version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + [[package]] name = "unicode-normalization" version = "0.1.24" @@ -3288,6 +3384,12 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -3690,6 +3792,30 @@ version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "213b7324336b53d2414b2db8537e56544d981803139155afa84f76eeebb7a546" +[[package]] +name = "xshell" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db0ab86eae739efd1b054a8d3d16041914030ac4e01cd1dca0cf252fd8b6437" +dependencies = [ + "xshell-macros", +] + +[[package]] +name = "xshell-macros" +version = "0.2.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d422e8e38ec76e2f06ee439ccc765e9c6a9638b9e7c9f2e8255e4d41e8bd852" + +[[package]] +name = "xtask" +version = "0.1.0" +dependencies = [ + "clap", + "miette", + "xshell", +] + [[package]] name = "zerocopy" version = "0.7.35" diff --git a/Cargo.toml b/Cargo.toml index f5c132a8..9493e894 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,3 +1,5 @@ +[workspace] +members = ["xtask"] [workspace.package] license = "Apache-2.0" @@ -97,6 +99,7 @@ hyper-util = { version = "0.1.8", features = ["client", "client-legacy", "servic image = { version = "0.25.2", default-features = false, features = ["jpeg", "png", "gif"] } jsonwebtoken = "9.3.0" lru-cache = "0.1.2" +miette = { version = "7.2.0", features = ["fancy"] } nix = { version = "0.29", features = ["resource"] } num_cpus = "1.16.0" once_cell = "1.19.0" @@ -136,6 +139,7 @@ tracing-opentelemetry = "0.25.0" tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"] } trust-dns-resolver = "0.23.2" xdg = "2.5.2" +xshell = "0.2.6" [package] name = "grapevine" diff --git a/nix/pkgs/default/default.nix b/nix/pkgs/default/default.nix index fe31cc74..7359c126 100644 --- a/nix/pkgs/default/default.nix +++ b/nix/pkgs/default/default.nix @@ -84,6 +84,7 @@ let "Cargo.lock" "Cargo.toml" "src" + "xtask" ]; }; diff --git a/nix/shell.nix b/nix/shell.nix index 130d63e6..01d6f430 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -2,6 +2,8 @@ { buildPlatform , default , engage +, complement +, go , inputs , jq , lychee @@ -34,6 +36,10 @@ mkShell { markdownlint-cli mdbook toolchain + + # TODO: don't pollute the devshell with these + go + complement ] ++ default.nativeBuildInputs diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml new file mode 100644 index 00000000..5d282c75 --- /dev/null +++ b/xtask/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "xtask" +version = "0.1.0" +edition = "2021" +license.workspace = true +rust-version.workspace = true + +[dependencies] 
+clap.workspace = true +miette.workspace = true +xshell.workspace = true + +[lints] +workspace = true diff --git a/xtask/src/complement.rs b/xtask/src/complement.rs new file mode 100644 index 00000000..456e9a41 --- /dev/null +++ b/xtask/src/complement.rs @@ -0,0 +1,32 @@ +use std::path::PathBuf; + +use miette::{IntoDiagnostic, Result, WrapErr}; +use xshell::{cmd, Shell}; + +mod docker; +mod test2json; + +use self::{docker::load_docker_image, test2json::run_complement}; + +#[derive(clap::Args)] +pub(crate) struct Args; + +#[allow(clippy::needless_pass_by_value)] +pub(crate) fn main(_args: Args) -> Result<()> { + let sh = Shell::new().unwrap(); + let toplevel = get_toplevel_path(&sh) + .wrap_err("failed to determine repository root directory")?; + let docker_image = load_docker_image(&sh, &toplevel).wrap_err( + "failed to build and load complement-grapevine docker image", + )?; + run_complement(&sh, &docker_image) + .wrap_err("failed to run complement tests")?; + Ok(()) +} + +/// Returns the path to the repository root +fn get_toplevel_path(sh: &Shell) -> Result { + let path = + cmd!(sh, "git rev-parse --show-toplevel").read().into_diagnostic()?; + Ok(path.into()) +} diff --git a/xtask/src/complement/docker.rs b/xtask/src/complement/docker.rs new file mode 100644 index 00000000..ada0c8a9 --- /dev/null +++ b/xtask/src/complement/docker.rs @@ -0,0 +1,58 @@ +//! Functions for working with docker images and containers. + +use std::path::Path; + +use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr}; +use xshell::{cmd, Shell}; + +/// Build the 'grapevine-complement' OCI image and load it into the docker +/// daemon. +pub(crate) fn load_docker_image(sh: &Shell, toplevel: &Path) -> Result { + // > i would Not trust that parser as far as i can throw it + // - @jade_:matrix.org, 2024-06-19 + // + // So we're not even gonna try to escape the arbitrary top level path + // correctly for a flake installable reference. 
Instead we're just gonna cd + // into toplevel before running nix commands. + let _pushd_guard = sh.push_dir(toplevel); + + let installable = ".#complement-grapevine-oci-image"; + cmd!(sh, "nix-build-and-cache just {installable} -- --no-link") + .run() + .into_diagnostic() + .wrap_err("error building complement-grapevine-oci-image")?; + let oci_image_path = cmd!(sh, "nix path-info {installable}") + .read() + .into_diagnostic() + .wrap_err( + "error getting nix store path for complement-grapevine-oci-image", + )?; + + // Instead of building the image with a fixed tag, we let nix choose the tag + // based on the input hash, and then determine the image/tag it used by + // parsing the 'docker load' output. This is to avoid a race condition + // between multiple concurrent 'xtask complement' invocations, which might + // otherwise assign the same tag to different images. + let load_output = cmd!(sh, "docker image load --input {oci_image_path}") + .read() + .into_diagnostic() + .wrap_err("error loading complement-grapevine docker image")?; + let expected_prefix = "Loaded image: "; + let docker_image = load_output + .strip_prefix(expected_prefix) + .ok_or_else(|| { + // Miette doesn't support inclusive ranges. + // + #[allow(clippy::range_plus_one)] + let span = 0..(expected_prefix.len().min(load_output.len()) + 1); + let label = + LabeledSpan::at(span, format!("Expected {expected_prefix:?}")); + miette!( + labels = vec![label], + "failed to parse 'docker image load' output" + ) + .with_source_code(load_output.clone()) + })? + .to_owned(); + Ok(docker_image) +} diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs new file mode 100644 index 00000000..595813e3 --- /dev/null +++ b/xtask/src/complement/test2json.rs @@ -0,0 +1,19 @@ +//! Functions for working with the go [`test2json`][test2json] tool. +//! +//! 
[test2json]: https://pkg.go.dev/cmd/test2json@go1.22.4 + +use miette::{IntoDiagnostic, Result}; +use xshell::{cmd, Shell}; + +/// Runs complement test suite +pub(crate) fn run_complement(sh: &Shell, docker_image: &str) -> Result<()> { + // TODO: handle SIG{INT,TERM} + // TODO: XTASK_PATH variable, so that we don't need to pollute devshell with + // go + cmd!(sh, "go tool test2json complement.test -test.v=test2json") + .env("COMPLEMENT_BASE_IMAGE", docker_image) + .env("COMPLEMENT_SPAWN_HS_TIMEOUT", "5") + .env("COMPLEMENT_ALWAYS_PRINT_SERVER_LOGS", "1") + .run() + .into_diagnostic() +} diff --git a/xtask/src/main.rs b/xtask/src/main.rs new file mode 100644 index 00000000..70fa34c2 --- /dev/null +++ b/xtask/src/main.rs @@ -0,0 +1,30 @@ +mod complement; + +use std::process::ExitCode; + +use clap::{Parser, Subcommand}; + +#[derive(Parser)] +struct Args { + #[clap(subcommand)] + command: Command, +} + +#[derive(Subcommand)] +enum Command { + Complement(complement::Args), +} + +fn main() -> ExitCode { + let args = Args::parse(); + let result = match args.command { + Command::Complement(args) => complement::main(args), + }; + let Err(e) = result else { + return ExitCode::SUCCESS; + }; + // Include a leading newline because sometimes an error will occur in + // the middle of displaying a progress indicator. + eprintln!("\n{e:?}"); + ExitCode::FAILURE +} From e4e224f5dc0abdfe9f2b4ee50fcc80cb2b1dd12a Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Fri, 21 Jun 2024 00:17:06 -0700 Subject: [PATCH 06/14] add live progress display to complement wrapper Added the `derive` feature to the workspace serde dependency here. Previously, the dependency was only used in the main package, which ended up enabling the `derive` feature through transitive serde dependencies. This is not the case for xtask, so we need to enable it explicitly. 
--- Cargo.lock | 57 ++++++++ Cargo.toml | 3 +- xtask/Cargo.toml | 4 + xtask/src/complement.rs | 9 +- xtask/src/complement/test2json.rs | 212 +++++++++++++++++++++++++++++- 5 files changed, 277 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b161e921..8ce87cc6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -453,6 +453,19 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" +[[package]] +name = "console" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "unicode-width", + "windows-sys 0.52.0", +] + [[package]] name = "const-oid" version = "0.9.6" @@ -609,6 +622,12 @@ version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + [[package]] name = "enum-as-inner" version = "0.6.1" @@ -1159,6 +1178,28 @@ dependencies = [ "serde", ] +[[package]] +name = "indicatif" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" +dependencies = [ + "console", + "instant", + "number_prefix", + "portable-atomic", + "unicode-width", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + [[package]] name = "ipconfig" version = "0.3.2" @@ -1558,6 +1599,12 @@ dependencies = [ "libc", ] 
+[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + [[package]] name = "object" version = "0.36.4" @@ -1831,6 +1878,12 @@ dependencies = [ "miniz_oxide 0.7.4", ] +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + [[package]] name = "powerfmt" version = "0.2.0" @@ -3812,7 +3865,11 @@ name = "xtask" version = "0.1.0" dependencies = [ "clap", + "indicatif", "miette", + "serde", + "serde_json", + "strum", "xshell", ] diff --git a/Cargo.toml b/Cargo.toml index 9493e894..9bfe1f7c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -97,6 +97,7 @@ http-body-util = "0.1.2" hyper = "1.4.1" hyper-util = { version = "0.1.8", features = ["client", "client-legacy", "service"] } image = { version = "0.25.2", default-features = false, features = ["jpeg", "png", "gif"] } +indicatif = "0.17.8" jsonwebtoken = "9.3.0" lru-cache = "0.1.2" miette = { version = "7.2.0", features = ["fancy"] } @@ -120,7 +121,7 @@ ruma = { git = "https://github.com/ruma/ruma", branch = "main", features = ["com rusqlite = { version = "0.32.1", features = ["bundled"] } rustls = { version = "0.23.13", default-features = false, features = ["ring", "log", "logging", "std", "tls12"] } sd-notify = { version = "0.4.2" } -serde = { version = "1.0.210", features = ["rc"] } +serde = { version = "1.0.210", features = ["rc", "derive"] } serde_html_form = "0.2.6" serde_json = { version = "1.0.128", features = ["raw_value"] } serde_yaml = "0.9.34" diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 5d282c75..511e054f 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -8,6 +8,10 @@ rust-version.workspace = true [dependencies] clap.workspace = true miette.workspace = true +indicatif.workspace = true +serde.workspace = true 
+serde_json.workspace = true +strum.workspace = true xshell.workspace = true [lints] diff --git a/xtask/src/complement.rs b/xtask/src/complement.rs index 456e9a41..78ac6247 100644 --- a/xtask/src/complement.rs +++ b/xtask/src/complement.rs @@ -6,7 +6,10 @@ use xshell::{cmd, Shell}; mod docker; mod test2json; -use self::{docker::load_docker_image, test2json::run_complement}; +use self::{ + docker::load_docker_image, + test2json::{count_complement_tests, run_complement}, +}; #[derive(clap::Args)] pub(crate) struct Args; @@ -19,7 +22,9 @@ pub(crate) fn main(_args: Args) -> Result<()> { let docker_image = load_docker_image(&sh, &toplevel).wrap_err( "failed to build and load complement-grapevine docker image", )?; - run_complement(&sh, &docker_image) + let test_count = count_complement_tests(&sh, &docker_image) + .wrap_err("failed to determine total complement test count")?; + run_complement(&sh, &docker_image, test_count) .wrap_err("failed to run complement tests")?; Ok(()) } diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs index 595813e3..bfc80aaa 100644 --- a/xtask/src/complement/test2json.rs +++ b/xtask/src/complement/test2json.rs @@ -2,18 +2,220 @@ //! //! 
[test2json]: https://pkg.go.dev/cmd/test2json@go1.22.4 -use miette::{IntoDiagnostic, Result}; +use std::{ + io::{BufRead, BufReader}, + process::{Command, Stdio}, + time::Duration, +}; + +use indicatif::{ProgressBar, ProgressStyle}; +use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr}; +use serde::Deserialize; +use strum::Display; use xshell::{cmd, Shell}; +/// Returns the total number of complement tests that will be run +/// +/// This is only able to count toplevel tests, and will not included subtests +/// (`A/B`) +pub(crate) fn count_complement_tests( + sh: &Shell, + docker_image: &str, +) -> Result { + let test_list = cmd!(sh, "go tool test2json complement.test -test.list .*") + .env("COMPLEMENT_BASE_IMAGE", docker_image) + .read() + .into_diagnostic()?; + let test_count = u64::try_from(test_list.lines().count()) + .into_diagnostic() + .wrap_err("test count overflowed u64")?; + Ok(test_count) +} + /// Runs complement test suite -pub(crate) fn run_complement(sh: &Shell, docker_image: &str) -> Result<()> { +pub(crate) fn run_complement( + sh: &Shell, + docker_image: &str, + test_count: u64, +) -> Result<()> { // TODO: handle SIG{INT,TERM} // TODO: XTASK_PATH variable, so that we don't need to pollute devshell with // go - cmd!(sh, "go tool test2json complement.test -test.v=test2json") + let cmd = cmd!(sh, "go tool test2json complement.test -test.v=test2json") .env("COMPLEMENT_BASE_IMAGE", docker_image) .env("COMPLEMENT_SPAWN_HS_TIMEOUT", "5") - .env("COMPLEMENT_ALWAYS_PRINT_SERVER_LOGS", "1") - .run() + .env("COMPLEMENT_ALWAYS_PRINT_SERVER_LOGS", "1"); + eprintln!("$ {cmd}"); + let child = Command::from(cmd) + .stdout(Stdio::piped()) + .spawn() .into_diagnostic() + .wrap_err("error spawning complement process")?; + let stdout = child + .stdout + .expect("child process spawned with piped stdout should have stdout"); + let lines = BufReader::new(stdout).lines(); + + let mut ctx = TestContext::new(test_count); + for line in lines { + let line = line 
+ .into_diagnostic() + .wrap_err("error reading output from complement process")?; + ctx.handle_line(&line); + } + + Ok(()) +} + +/// Schema from +/// +/// Only the fields that we need are included here. +#[derive(Deserialize)] +#[serde( + rename_all = "snake_case", + rename_all_fields = "PascalCase", + tag = "Action" +)] +enum GoTestEvent { + Run { + test: Option, + }, + Pass { + test: Option, + }, + Fail { + test: Option, + }, + Skip { + test: Option, + }, + #[serde(other)] + OtherAction, +} + +#[derive(Copy, Clone, Display, Debug)] +#[strum(serialize_all = "UPPERCASE")] +enum TestResult { + Pass, + Fail, + Skip, +} + +struct TestContext { + pb: ProgressBar, + pass_count: u64, + fail_count: u64, + skip_count: u64, +} + +/// Returns a string to use for displaying a test name +/// +/// From the test2json docs: +/// +/// > The Test field, if present, specifies the test, example, or benchmark +/// > function that caused the event. Events for the overall package test do not +/// > set Test. +/// +/// For events that do not have a `Test` field, we display their test name as +/// `"GLOBAL"` instead. +fn test_str(test: &Option) -> &str { + if let Some(test) = test { + test + } else { + "GLOBAL" + } +} + +/// Returns whether a test name is a toplevel test (as opposed to a subtest) +fn test_is_toplevel(test: &str) -> bool { + !test.contains('/') +} + +impl TestContext { + fn new(test_count: u64) -> TestContext { + // TODO: figure out how to display ETA without it fluctuating wildly. 
+ let style = ProgressStyle::with_template( + "({msg}) {pos}/{len} [{elapsed}] {wide_bar}", + ) + .expect("static progress bar template should be valid") + .progress_chars("##-"); + let pb = ProgressBar::new(test_count).with_style(style); + pb.enable_steady_tick(Duration::from_secs(1)); + let ctx = TestContext { + pb, + pass_count: 0, + fail_count: 0, + skip_count: 0, + }; + ctx.update_progress(); + ctx + } + + fn update_progress(&self) { + self.pb + .set_position(self.pass_count + self.fail_count + self.skip_count); + self.pb.set_message(format!( + "PASS {}, FAIL {}, SKIP {}", + self.pass_count, self.fail_count, self.skip_count + )); + } + + fn handle_test_result(&mut self, test: &str, result: TestResult) { + self.pb.println(format!("=== {result}\t{test}")); + // 'complement.test -test.list' is only able to count toplevel tests + // ahead-of-time, so we don't include subtests in the pass/fail/skip + // counts. + if test_is_toplevel(test) { + match result { + TestResult::Pass => self.pass_count += 1, + TestResult::Fail => self.fail_count += 1, + TestResult::Skip => self.skip_count += 1, + } + self.update_progress(); + } + } + + fn handle_event(&mut self, event: GoTestEvent) { + match event { + GoTestEvent::OtherAction => (), + GoTestEvent::Run { + test, + } => { + self.pb.println(format!("=== RUN \t{}", test_str(&test))); + } + GoTestEvent::Pass { + test, + } => { + self.handle_test_result(test_str(&test), TestResult::Pass); + } + GoTestEvent::Fail { + test, + } => { + self.handle_test_result(test_str(&test), TestResult::Fail); + } + GoTestEvent::Skip { + test, + } => { + self.handle_test_result(test_str(&test), TestResult::Skip); + } + } + } + + /// Processes a line of output from `test2json` + fn handle_line(&mut self, line: &str) { + match serde_json::from_str(line) { + Ok(event) => self.handle_event(event), + Err(e) => { + let label = + LabeledSpan::at_offset(e.column() - 1, "error here"); + let report = miette!(labels = vec![label], "{e}",) + 
.with_source_code(line.to_owned()) + .wrap_err( + "failed to parse go test2json event from complement \ + tests. Ignoring this event.", + ); + eprintln!("{report:?}"); + } + }; + } } From b3b1d43d8eb2a29c5935ce896580f62be3231bfe Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Fri, 21 Jun 2024 12:02:27 -0700 Subject: [PATCH 07/14] write complement test result summary to a tsv file --- xtask/src/complement.rs | 50 +++++++++++++++++++--- xtask/src/complement/summary.rs | 50 ++++++++++++++++++++++ xtask/src/complement/test2json.rs | 69 +++++++++++++++++++++++++------ 3 files changed, 151 insertions(+), 18 deletions(-) create mode 100644 xtask/src/complement/summary.rs diff --git a/xtask/src/complement.rs b/xtask/src/complement.rs index 78ac6247..4b9b55a2 100644 --- a/xtask/src/complement.rs +++ b/xtask/src/complement.rs @@ -1,9 +1,13 @@ -use std::path::PathBuf; +use std::{ + fs::{self}, + path::{Path, PathBuf}, +}; -use miette::{IntoDiagnostic, Result, WrapErr}; +use miette::{miette, IntoDiagnostic, Result, WrapErr}; use xshell::{cmd, Shell}; mod docker; +mod summary; mod test2json; use self::{ @@ -12,23 +16,59 @@ use self::{ }; #[derive(clap::Args)] -pub(crate) struct Args; +pub(crate) struct Args { + /// Directory to write test results + /// + /// This directory will be created automatically, but it must be empty. + /// If it exists and is not empty, an error will be returned. 
+ #[clap(short, long)] + out: PathBuf, +} #[allow(clippy::needless_pass_by_value)] -pub(crate) fn main(_args: Args) -> Result<()> { +pub(crate) fn main(args: Args) -> Result<()> { let sh = Shell::new().unwrap(); let toplevel = get_toplevel_path(&sh) .wrap_err("failed to determine repository root directory")?; + create_out_dir(&args.out).wrap_err_with(|| { + format!("error initializing output directory {:?}", args.out) + })?; let docker_image = load_docker_image(&sh, &toplevel).wrap_err( "failed to build and load complement-grapevine docker image", )?; let test_count = count_complement_tests(&sh, &docker_image) .wrap_err("failed to determine total complement test count")?; - run_complement(&sh, &docker_image, test_count) + run_complement(&sh, &args.out, &docker_image, test_count) .wrap_err("failed to run complement tests")?; Ok(()) } +/// Ensures that output directory exists and is empty +/// +/// If the directory does not exist, it will be created. If it is not empty, an +/// error will be returned. +/// +/// We have no protection against concurrent programs modifying the contents of +/// the directory while the complement wrapper tool is running. +fn create_out_dir(out: &Path) -> Result<()> { + fs::create_dir_all(out) + .into_diagnostic() + .wrap_err("error creating output directory")?; + let mut entries = fs::read_dir(out) + .into_diagnostic() + .wrap_err("error checking current contents of output directory")?; + if entries.next().is_some() { + return Err(miette!( + "output directory is not empty. Refusing to run, instead of \ + possibly overwriting existing files." 
+ )); + } + fs::create_dir(out.join("logs")) + .into_diagnostic() + .wrap_err("error creating logs subdirectory in output directory")?; + Ok(()) +} + /// Returns the path to the repository root fn get_toplevel_path(sh: &Shell) -> Result { let path = diff --git a/xtask/src/complement/summary.rs b/xtask/src/complement/summary.rs new file mode 100644 index 00000000..d95f819f --- /dev/null +++ b/xtask/src/complement/summary.rs @@ -0,0 +1,50 @@ +//! Functions for working with the `summary.tsv` files emitted by the complement +//! wrapper. +//! +//! This file is a TSV containing test names and results for each test in a +//! complement run. + +use std::{ + collections::BTreeMap, + io::{BufWriter, Write}, +}; + +use miette::{IntoDiagnostic, Result}; + +use super::test2json::TestResult; + +pub(crate) type TestResults = BTreeMap; + +/// Escape a string value for use in a TSV file. +/// +/// According to the [tsv spec][1], the only characters that need to be escaped +/// are `\n`, `\t`, `\r`, and `\`. +/// +/// [1]: https://www.loc.gov/preservation/digital/formats/fdd/fdd000533.shtml +fn escape_tsv_value(value: &str) -> String { + value + .replace('\\', "\\\\") + .replace('\n', "\\n") + .replace('\t', "\\t") + .replace('\r', "\\r") +} + +/// Write a test result summary to a writer. +pub(crate) fn write_summary( + w: &mut BufWriter, + summary: &TestResults, +) -> Result<()> { + // Write header line + writeln!(w, "test\tresult").into_diagnostic()?; + // Write rows + for (test, result) in summary { + writeln!( + w, + "{}\t{}", + escape_tsv_value(test), + escape_tsv_value(&result.to_string()) + ) + .into_diagnostic()?; + } + Ok(()) +} diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs index bfc80aaa..121aebbf 100644 --- a/xtask/src/complement/test2json.rs +++ b/xtask/src/complement/test2json.rs @@ -3,7 +3,10 @@ //! 
[test2json]: https://pkg.go.dev/cmd/test2json@go1.22.4 use std::{ - io::{BufRead, BufReader}, + collections::BTreeMap, + fs::File, + io::{BufRead, BufReader, BufWriter, Seek, SeekFrom, Write}, + path::Path, process::{Command, Stdio}, time::Duration, }; @@ -14,6 +17,8 @@ use serde::Deserialize; use strum::Display; use xshell::{cmd, Shell}; +use super::summary::{write_summary, TestResults}; + /// Returns the total number of complement tests that will be run /// /// This is only able to count toplevel tests, and will not included subtests @@ -35,6 +40,7 @@ pub(crate) fn count_complement_tests( /// Runs complement test suite pub(crate) fn run_complement( sh: &Shell, + out: &Path, docker_image: &str, test_count: u64, ) -> Result<()> { @@ -56,12 +62,12 @@ pub(crate) fn run_complement( .expect("child process spawned with piped stdout should have stdout"); let lines = BufReader::new(stdout).lines(); - let mut ctx = TestContext::new(test_count); + let mut ctx = TestContext::new(out, test_count)?; for line in lines { let line = line .into_diagnostic() .wrap_err("error reading output from complement process")?; - ctx.handle_line(&line); + ctx.handle_line(&line)?; } Ok(()) @@ -95,7 +101,7 @@ enum GoTestEvent { #[derive(Copy, Clone, Display, Debug)] #[strum(serialize_all = "UPPERCASE")] -enum TestResult { +pub(crate) enum TestResult { Pass, Fail, Skip, @@ -106,6 +112,13 @@ struct TestContext { pass_count: u64, fail_count: u64, skip_count: u64, + // We do not need a specific method to flush this before dropping + // `TestContext`, because the file is only written from the + // `update_summary_file` method. This method always calls flush on + // a non-error path, and the file is left in an inconsistent state on an + // error anyway. 
+ summary_file: BufWriter, + results: TestResults, } /// Returns a string to use for displaying a test name @@ -132,7 +145,7 @@ fn test_is_toplevel(test: &str) -> bool { } impl TestContext { - fn new(test_count: u64) -> TestContext { + fn new(out: &Path, test_count: u64) -> Result { // TODO: figure out how to display ETA without it fluctuating wildly. let style = ProgressStyle::with_template( "({msg}) {pos}/{len} [{elapsed}] {wide_bar}", @@ -141,14 +154,23 @@ impl TestContext { .progress_chars("##-"); let pb = ProgressBar::new(test_count).with_style(style); pb.enable_steady_tick(Duration::from_secs(1)); + + let summary_file = File::create(out.join("summary.tsv")) + .into_diagnostic() + .wrap_err("failed to create summary file in output dir")?; + let summary_file = BufWriter::new(summary_file); + let ctx = TestContext { pb, pass_count: 0, fail_count: 0, skip_count: 0, + summary_file, + results: BTreeMap::new(), }; + ctx.update_progress(); - ctx + Ok(ctx) } fn update_progress(&self) { @@ -160,8 +182,25 @@ impl TestContext { )); } - fn handle_test_result(&mut self, test: &str, result: TestResult) { + fn update_summary_file(&mut self) -> Result<()> { + // Truncate the file to clear existing contents + self.summary_file + .get_mut() + .seek(SeekFrom::Start(0)) + .into_diagnostic()?; + self.summary_file.get_mut().set_len(0).into_diagnostic()?; + write_summary(&mut self.summary_file, &self.results)?; + self.summary_file.flush().into_diagnostic()?; + Ok(()) + } + + fn handle_test_result( + &mut self, + test: &str, + result: TestResult, + ) -> Result<()> { self.pb.println(format!("=== {result}\t{test}")); + self.results.insert(test.to_owned(), result); // 'complement.test -test.list' is only able to count toplevel tests // ahead-of-time, so we don't include subtests in the pass/fail/skip // counts. 
@@ -173,9 +212,11 @@ impl TestContext { } self.update_progress(); } + self.update_summary_file().wrap_err("error writing summary file")?; + Ok(()) } - fn handle_event(&mut self, event: GoTestEvent) { + fn handle_event(&mut self, event: GoTestEvent) -> Result<()> { match event { GoTestEvent::OtherAction => (), GoTestEvent::Run { @@ -186,25 +227,26 @@ impl TestContext { GoTestEvent::Pass { test, } => { - self.handle_test_result(test_str(&test), TestResult::Pass); + self.handle_test_result(test_str(&test), TestResult::Pass)?; } GoTestEvent::Fail { test, } => { - self.handle_test_result(test_str(&test), TestResult::Fail); + self.handle_test_result(test_str(&test), TestResult::Fail)?; } GoTestEvent::Skip { test, } => { - self.handle_test_result(test_str(&test), TestResult::Skip); + self.handle_test_result(test_str(&test), TestResult::Skip)?; } } + Ok(()) } /// Processes a line of output from `test2json` - fn handle_line(&mut self, line: &str) { + fn handle_line(&mut self, line: &str) -> Result<()> { match serde_json::from_str(line) { - Ok(event) => self.handle_event(event), + Ok(event) => self.handle_event(event)?, Err(e) => { let label = LabeledSpan::at_offset(e.column() - 1, "error here"); @@ -217,5 +259,6 @@ impl TestContext { eprintln!("{report:?}"); } }; + Ok(()) } } From ab883ffa44c19de5a90e816ae0203defcb05a8ef Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Fri, 21 Jun 2024 12:04:59 -0700 Subject: [PATCH 08/14] write complement per-test logs to files --- xtask/src/complement/test2json.rs | 54 +++++++++++++++++++++++++++++-- 1 file changed, 52 insertions(+), 2 deletions(-) diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs index 121aebbf..c08580a9 100644 --- a/xtask/src/complement/test2json.rs +++ b/xtask/src/complement/test2json.rs @@ -4,9 +4,9 @@ use std::{ collections::BTreeMap, - fs::File, + fs::{self, File}, io::{BufRead, BufReader, BufWriter, Seek, SeekFrom, Write}, - path::Path, + path::{Path, PathBuf}, process::{Command, 
Stdio}, time::Duration, }; @@ -95,6 +95,10 @@ enum GoTestEvent { Skip { test: Option, }, + Output { + test: Option, + output: String, + }, #[serde(other)] OtherAction, } @@ -118,6 +122,7 @@ struct TestContext { // a non-error path, and the file is left in an inconsistent state on an // error anyway. summary_file: BufWriter, + log_dir: PathBuf, results: TestResults, } @@ -160,11 +165,14 @@ impl TestContext { .wrap_err("failed to create summary file in output dir")?; let summary_file = BufWriter::new(summary_file); + let log_dir = out.join("logs"); + let ctx = TestContext { pb, pass_count: 0, fail_count: 0, skip_count: 0, + log_dir, summary_file, results: BTreeMap::new(), }; @@ -216,6 +224,36 @@ impl TestContext { Ok(()) } + fn handle_test_output(&mut self, test: &str, output: &str) -> Result<()> { + let path = self.log_dir.join(test).with_extension("txt"); + + // Some tests have a '/' in their name, so create the extra dirs if they + // don't already exist. + let parent_dir = path.parent().expect( + "log file path should have parent. 
At worst, the toplevel dir is \ + $out/logs/.", + ); + fs::create_dir_all(parent_dir).into_diagnostic().wrap_err_with( + || { + format!( + "error creating directory at {parent_dir:?} for log file \ + {path:?}" + ) + }, + )?; + + let mut log_file = File::options() + .create(true) + .append(true) + .open(&path) + .into_diagnostic() + .wrap_err_with(|| format!("error creating log file at {path:?}"))?; + log_file.write_all(output.as_bytes()).into_diagnostic().wrap_err_with( + || format!("error writing to log file at {path:?}"), + )?; + Ok(()) + } + fn handle_event(&mut self, event: GoTestEvent) -> Result<()> { match event { GoTestEvent::OtherAction => (), @@ -239,6 +277,18 @@ impl TestContext { } => { self.handle_test_result(test_str(&test), TestResult::Skip)?; } + GoTestEvent::Output { + test, + output, + } => { + let test = test_str(&test); + self.handle_test_output(test, &output).wrap_err_with(|| { + format!( + "failed to write test output to a log file for test \ + {test:?}" + ) + })?; + } } Ok(()) } From 8eab6eea204d09df0f7bb0886c654f41db97f3cd Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Fri, 21 Jun 2024 16:33:51 -0700 Subject: [PATCH 09/14] compare complement test results to a baseline One thing that might be neat in the future is noticing differing results while the tests are still running, and modifying the log messages to indicate them. I can imagine situations where you would want to abort the test run immediately after seeing the first regression. 
--- complement-baseline.tsv | 220 +++++++++++++++++++++++++++ xtask/src/complement.rs | 21 ++- xtask/src/complement/summary.rs | 238 +++++++++++++++++++++++++++++- xtask/src/complement/test2json.rs | 8 +- 4 files changed, 481 insertions(+), 6 deletions(-) create mode 100644 complement-baseline.tsv diff --git a/complement-baseline.tsv b/complement-baseline.tsv new file mode 100644 index 00000000..cc489de8 --- /dev/null +++ b/complement-baseline.tsv @@ -0,0 +1,220 @@ +test result +GLOBAL FAIL +TestACLs PASS +TestBannedUserCannotSendJoin FAIL +TestCannotSendKnockViaSendKnockInMSC3787Room FAIL +TestCannotSendKnockViaSendKnockInMSC3787Room/event_with_mismatched_state_key FAIL +TestCannotSendKnockViaSendKnockInMSC3787Room/invite_event FAIL +TestCannotSendKnockViaSendKnockInMSC3787Room/join_event FAIL +TestCannotSendKnockViaSendKnockInMSC3787Room/leave_event FAIL +TestCannotSendKnockViaSendKnockInMSC3787Room/non-state_membership_event FAIL +TestCannotSendKnockViaSendKnockInMSC3787Room/regular_event FAIL +TestCannotSendNonJoinViaSendJoinV1 FAIL +TestCannotSendNonJoinViaSendJoinV1/event_with_mismatched_state_key PASS +TestCannotSendNonJoinViaSendJoinV1/invite_event PASS +TestCannotSendNonJoinViaSendJoinV1/knock_event PASS +TestCannotSendNonJoinViaSendJoinV1/leave_event FAIL +TestCannotSendNonJoinViaSendJoinV1/non-state_membership_event PASS +TestCannotSendNonJoinViaSendJoinV1/regular_event FAIL +TestCannotSendNonJoinViaSendJoinV2 FAIL +TestCannotSendNonJoinViaSendJoinV2/event_with_mismatched_state_key PASS +TestCannotSendNonJoinViaSendJoinV2/invite_event PASS +TestCannotSendNonJoinViaSendJoinV2/knock_event PASS +TestCannotSendNonJoinViaSendJoinV2/leave_event FAIL +TestCannotSendNonJoinViaSendJoinV2/non-state_membership_event PASS +TestCannotSendNonJoinViaSendJoinV2/regular_event FAIL +TestCannotSendNonKnockViaSendKnock FAIL +TestCannotSendNonLeaveViaSendLeaveV1 FAIL +TestCannotSendNonLeaveViaSendLeaveV1/event_with_mismatched_state_key FAIL 
+TestCannotSendNonLeaveViaSendLeaveV1/invite_event FAIL +TestCannotSendNonLeaveViaSendLeaveV1/join_event FAIL +TestCannotSendNonLeaveViaSendLeaveV1/knock_event FAIL +TestCannotSendNonLeaveViaSendLeaveV1/non-state_membership_event FAIL +TestCannotSendNonLeaveViaSendLeaveV1/regular_event FAIL +TestCannotSendNonLeaveViaSendLeaveV2 FAIL +TestCannotSendNonLeaveViaSendLeaveV2/event_with_mismatched_state_key FAIL +TestCannotSendNonLeaveViaSendLeaveV2/invite_event FAIL +TestCannotSendNonLeaveViaSendLeaveV2/join_event FAIL +TestCannotSendNonLeaveViaSendLeaveV2/knock_event FAIL +TestCannotSendNonLeaveViaSendLeaveV2/non-state_membership_event FAIL +TestCannotSendNonLeaveViaSendLeaveV2/regular_event FAIL +TestClientSpacesSummary FAIL +TestClientSpacesSummary/max_depth FAIL +TestClientSpacesSummary/pagination FAIL +TestClientSpacesSummary/query_whole_graph FAIL +TestClientSpacesSummary/redact_link FAIL +TestClientSpacesSummary/suggested_only FAIL +TestClientSpacesSummaryJoinRules FAIL +TestContentMediaV1 PASS +TestDeviceListsUpdateOverFederation FAIL +TestDeviceListsUpdateOverFederation/good_connectivity FAIL +TestDeviceListsUpdateOverFederation/interrupted_connectivity FAIL +TestDeviceListsUpdateOverFederation/stopped_server FAIL +TestDeviceListsUpdateOverFederationOnRoomJoin FAIL +TestEventAuth FAIL +TestFederatedClientSpaces FAIL +TestFederationKeyUploadQuery FAIL +TestFederationKeyUploadQuery/Can_claim_remote_one_time_key_using_POST FAIL +TestFederationKeyUploadQuery/Can_query_remote_device_keys_using_POST FAIL +TestFederationRedactSendsWithoutEvent PASS +TestFederationRejectInvite FAIL +TestFederationRoomsInvite FAIL +TestFederationRoomsInvite/Parallel FAIL +TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation FAIL +TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation_for_empty_room PASS +TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation_several_times FAIL 
+TestFederationRoomsInvite/Parallel/Invited_user_has_'is_direct'_flag_in_prev_content_after_joining PASS +TestFederationRoomsInvite/Parallel/Remote_invited_user_can_see_room_metadata PASS +TestFederationThumbnail PASS +TestGetMissingEventsGapFilling FAIL +TestInboundCanReturnMissingEvents FAIL +TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_invited_visibility FAIL +TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_joined_visibility FAIL +TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_shared_visibility FAIL +TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_world_readable_visibility FAIL +TestInboundFederationKeys PASS +TestInboundFederationProfile PASS +TestInboundFederationProfile/Inbound_federation_can_query_profile_data PASS +TestInboundFederationProfile/Non-numeric_ports_in_server_names_are_rejected PASS +TestInboundFederationRejectsEventsWithRejectedAuthEvents FAIL +TestIsDirectFlagFederation PASS +TestIsDirectFlagLocal PASS +TestJoinFederatedRoomFailOver PASS +TestJoinFederatedRoomFromApplicationServiceBridgeUser FAIL +TestJoinFederatedRoomFromApplicationServiceBridgeUser/join_remote_federated_room_as_application_service_user FAIL +TestJoinFederatedRoomWithUnverifiableEvents PASS +TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_missing_signatures_shouldn't_block_room_join PASS +TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_bad_signatures_shouldn't_block_room_join PASS +TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_state_with_unverifiable_auth_events_shouldn't_block_room_join PASS +TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_unobtainable_keys_shouldn't_block_room_join PASS +TestJoinViaRoomIDAndServerName PASS +TestJumpToDateEndpoint FAIL +TestJumpToDateEndpoint/parallel FAIL +TestJumpToDateEndpoint/parallel/federation FAIL 
+TestJumpToDateEndpoint/parallel/federation/can_paginate_after_getting_remote_event_from_timestamp_to_event_endpoint FAIL +TestJumpToDateEndpoint/parallel/federation/looking_backwards,_should_be_able_to_find_event_that_was_sent_before_we_joined FAIL +TestJumpToDateEndpoint/parallel/federation/looking_forwards,_should_be_able_to_find_event_that_was_sent_before_we_joined FAIL +TestJumpToDateEndpoint/parallel/federation/when_looking_backwards_before_the_room_was_created,_should_be_able_to_find_event_that_was_imported FAIL +TestJumpToDateEndpoint/parallel/should_find_event_after_given_timestmap FAIL +TestJumpToDateEndpoint/parallel/should_find_event_before_given_timestmap FAIL +TestJumpToDateEndpoint/parallel/should_find_next_event_topologically_after_given_timestmap_when_all_message_timestamps_are_the_same FAIL +TestJumpToDateEndpoint/parallel/should_find_next_event_topologically_before_given_timestamp_when_all_message_timestamps_are_the_same FAIL +TestJumpToDateEndpoint/parallel/should_find_nothing_after_the_latest_timestmap PASS +TestJumpToDateEndpoint/parallel/should_find_nothing_before_the_earliest_timestmap PASS +TestJumpToDateEndpoint/parallel/should_not_be_able_to_query_a_private_room_you_are_not_a_member_of FAIL +TestJumpToDateEndpoint/parallel/should_not_be_able_to_query_a_public_room_you_are_not_a_member_of FAIL +TestKnockRoomsInPublicRoomsDirectory FAIL +TestKnockRoomsInPublicRoomsDirectoryInMSC3787Room FAIL +TestKnocking FAIL +TestKnockingInMSC3787Room FAIL +TestKnockingInMSC3787Room/A_user_can_knock_on_a_room_without_a_reason FAIL +TestKnockingInMSC3787Room/A_user_can_knock_on_a_room_without_a_reason#01 FAIL +TestKnockingInMSC3787Room/A_user_cannot_knock_on_a_room_they_are_already_in FAIL +TestKnockingInMSC3787Room/A_user_cannot_knock_on_a_room_they_are_already_in#01 FAIL +TestKnockingInMSC3787Room/A_user_cannot_knock_on_a_room_they_are_already_invited_to FAIL +TestKnockingInMSC3787Room/A_user_cannot_knock_on_a_room_they_are_already_invited_to#01 FAIL 
+TestKnockingInMSC3787Room/A_user_in_the_room_can_accept_a_knock PASS +TestKnockingInMSC3787Room/A_user_in_the_room_can_accept_a_knock#01 PASS +TestKnockingInMSC3787Room/A_user_in_the_room_can_reject_a_knock FAIL +TestKnockingInMSC3787Room/A_user_in_the_room_can_reject_a_knock#01 FAIL +TestKnockingInMSC3787Room/A_user_that_has_already_knocked_is_allowed_to_knock_again_on_the_same_room FAIL +TestKnockingInMSC3787Room/A_user_that_has_already_knocked_is_allowed_to_knock_again_on_the_same_room#01 FAIL +TestKnockingInMSC3787Room/A_user_that_has_knocked_on_a_local_room_can_rescind_their_knock_and_then_knock_again FAIL +TestKnockingInMSC3787Room/A_user_that_is_banned_from_a_room_cannot_knock_on_it FAIL +TestKnockingInMSC3787Room/A_user_that_is_banned_from_a_room_cannot_knock_on_it#01 FAIL +TestKnockingInMSC3787Room/Attempting_to_join_a_room_with_join_rule_'knock'_without_an_invite_should_fail FAIL +TestKnockingInMSC3787Room/Attempting_to_join_a_room_with_join_rule_'knock'_without_an_invite_should_fail#01 FAIL +TestKnockingInMSC3787Room/Change_the_join_rule_of_a_room_from_'invite'_to_'knock' PASS +TestKnockingInMSC3787Room/Change_the_join_rule_of_a_room_from_'invite'_to_'knock'#01 PASS +TestKnockingInMSC3787Room/Knocking_on_a_room_with_a_join_rule_other_than_'knock'_should_fail FAIL +TestKnockingInMSC3787Room/Knocking_on_a_room_with_a_join_rule_other_than_'knock'_should_fail#01 FAIL +TestKnockingInMSC3787Room/Knocking_on_a_room_with_join_rule_'knock'_should_succeed FAIL +TestKnockingInMSC3787Room/Knocking_on_a_room_with_join_rule_'knock'_should_succeed#01 FAIL +TestKnockingInMSC3787Room/Users_in_the_room_see_a_user's_membership_update_when_they_knock FAIL +TestKnockingInMSC3787Room/Users_in_the_room_see_a_user's_membership_update_when_they_knock#01 FAIL +TestLocalPngThumbnail PASS +TestLocalPngThumbnail/test_/_matrix/client/v1/media_endpoint PASS +TestMediaFilenames FAIL +TestMediaFilenames/Parallel FAIL +TestMediaFilenames/Parallel/ASCII FAIL 
+TestMediaFilenames/Parallel/ASCII/Can_download_file_'ascii' FAIL +TestMediaFilenames/Parallel/ASCII/Can_download_file_'ascii'_over_/_matrix/client/v1/media/download FAIL +TestMediaFilenames/Parallel/ASCII/Can_download_file_'name;with;semicolons' FAIL +TestMediaFilenames/Parallel/ASCII/Can_download_file_'name;with;semicolons'_over_/_matrix/client/v1/media/download FAIL +TestMediaFilenames/Parallel/ASCII/Can_download_file_'name_with_spaces' FAIL +TestMediaFilenames/Parallel/ASCII/Can_download_file_'name_with_spaces'_over_/_matrix/client/v1/media/download FAIL +TestMediaFilenames/Parallel/ASCII/Can_download_specifying_a_different_ASCII_file_name PASS +TestMediaFilenames/Parallel/ASCII/Can_download_specifying_a_different_ASCII_file_name_over__matrix/client/v1/media/download PASS +TestMediaFilenames/Parallel/ASCII/Can_upload_with_ASCII_file_name PASS +TestMediaFilenames/Parallel/Unicode FAIL +TestMediaFilenames/Parallel/Unicode/Can_download_specifying_a_different_Unicode_file_name PASS +TestMediaFilenames/Parallel/Unicode/Can_download_specifying_a_different_Unicode_file_name_over__matrix/client/v1/media/download PASS +TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_locally FAIL +TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_locally_over__matrix/client/v1/media/download FAIL +TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_over_federation FAIL +TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_over_federation_via__matrix/client/v1/media/download FAIL +TestMediaFilenames/Parallel/Unicode/Can_upload_with_Unicode_file_name PASS +TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_as_inline SKIP +TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_as_inline_via__matrix/client/v1/media/download SKIP +TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_with_parameters_as_inline SKIP 
+TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_with_parameters_as_inline_via__matrix/client/v1/media/download SKIP +TestMediaFilenames/Parallel/Unicode/Will_serve_unsafe_media_types_as_attachments SKIP +TestMediaFilenames/Parallel/Unicode/Will_serve_unsafe_media_types_as_attachments_via__matrix/client/v1/media/download SKIP +TestMediaWithoutFileName FAIL +TestMediaWithoutFileName/parallel FAIL +TestMediaWithoutFileName/parallel/Can_download_without_a_file_name_locally PASS +TestMediaWithoutFileName/parallel/Can_download_without_a_file_name_over_federation FAIL +TestMediaWithoutFileName/parallel/Can_upload_without_a_file_name PASS +TestMediaWithoutFileNameCSMediaV1 PASS +TestMediaWithoutFileNameCSMediaV1/parallel PASS +TestMediaWithoutFileNameCSMediaV1/parallel/Can_download_without_a_file_name_locally PASS +TestMediaWithoutFileNameCSMediaV1/parallel/Can_download_without_a_file_name_over_federation PASS +TestMediaWithoutFileNameCSMediaV1/parallel/Can_upload_without_a_file_name PASS +TestNetworkPartitionOrdering FAIL +TestOutboundFederationIgnoresMissingEventWithBadJSONForRoomVersion6 FAIL +TestOutboundFederationProfile PASS +TestOutboundFederationProfile/Outbound_federation_can_query_profile_data PASS +TestOutboundFederationSend PASS +TestRemoteAliasRequestsUnderstandUnicode PASS +TestRemotePngThumbnail PASS +TestRemotePngThumbnail/test_/_matrix/client/v1/media_endpoint PASS +TestRemotePresence FAIL +TestRemotePresence/Presence_changes_are_also_reported_to_remote_room_members FAIL +TestRemotePresence/Presence_changes_to_UNAVAILABLE_are_reported_to_remote_room_members FAIL +TestRemoteTyping FAIL +TestRestrictedRoomsLocalJoin FAIL +TestRestrictedRoomsLocalJoinInMSC3787Room FAIL +TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_initially PASS +TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_when_left_allowed_room FAIL +TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_with_mangled_join_rules PASS 
+TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_succeed_when_invited FAIL +TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_succeed_when_joined_to_allowed_room PASS +TestRestrictedRoomsRemoteJoin FAIL +TestRestrictedRoomsRemoteJoinFailOver FAIL +TestRestrictedRoomsRemoteJoinFailOverInMSC3787Room FAIL +TestRestrictedRoomsRemoteJoinInMSC3787Room FAIL +TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_initially PASS +TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_when_left_allowed_room PASS +TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_with_mangled_join_rules PASS +TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_succeed_when_invited PASS +TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_succeed_when_joined_to_allowed_room FAIL +TestRestrictedRoomsRemoteJoinLocalUser FAIL +TestRestrictedRoomsRemoteJoinLocalUserInMSC3787Room FAIL +TestRestrictedRoomsSpacesSummaryFederation FAIL +TestRestrictedRoomsSpacesSummaryLocal FAIL +TestSendJoinPartialStateResponse SKIP +TestSyncOmitsStateChangeOnFilteredEvents PASS +TestToDeviceMessagesOverFederation FAIL +TestToDeviceMessagesOverFederation/good_connectivity PASS +TestToDeviceMessagesOverFederation/interrupted_connectivity FAIL +TestToDeviceMessagesOverFederation/stopped_server FAIL +TestUnbanViaInvite FAIL +TestUnknownEndpoints FAIL +TestUnknownEndpoints/Client-server_endpoints PASS +TestUnknownEndpoints/Key_endpoints FAIL +TestUnknownEndpoints/Media_endpoints PASS +TestUnknownEndpoints/Server-server_endpoints PASS +TestUnknownEndpoints/Unknown_prefix PASS +TestUnrejectRejectedEvents FAIL +TestUserAppearsInChangedDeviceListOnJoinOverFederation PASS +TestWriteMDirectAccountData PASS diff --git a/xtask/src/complement.rs b/xtask/src/complement.rs index 4b9b55a2..fa04eb59 100644 --- a/xtask/src/complement.rs +++ b/xtask/src/complement.rs @@ -12,6 +12,7 @@ mod test2json; use self::{ docker::load_docker_image, + summary::{compare_summary, read_summary}, 
test2json::{count_complement_tests, run_complement}, }; @@ -23,6 +24,12 @@ pub(crate) struct Args { /// If it exists and is not empty, an error will be returned. #[clap(short, long)] out: PathBuf, + + /// Baseline test summary file to compare with + /// + /// If unspecified, defaults to `$repo_root/complement-baseline.tsv` + #[clap(short, long)] + baseline: Option, } #[allow(clippy::needless_pass_by_value)] @@ -30,6 +37,15 @@ pub(crate) fn main(args: Args) -> Result<()> { let sh = Shell::new().unwrap(); let toplevel = get_toplevel_path(&sh) .wrap_err("failed to determine repository root directory")?; + let baseline_path = args + .baseline + .unwrap_or_else(|| toplevel.join("complement-baseline.tsv")); + let baseline = read_summary(&baseline_path).wrap_err_with(|| { + format!( + "failed to read baseline test result summary from \ + {baseline_path:?}" + ) + })?; create_out_dir(&args.out).wrap_err_with(|| { format!("error initializing output directory {:?}", args.out) })?; @@ -38,8 +54,11 @@ pub(crate) fn main(args: Args) -> Result<()> { )?; let test_count = count_complement_tests(&sh, &docker_image) .wrap_err("failed to determine total complement test count")?; - run_complement(&sh, &args.out, &docker_image, test_count) + let results = run_complement(&sh, &args.out, &docker_image, test_count) .wrap_err("failed to run complement tests")?; + let summary_path = args.out.join("summary.tsv"); + compare_summary(&baseline, &results, &baseline_path, &summary_path)?; + println!("\nTest results were identical to baseline."); Ok(()) } diff --git a/xtask/src/complement/summary.rs b/xtask/src/complement/summary.rs index d95f819f..f3b03ba9 100644 --- a/xtask/src/complement/summary.rs +++ b/xtask/src/complement/summary.rs @@ -6,10 +6,14 @@ use std::{ collections::BTreeMap, + fs, io::{BufWriter, Write}, + path::Path, }; -use miette::{IntoDiagnostic, Result}; +use miette::{ + miette, IntoDiagnostic, LabeledSpan, NamedSource, Result, WrapErr, +}; use super::test2json::TestResult; @@ 
-29,6 +33,30 @@ fn escape_tsv_value(value: &str) -> String { .replace('\r', "\\r") } +/// Converts a string from a TSV value from to unescaped form. +fn unescape_tsv_value(value: &str) -> String { + let mut chars = value.chars(); + let mut out = String::new(); + while let Some(c) = chars.next() { + if c == '\\' { + match chars.next() { + Some('\\') => out.push('\\'), + Some('n') => out.push('\n'), + Some('t') => out.push('\t'), + Some('r') => out.push('\r'), + Some(c2) => { + out.push(c); + out.push(c2); + } + None => out.push(c), + } + } else { + out.push(c); + } + } + out +} + /// Write a test result summary to a writer. pub(crate) fn write_summary( w: &mut BufWriter, @@ -48,3 +76,211 @@ pub(crate) fn write_summary( } Ok(()) } + +/// Reads test result summary from a TSV file written by a previous run of the +/// complement wrapper. +pub(crate) fn read_summary( + path: &Path, +) -> Result> { + let contents = fs::read_to_string(path) + .into_diagnostic() + .wrap_err("failed to read summary file contents")?; + let source = NamedSource::new(path.to_string_lossy(), contents); + let contents = &source.inner(); + + let mut offset = 0; + // The TSV spec allows CRLF, but we never emit these ourselves + let mut lines = contents.split('\n'); + + let header_line = lines.next().ok_or_else(|| { + miette!( + labels = vec![LabeledSpan::at_offset(0, "expected header row")], + "summary file missing header row", + ) + .with_source_code(source.clone()) + })?; + let expected_header_line = "test\tresult"; + if header_line != expected_header_line { + return Err(miette!( + labels = vec![LabeledSpan::at( + 0..header_line.len(), + "unexpected header" + )], + "summary file header row has unexpected columns. Expecting \ + {expected_header_line:?}." 
+ ) + .with_source_code(source)); + } + offset += header_line.len() + 1; + + let mut results = BTreeMap::new(); + for line in lines { + if line.is_empty() { + continue; + } + + let tabs = line.match_indices('\t').collect::>(); + let column_count = tabs.len() + 1; + let (result_span, test, result) = match tabs[..] { + [(first_tab, _)] => { + let result_span = offset + first_tab + 1..offset + line.len(); + let test = line.get(..first_tab).expect( + "index should be valid because it was returned from \ + 'match_indices'", + ); + let result = line.get(first_tab + 1..).expect( + "index should be valid because it was returned from \ + 'match_indices'", + ); + (result_span, test, result) + } + [] => { + return Err(miette!( + labels = vec![LabeledSpan::at_offset( + offset + line.len(), + "expected more columns here" + )], + "each row in the summary file should have exactly two \ + columns. This row only has {column_count} columns.", + ) + .with_source_code(source)) + } + [_, (first_bad_tab, _), ..] => { + let span = offset + first_bad_tab..offset + line.len(); + return Err(miette!( + labels = + vec![LabeledSpan::at(span, "unexpected extra columns")], + "each row in the summary file should have exactly two \ + columns. This row has {column_count} columns.", + ) + .with_source_code(source)); + } + }; + + let test = unescape_tsv_value(test); + let result = unescape_tsv_value(result); + + let result = result.parse().map_err(|_| { + miette!( + labels = + vec![LabeledSpan::at(result_span, "invalid result value")], + "test result value must be one of 'PASS', 'FAIL', or 'SKIP'." + ) + .with_source_code(source.clone()) + })?; + + results.insert(test, result); + offset += line.len() + 1; + } + Ok(results) +} + +/// Print a bulleted list of test names, truncating if there are too many. +fn print_truncated_tests(tests: &[&str]) { + let max = 5; + for test in &tests[..max.min(tests.len())] { + println!(" - {test}"); + } + if tests.len() > max { + println!(" ... 
({} more)", tests.len() - max); + } +} + +/// Compares new test results against older results, returning a error if they +/// differ. +/// +/// A description of the differences will be logged separately from the returned +/// error. +pub(crate) fn compare_summary( + old: &TestResults, + new: &TestResults, + old_path: &Path, + new_path: &Path, +) -> Result<()> { + let mut unexpected_pass: Vec<&str> = Vec::new(); + let mut unexpected_fail: Vec<&str> = Vec::new(); + let mut unexpected_skip: Vec<&str> = Vec::new(); + let mut added: Vec<&str> = Vec::new(); + let mut removed: Vec<&str> = Vec::new(); + + for (test, new_result) in new { + if let Some(old_result) = old.get(test) { + if old_result != new_result { + match new_result { + TestResult::Pass => unexpected_pass.push(test), + TestResult::Fail => unexpected_fail.push(test), + TestResult::Skip => unexpected_skip.push(test), + } + } + } else { + added.push(test); + } + } + for test in old.keys() { + if !new.contains_key(test) { + removed.push(test); + } + } + + let mut differences = false; + if !added.is_empty() { + differences = true; + println!( + "\n{} tests were added that were not present in the baseline:", + added.len() + ); + print_truncated_tests(&added); + } + if !removed.is_empty() { + differences = true; + println!( + "\n{} tests present in the baseline were removed:", + removed.len() + ); + print_truncated_tests(&removed); + } + if !unexpected_pass.is_empty() { + differences = true; + println!( + "\n{} tests passed that did not pass in the baseline:", + unexpected_pass.len() + ); + print_truncated_tests(&unexpected_pass); + } + if !unexpected_skip.is_empty() { + differences = true; + println!( + "\n{} tests skipped that were not skipped in the baseline:", + unexpected_skip.len() + ); + print_truncated_tests(&unexpected_skip); + } + if !unexpected_fail.is_empty() { + differences = true; + println!( + "\n{} tests failed that did not fail in the baseline (these are \ + likely regressions):", + 
unexpected_fail.len() + ); + print_truncated_tests(&unexpected_fail); + } + + if differences { + Err(miette!( + help = format!( + "Evaluate each of the differences to determine whether they \ + are expected. If all differences are expected, copy the new \ + summary file {new_path:?} to {old_path:?} and commit the \ + change. If some differences are unexpected, fix them and try \ + another test run.\n\nAn example of an expected change would \ + be a test that is now passing after your changes fixed it. \ + An example of an unexpected change would be an unrelated \ + test that is now failing, which would be a regression." + ), + "Test results differed from baseline in {old_path:?}. The \ + differences are described above." + )) + } else { + Ok(()) + } +} diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs index c08580a9..59253b14 100644 --- a/xtask/src/complement/test2json.rs +++ b/xtask/src/complement/test2json.rs @@ -14,7 +14,7 @@ use std::{ use indicatif::{ProgressBar, ProgressStyle}; use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr}; use serde::Deserialize; -use strum::Display; +use strum::{Display, EnumString}; use xshell::{cmd, Shell}; use super::summary::{write_summary, TestResults}; @@ -43,7 +43,7 @@ pub(crate) fn run_complement( out: &Path, docker_image: &str, test_count: u64, -) -> Result<()> { +) -> Result { // TODO: handle SIG{INT,TERM} // TODO: XTASK_PATH variable, so that we don't need to pollute devshell with // go @@ -70,7 +70,7 @@ pub(crate) fn run_complement( ctx.handle_line(&line)?; } - Ok(()) + Ok(ctx.results) } /// Schema from @@ -103,7 +103,7 @@ enum GoTestEvent { OtherAction, } -#[derive(Copy, Clone, Display, Debug)] +#[derive(Copy, Clone, Display, EnumString, Eq, PartialEq, Debug)] #[strum(serialize_all = "UPPERCASE")] pub(crate) enum TestResult { Pass, From 102430cc79adc0cfea74638df52b76f8330bc8fb Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Fri, 21 Jun 2024 16:25:57 -0700 Subject: [PATCH 
10/14] CLI help for the complement script

---
 xtask/src/complement.rs | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/xtask/src/complement.rs b/xtask/src/complement.rs
index fa04eb59..8837cde1 100644
--- a/xtask/src/complement.rs
+++ b/xtask/src/complement.rs
@@ -16,6 +16,31 @@ use self::{
     test2json::{count_complement_tests, run_complement},
 };
 
+/// Runs complement tests, writes results to an output directory, and compares
+/// results with a baseline.
+///
+/// The output directory structure is
+///
+/// - `$out/summary.tsv`: a TSV file with the pass/fail/skip result for each
+///   test
+///
+/// - `$out/raw-log.jsonl`: raw output of the go test2json tool
+///
+/// - `$out/logs/...`: a text file named `$test.txt` for each test, containing
+///   the test logs.
+///
+/// These files will be updated incrementally during the test run. When the run
+/// is complete, the wrapper compares the results in `$out/summary.tsv`
+/// against the baseline result. If there are any differences, it exits with an
+/// error.
+///
+/// The expected workflow is to run this after making changes to Grapevine, to
+/// look for regressions in tests that were previously passing. If you make
+/// changes that fix an existing failing test, you need to make sure that they
+/// did not introduce any regressions, and then copy the `summary.tsv` file from
+/// your test run over the existing `complement-baseline.tsv` file in the
+/// repository root. The intent is that `complement-baseline.tsv` should always
+/// be in sync with the expected results from a test run.
 #[derive(clap::Args)]
 pub(crate) struct Args {
     /// Directory to write test results

From f76806655fca666649b270fa529ea834aae52691 Mon Sep 17 00:00:00 2001
From: Benjamin Lee
Date: Fri, 21 Jun 2024 17:08:10 -0700
Subject: [PATCH 11/14] xtask-specific binaries in GRAPEVINE_XTASK_PATH

Some possible alternatives to this:

- Keep putting them in PATH.
- Make xtask a nix derivation.
We would lose out on incremental compilation this way, and would end up recompiling xtask from scratch whenever something in the main package changed. - Have xtask call `nix build --inputs-from $toplevel nixpkgs#go` and such. Slow and tedious. --- nix/shell.nix | 12 ++++--- xtask/src/complement.rs | 11 +++--- xtask/src/complement/test2json.rs | 2 -- xtask/src/main.rs | 56 +++++++++++++++++++++++++++---- 4 files changed, 63 insertions(+), 18 deletions(-) diff --git a/nix/shell.nix b/nix/shell.nix index 01d6f430..5d5c1afd 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -6,6 +6,7 @@ , go , inputs , jq +, lib , lychee , markdownlint-cli , mdbook @@ -19,6 +20,13 @@ mkShell { # sources, and it can read this environment variable to do so. The # `rust-src` component is required in order for this to work. RUST_SRC_PATH = "${toolchain}/lib/rustlib/src/rust/library"; + + # See the doc comment on `use_xtask_path` in `xtask/src/main.rs`. + GRAPEVINE_XTASK_PATH = lib.makeBinPath [ + # Keep sorted + complement + go + ]; }; # Development tools @@ -36,10 +44,6 @@ mkShell { markdownlint-cli mdbook toolchain - - # TODO: don't pollute the devshell with these - go - complement ] ++ default.nativeBuildInputs diff --git a/xtask/src/complement.rs b/xtask/src/complement.rs index 8837cde1..f93cbc19 100644 --- a/xtask/src/complement.rs +++ b/xtask/src/complement.rs @@ -58,9 +58,8 @@ pub(crate) struct Args { } #[allow(clippy::needless_pass_by_value)] -pub(crate) fn main(args: Args) -> Result<()> { - let sh = Shell::new().unwrap(); - let toplevel = get_toplevel_path(&sh) +pub(crate) fn main(args: Args, sh: &Shell) -> Result<()> { + let toplevel = get_toplevel_path(sh) .wrap_err("failed to determine repository root directory")?; let baseline_path = args .baseline @@ -74,12 +73,12 @@ pub(crate) fn main(args: Args) -> Result<()> { create_out_dir(&args.out).wrap_err_with(|| { format!("error initializing output directory {:?}", args.out) })?; - let docker_image = load_docker_image(&sh, 
&toplevel).wrap_err( + let docker_image = load_docker_image(sh, &toplevel).wrap_err( "failed to build and load complement-grapevine docker image", )?; - let test_count = count_complement_tests(&sh, &docker_image) + let test_count = count_complement_tests(sh, &docker_image) .wrap_err("failed to determine total complement test count")?; - let results = run_complement(&sh, &args.out, &docker_image, test_count) + let results = run_complement(sh, &args.out, &docker_image, test_count) .wrap_err("failed to run complement tests")?; let summary_path = args.out.join("summary.tsv"); compare_summary(&baseline, &results, &baseline_path, &summary_path)?; diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs index 59253b14..217a2a4f 100644 --- a/xtask/src/complement/test2json.rs +++ b/xtask/src/complement/test2json.rs @@ -45,8 +45,6 @@ pub(crate) fn run_complement( test_count: u64, ) -> Result { // TODO: handle SIG{INT,TERM} - // TODO: XTASK_PATH variable, so that we don't need to pollute devshell with - // go let cmd = cmd!(sh, "go tool test2json complement.test -test.v=test2json") .env("COMPLEMENT_BASE_IMAGE", docker_image) .env("COMPLEMENT_SPAWN_HS_TIMEOUT", "5") diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 70fa34c2..e3f267d5 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -1,8 +1,10 @@ mod complement; -use std::process::ExitCode; +use std::{env, ffi::OsString, process::ExitCode}; use clap::{Parser, Subcommand}; +use miette::{miette, IntoDiagnostic, Result, WrapErr}; +use xshell::Shell; #[derive(Parser)] struct Args { @@ -16,11 +18,7 @@ enum Command { } fn main() -> ExitCode { - let args = Args::parse(); - let result = match args.command { - Command::Complement(args) => complement::main(args), - }; - let Err(e) = result else { + let Err(e) = try_main() else { return ExitCode::SUCCESS; }; // Include a leading newline because sometimes an error will occur in @@ -28,3 +26,49 @@ fn main() -> ExitCode { eprintln!("\n{e:?}"); 
ExitCode::FAILURE } + +fn try_main() -> Result<()> { + let args = Args::parse(); + let sh = new_shell()?; + match args.command { + Command::Complement(args) => complement::main(args, &sh), + } +} + +fn new_shell() -> Result { + let path = get_shell_path()?; + let sh = Shell::new() + .into_diagnostic() + .wrap_err("failed to initialize internal xshell::Shell wrapper")?; + sh.set_var("PATH", path); + Ok(sh) +} + +/// Returns the value to set the `PATH` environment variable to in +/// [`xshell::Shell`] instances. +/// +/// This function appends the paths from the `GRAPEVINE_XTASK_PATH` environment +/// variable to the existing value of `PATH` set in the xtask process. +/// +/// Executable dependencies that are only called by commands in xtask should be +/// added to `GRAPEVINE_XTASK_PATH` instead of `PATH` in the devshell, to avoid +/// polluting the devshell path with extra entries. +fn get_shell_path() -> Result { + let xtask_path = env::var_os("GRAPEVINE_XTASK_PATH").ok_or(miette!( + help = "This tool must be run from inside the Grapevine devshell. 
\ + Make sure you didn't interrupt direnv or something similar.", + "GRAPEVINE_XTASK_PATH environment variable is unset" + ))?; + if let Some(path) = env::var_os("PATH") { + let old_paths = env::split_paths(&path); + let xtask_paths = env::split_paths(&xtask_path); + env::join_paths(old_paths.chain(xtask_paths)) + .into_diagnostic() + .wrap_err( + "error constructing new PATH value to include the paths from \ + GRAPEVINE_XTASK_PATH", + ) + } else { + Ok(xtask_path) + } +} From 0eee282558e4441a28760469d4ae6cb5e52940b8 Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Sat, 22 Jun 2024 00:12:23 -0700 Subject: [PATCH 12/14] handle cancellation in complement wrapper What a mess lmao --- Cargo.lock | 44 ++++++- Cargo.toml | 2 + xtask/Cargo.toml | 2 + xtask/src/complement/test2json.rs | 200 ++++++++++++++++++++++++++++-- 4 files changed, 236 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8ce87cc6..a4c3a2dd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -391,6 +391,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + [[package]] name = "cfg_aliases" version = "0.2.1" @@ -845,7 +851,7 @@ dependencies = [ "image", "jsonwebtoken", "lru-cache", - "nix", + "nix 0.29.0", "num_cpus", "once_cell", "opentelemetry", @@ -1523,6 +1529,18 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "nix" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" +dependencies = [ + "bitflags 2.6.0", + "cfg-if", + "cfg_aliases 0.1.1", + "libc", +] + [[package]] name = "nix" version = "0.29.0" @@ -1531,7 +1549,7 @@ checksum = 
"71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ "bitflags 2.6.0", "cfg-if", - "cfg_aliases", + "cfg_aliases 0.2.1", "libc", ] @@ -1917,6 +1935,16 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "process-wrap" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38ee68ae331824036479c84060534b18254c864fa73366c58d86db3b7b811619" +dependencies = [ + "indexmap 2.5.0", + "nix 0.28.0", +] + [[package]] name = "prometheus" version = "0.13.4" @@ -2741,6 +2769,16 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "signal-hook" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" +dependencies = [ + "libc", + "signal-hook-registry", +] + [[package]] name = "signal-hook-registry" version = "1.4.2" @@ -3867,8 +3905,10 @@ dependencies = [ "clap", "indicatif", "miette", + "process-wrap", "serde", "serde_json", + "signal-hook", "strum", "xshell", ] diff --git a/Cargo.toml b/Cargo.toml index 9bfe1f7c..aae44a47 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -111,6 +111,7 @@ opentelemetry-prometheus = "0.17.0" opentelemetry_sdk = { version = "0.24.0", features = ["rt-tokio"] } parking_lot = "0.12.3" phf = { version = "0.11.2", features = ["macros"] } +process-wrap = { version = "8.0.2", default-features = false, features = ["std", "process-group"] } prometheus = "0.13.4" rand = "0.8.5" regex = "1.10.6" @@ -126,6 +127,7 @@ serde_html_form = "0.2.6" serde_json = { version = "1.0.128", features = ["raw_value"] } serde_yaml = "0.9.34" sha-1 = "0.10.1" +signal-hook = "0.3.17" strum = { version = "0.26.3", features = ["derive"] } thiserror = "1.0.64" thread_local = "1.1.8" diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 511e054f..85f2f210 100644 --- 
a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -8,9 +8,11 @@ rust-version.workspace = true [dependencies] clap.workspace = true miette.workspace = true +process-wrap.workspace = true indicatif.workspace = true serde.workspace = true serde_json.workspace = true +signal-hook.workspace = true strum.workspace = true xshell.workspace = true diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs index 217a2a4f..a49a38ed 100644 --- a/xtask/src/complement/test2json.rs +++ b/xtask/src/complement/test2json.rs @@ -6,14 +6,26 @@ use std::{ collections::BTreeMap, fs::{self, File}, io::{BufRead, BufReader, BufWriter, Seek, SeekFrom, Write}, + mem, panic, path::{Path, PathBuf}, process::{Command, Stdio}, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, Mutex, + }, + thread, time::Duration, }; use indicatif::{ProgressBar, ProgressStyle}; use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr}; +use process_wrap::std::{ProcessGroup, StdChildWrapper, StdCommandWrap}; use serde::Deserialize; +use signal_hook::{ + consts::signal::{SIGINT, SIGQUIT, SIGTERM}, + flag, + iterator::Signals, +}; use strum::{Display, EnumString}; use xshell::{cmd, Shell}; @@ -37,27 +49,195 @@ pub(crate) fn count_complement_tests( Ok(test_count) } -/// Runs complement test suite +/// Run complement tests. +/// +/// This function mostly deals with handling shutdown signals, while the actual +/// logic for running complement is in `run_complement_inner`, which is spawned +/// as a separate thread. This is necessary because the go `test2json` tool +/// ignores SIGTERM and SIGINT. Without signal handling on our end, terminating +/// the complement wrapper process would leave a dangling complement child +/// process running. +/// +/// The reason that `test2json` does this is that it does not implement any kind +/// of test cleanup, and so the developers decided that ignoring termination +/// signals entirely was safer. 
Running go unit tests outside of `test2json` +/// (and so without machine-readable output) does not have this limitation. +/// Unfortunately neither of these are an option for us. We need +/// machine-readable output to compare against the baseline result. Complement +/// runs can take 40+ minutes, so being able to cancel them is a requirement. +/// +/// Because we don't trigger any of the normal cleanup, we need to handle +/// dangling docker containers ourselves. pub(crate) fn run_complement( sh: &Shell, out: &Path, docker_image: &str, test_count: u64, ) -> Result { - // TODO: handle SIG{INT,TERM} + let term_signals = [SIGTERM, SIGINT, SIGQUIT]; + + let term_now = Arc::new(AtomicBool::new(false)); + for sig in &term_signals { + // Terminate immediately if `term_now` is true and we receive a + // terminating signal + flag::register_conditional_shutdown(*sig, 1, Arc::clone(&term_now)) + .into_diagnostic() + .wrap_err("error registering signal handler")?; + } + + let mut signals = Signals::new(term_signals).unwrap(); + + let state = Mutex::new(ComplementRunnerState::Startup); + let signals_handle = signals.handle(); + + let result = thread::scope(|s| { + let state_ref = &state; + let cloned_sh = sh.clone(); + let thread_handle = s.spawn(move || { + let panic_result = panic::catch_unwind(|| { + run_complement_inner( + &cloned_sh, + out, + docker_image, + test_count, + state_ref, + ) + }); + // Stop the signal-handling loop, even if we panicked + signals_handle.close(); + match panic_result { + Ok(result) => result, + Err(panic) => panic::resume_unwind(panic), + } + }); + + let canceled = if let Some(signal) = signals.forever().next() { + let description = match signal { + SIGTERM => "SIGTERM", + SIGINT => "ctrl+c", + SIGQUIT => "SIGQUIT", + _ => unreachable!(), + }; + eprintln!( + "Received {description}, stopping complement run. 
Send \ + {description} a second time to terminate without cleaning \ + up, which may leave dangling processes and docker containers" + ); + term_now.store(true, Ordering::Relaxed); + + { + let mut state = state.lock().unwrap(); + let old_state = + mem::replace(&mut *state, ComplementRunnerState::Shutdown); + match old_state { + ComplementRunnerState::Startup => (), + ComplementRunnerState::Shutdown => unreachable!(), + ComplementRunnerState::Running(mut child) => { + // Killing the child process should terminate the + // complement runner thread in a + // bounded amount of time, because it will cause the + // stdout reader to return EOF. + child.kill().unwrap(); + } + } + } + + // TODO: kill dangling docker containers + eprintln!( + "WARNING: complement may have left dangling docker \ + containers. Cleanup for these is planned, but has not been \ + implemented yet. You need to identify and kill them manually" + ); + + true + } else { + // hit this branch if the signal handler is closed by the complement + // runner thread. This means the complement run finished + // without being canceled. + false + }; + + match thread_handle.join() { + Ok(result) => { + if canceled { + Err(miette!("complement run was canceled")) + } else { + result + } + } + Err(panic_value) => panic::resume_unwind(panic_value), + } + }); + + // From this point on, terminate immediately when signalled + term_now.store(true, Ordering::Relaxed); + + result +} + +/// Possible states for the complement runner thread. +/// +/// The current state should be protected by a mutex, where state changes are +/// only performed while the mutex is locked. This is to prevent a race +/// condition where the main thread handles a shutdown signal at the same time +/// that the complement runner thread is starting the child process, and so the +/// main thread fails to kill the child process. 
+///
+/// Valid state transitions:
+///
+/// - `Startup` -> `Running`
+/// - `Startup` -> `Shutdown`
+/// - `Running` -> `Shutdown`
+#[derive(Debug)]
+enum ComplementRunnerState {
+    /// The complement child process has not been started yet
+    Startup,
+    /// The complement child process is running, and we have not yet received
+    /// a shutdown signal.
+    Running(Box<dyn StdChildWrapper>),
+    /// We have received a shutdown signal.
+    Shutdown,
+}
+
+/// Spawn complement child process and handle its output
+///
+/// This is the "complement runner" thread, spawned by the [`run_complement`]
+/// function.
+fn run_complement_inner(
+    sh: &Shell,
+    out: &Path,
+    docker_image: &str,
+    test_count: u64,
+    state: &Mutex<ComplementRunnerState>,
+) -> Result<TestResults> {
     let cmd = cmd!(sh, "go tool test2json complement.test -test.v=test2json")
         .env("COMPLEMENT_BASE_IMAGE", docker_image)
         .env("COMPLEMENT_SPAWN_HS_TIMEOUT", "5")
         .env("COMPLEMENT_ALWAYS_PRINT_SERVER_LOGS", "1");
     eprintln!("$ {cmd}");
-    let child = Command::from(cmd)
-        .stdout(Stdio::piped())
-        .spawn()
-        .into_diagnostic()
-        .wrap_err("error spawning complement process")?;
-    let stdout = child
-        .stdout
-        .expect("child process spawned with piped stdout should have stdout");
+
+    let stdout = {
+        let mut state = state.lock().unwrap();
+        match &*state {
+            ComplementRunnerState::Startup => (),
+            ComplementRunnerState::Running(_) => unreachable!(),
+            ComplementRunnerState::Shutdown => {
+                return Err(miette!("complement run was canceled"))
+            }
+        }
+        let mut cmd = Command::from(cmd);
+        cmd.stdout(Stdio::piped());
+        let mut child = StdCommandWrap::from(cmd)
+            .wrap(ProcessGroup::leader())
+            .spawn()
+            .into_diagnostic()
+            .wrap_err("error spawning complement process")?;
+        let stdout = child.stdout().take().expect(
+            "child process spawned with piped stdout should have stdout",
+        );
+        *state = ComplementRunnerState::Running(child);
+        stdout
+    };
     let lines = BufReader::new(stdout).lines();
     let mut ctx = TestContext::new(out, test_count)?;

From 
e6f5aa615008def3d06ea8c462f9da14fc3f1a1e Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Sat, 22 Jun 2024 00:22:33 -0700 Subject: [PATCH 13/14] write raw log file in complement wrapper --- xtask/src/complement/test2json.rs | 31 +++++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs index a49a38ed..620622c0 100644 --- a/xtask/src/complement/test2json.rs +++ b/xtask/src/complement/test2json.rs @@ -247,8 +247,7 @@ fn run_complement_inner( .wrap_err("error reading output from complement process")?; ctx.handle_line(&line)?; } - - Ok(ctx.results) + ctx.finish() } /// Schema from @@ -300,6 +299,7 @@ struct TestContext { // a non-error path, and the file is left in an inconsistent state on an // error anyway. summary_file: BufWriter, + raw_log_file: BufWriter, log_dir: PathBuf, results: TestResults, } @@ -343,6 +343,11 @@ impl TestContext { .wrap_err("failed to create summary file in output dir")?; let summary_file = BufWriter::new(summary_file); + let raw_log_file = File::create(out.join("raw-log.jsonl")) + .into_diagnostic() + .wrap_err("failed to create raw log file in output dir")?; + let raw_log_file = BufWriter::new(raw_log_file); + let log_dir = out.join("logs"); let ctx = TestContext { @@ -352,6 +357,7 @@ impl TestContext { skip_count: 0, log_dir, summary_file, + raw_log_file, results: BTreeMap::new(), }; @@ -359,6 +365,26 @@ impl TestContext { Ok(ctx) } + fn finish(mut self) -> Result { + self.raw_log_file + .flush() + .into_diagnostic() + .wrap_err("error flushing writes to raw log file")?; + Ok(self.results) + } + + fn write_raw_log_line(&mut self, line: &str) -> Result<()> { + self.raw_log_file + .write_all(line.as_bytes()) + .into_diagnostic() + .wrap_err("error writing line to raw log file")?; + self.raw_log_file + .write_all(b"\n") + .into_diagnostic() + .wrap_err("error writing line to raw log file")?; + Ok(()) + } + fn update_progress(&self) { 
self.pb .set_position(self.pass_count + self.fail_count + self.skip_count); @@ -473,6 +499,7 @@ impl TestContext { /// Processes a line of output from `test2json` fn handle_line(&mut self, line: &str) -> Result<()> { + self.write_raw_log_line(line)?; match serde_json::from_str(line) { Ok(event) => self.handle_event(event)?, Err(e) => { From 2e03b39cdd855a836e42c91ddbe60fe2aaa3ec10 Mon Sep 17 00:00:00 2001 From: Benjamin Lee Date: Sat, 22 Jun 2024 14:21:50 -0700 Subject: [PATCH 14/14] kill dangling docker containers in complement run This is a stupid hack and I hate it, but we do need to support concurrent complement runs if we want to do this in CI. --- Cargo.lock | 1 + xtask/Cargo.toml | 1 + xtask/src/complement.rs | 26 ++++++++++- xtask/src/complement/docker.rs | 75 +++++++++++++++++++++++++++++++ xtask/src/complement/test2json.rs | 37 +++++++-------- 5 files changed, 117 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a4c3a2dd..27ca6162 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3906,6 +3906,7 @@ dependencies = [ "indicatif", "miette", "process-wrap", + "rand", "serde", "serde_json", "signal-hook", diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 85f2f210..77bfc3e5 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -10,6 +10,7 @@ clap.workspace = true miette.workspace = true process-wrap.workspace = true indicatif.workspace = true +rand.workspace = true serde.workspace = true serde_json.workspace = true signal-hook.workspace = true diff --git a/xtask/src/complement.rs b/xtask/src/complement.rs index f93cbc19..82b732ac 100644 --- a/xtask/src/complement.rs +++ b/xtask/src/complement.rs @@ -3,7 +3,8 @@ use std::{ path::{Path, PathBuf}, }; -use miette::{miette, IntoDiagnostic, Result, WrapErr}; +use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr}; +use serde::Deserialize; use xshell::{cmd, Shell}; mod docker; @@ -11,7 +12,7 @@ mod summary; mod test2json; use self::{ - docker::load_docker_image, + 
docker::{load_docker_image, retag_docker_image}, summary::{compare_summary, read_summary}, test2json::{count_complement_tests, run_complement}, }; @@ -76,6 +77,8 @@ pub(crate) fn main(args: Args, sh: &Shell) -> Result<()> { let docker_image = load_docker_image(sh, &toplevel).wrap_err( "failed to build and load complement-grapevine docker image", )?; + let docker_image = retag_docker_image(sh, &docker_image) + .wrap_err("failed to retag docker image")?; let test_count = count_complement_tests(sh, &docker_image) .wrap_err("failed to determine total complement test count")?; let results = run_complement(sh, &args.out, &docker_image, test_count) @@ -86,6 +89,25 @@ pub(crate) fn main(args: Args, sh: &Shell) -> Result<()> { Ok(()) } +/// Deserialize a single-line json string using [`serde_json::from_str`] and +/// convert the error to a miette diagnostic. +/// +/// # Panics +/// Panics if `line` contains a newline. +fn from_json_line<'a, T: Deserialize<'a>>(line: &'a str) -> Result<T> { + assert!( + !line.contains('\n'), + "from_json_line requires single-line json source" + ); + serde_json::from_str(line).map_err(|e| { + // Needs single-line input so that we don't have to deal with converting + // line/column to a span offset. + let offset = e.column() - 1; + let label = LabeledSpan::at_offset(offset, "error here"); + miette!(labels = vec![label], "{e}").with_source_code(line.to_owned()) + }) +} + /// Ensures that output directory exists and is empty /// /// If the directory does not exist, it will be created. 
If it is not empty, an diff --git a/xtask/src/complement/docker.rs b/xtask/src/complement/docker.rs index ada0c8a9..7e436844 100644 --- a/xtask/src/complement/docker.rs +++ b/xtask/src/complement/docker.rs @@ -3,8 +3,12 @@ use std::path::Path; use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr}; +use rand::{distributions::Alphanumeric, thread_rng, Rng}; +use serde::Deserialize; use xshell::{cmd, Shell}; +use super::from_json_line; + /// Build the 'grapevine-complement' OCI image and load it into the docker /// daemon. pub(crate) fn load_docker_image(sh: &Shell, toplevel: &Path) -> Result<String> { @@ -56,3 +60,74 @@ pub(crate) fn load_docker_image(sh: &Shell, toplevel: &Path) -> Result<String> { .to_owned(); Ok(docker_image) } + +/// Retags the docker image with a random tag. Returns the new image reference. +/// +/// This is useful so that we can uniquely identify the set of docker containers +/// spawned by a complement run. Without using a unique tag, there is no way to +/// determine which docker containers to kill if a run is cancelled, since other +/// concurrent complement runs may have created containers with the same image. +pub(crate) fn retag_docker_image(sh: &Shell, image: &str) -> Result<String> { + let mut rng = thread_rng(); + let new_tag: String = + (0..16).map(|_| char::from(rng.sample(Alphanumeric))).collect(); + let (repo, _old_tag) = image.split_once(':').ok_or_else(|| { + miette!( + "Docker image reference was not in the expected format. Expected \ + \"{{repository}}:{{tag}}\", got {image:?}" + ) + })?; + let new_image = format!("{repo}:{new_tag}"); + cmd!(sh, "docker image tag {image} {new_image}").run().into_diagnostic()?; + Ok(new_image) +} + +/// Kills all docker containers using a particular image. +/// +/// This can be used to clean up dangling docker containers after a cancelled +/// complement run, but it's important that the image reference be unique. See +/// the [`retag_docker_image`] function for a discussion of this. 
+pub(crate) fn kill_docker_containers(sh: &Shell, image: &str) -> Result<()> { + #[derive(Deserialize)] + struct ContainerInfo { + #[serde(rename = "ID")] + id: String, + #[serde(rename = "Image")] + image: String, + } + + // --filter ancestor={image} doesn't work here, because images with the same + // image id will be picked up even if their image reference (repo:tag) are + // different. We need to list all the containers and filter them ourselves. + let containers = cmd!(sh, "docker container ls --format json") + .read() + .into_diagnostic() + .wrap_err("error listing running docker containers")?; + let containers = containers + .lines() + .map(from_json_line) + .collect::<Result<Vec<ContainerInfo>, _>>() + .wrap_err( + "error parsing docker container info from 'docker container ls' \ + output", + )?; + + let our_containers = containers + .into_iter() + .filter(|container| container.image == image) + .map(|container| container.id) + .collect::<Vec<_>>(); + + if !our_containers.is_empty() { + // Ignore non-zero exit status because 'docker kill' will fail if + // containers already exited before sending the signal, which is + // fine. 
+ cmd!(sh, "docker kill --signal=SIGKILL {our_containers...}") + .ignore_status() + .run() + .into_diagnostic() + .wrap_err("error killing docker containers")?; + } + + Ok(()) +} diff --git a/xtask/src/complement/test2json.rs b/xtask/src/complement/test2json.rs index 620622c0..c37f0f9c 100644 --- a/xtask/src/complement/test2json.rs +++ b/xtask/src/complement/test2json.rs @@ -18,7 +18,7 @@ use std::{ }; use indicatif::{ProgressBar, ProgressStyle}; -use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr}; +use miette::{miette, IntoDiagnostic, Result, WrapErr}; use process_wrap::std::{ProcessGroup, StdChildWrapper, StdCommandWrap}; use serde::Deserialize; use signal_hook::{ @@ -29,7 +29,11 @@ use signal_hook::{ use strum::{Display, EnumString}; use xshell::{cmd, Shell}; -use super::summary::{write_summary, TestResults}; +use super::{ + docker::kill_docker_containers, + from_json_line, + summary::{write_summary, TestResults}, +}; /// Returns the total number of complement tests that will be run /// @@ -142,12 +146,9 @@ pub(crate) fn run_complement( } } - // TODO: kill dangling docker containers - eprintln!( - "WARNING: complement may have left dangling docker \ - containers. Cleanup for these is planned, but has not been \ - implemented yet. You need to identify and kill them manually" - ); + kill_docker_containers(sh, docker_image).wrap_err( + "failed to kill dangling complement docker containers", + )?; true } else { @@ -500,20 +501,14 @@ impl TestContext { /// Processes a line of output from `test2json` fn handle_line(&mut self, line: &str) -> Result<()> { self.write_raw_log_line(line)?; - match serde_json::from_str(line) { + let result = from_json_line(line).wrap_err( + "failed to parse go test2json event from complement tests. 
\ + Ignoring this event", + ); + match result { Ok(event) => self.handle_event(event)?, - Err(e) => { - let label = - LabeledSpan::at_offset(e.column() - 1, "error here"); - let report = miette!(labels = vec![label], "{e}",) - .with_source_code(line.to_owned()) - .wrap_err( - "failed to parse go test2json event from complement \ - tests. Ignoring this event.", - ); - eprintln!("{report:?}"); - } - }; + Err(e) => eprintln!("{e:?}"), + } Ok(()) } }