diff --git a/.github/workflows/agnix-config-validate-report.yml b/.github/workflows/agnix-config-validate-report.yml index d8d454b0..db0c87bf 100644 --- a/.github/workflows/agnix-config-validate-report.yml +++ b/.github/workflows/agnix-config-validate-report.yml @@ -2,11 +2,6 @@ name: Agnix config validation report on: push: - branches: - - main - pull_request: - branches: - - main permissions: contents: read diff --git a/.github/workflows/cli-integration-tests.yml b/.github/workflows/cli-integration-tests.yml new file mode 100644 index 00000000..1c0ac9e9 --- /dev/null +++ b/.github/workflows/cli-integration-tests.yml @@ -0,0 +1,36 @@ +name: CLI integration tests + +on: + push: + +permissions: + contents: read + +jobs: + cli-integration-tests: + name: Run cli integration tests + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + timeout-minutes: 25 + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Install Nix + if: matrix.os != 'windows-latest' + uses: cachix/install-nix-action@v31 + + - name: Run CLI integration test app + if: matrix.os != 'windows-latest' + run: nix run .#cli-integration-tests + + - name: Install Rust toolchain + if: matrix.os == 'windows-latest' + uses: dtolnay/rust-toolchain@stable + + - name: Run setup integration tests on Windows + if: matrix.os == 'windows-latest' + working-directory: cli + run: cargo test --test setup_integration diff --git a/.github/workflows/nix-flake-check.yml b/.github/workflows/nix-flake-check.yml index b776a2c3..5b1466b9 100644 --- a/.github/workflows/nix-flake-check.yml +++ b/.github/workflows/nix-flake-check.yml @@ -2,11 +2,6 @@ name: Nix flake check on: push: - branches: - - main - pull_request: - branches: - - main permissions: contents: read diff --git a/.github/workflows/pkl-generated-parity.yml b/.github/workflows/pkl-generated-parity.yml index fe0d12da..dd077a56 100644 --- a/.github/workflows/pkl-generated-parity.yml +++ 
b/.github/workflows/pkl-generated-parity.yml @@ -2,11 +2,6 @@ name: Pkl generated parity on: push: - branches: - - main - pull_request: - branches: - - main permissions: contents: read diff --git a/.github/workflows/workflow-token-count.yml b/.github/workflows/workflow-token-count.yml index 5eb0a1fe..d403c072 100644 --- a/.github/workflows/workflow-token-count.yml +++ b/.github/workflows/workflow-token-count.yml @@ -2,11 +2,6 @@ name: Workflow token count on: push: - branches: - - main - pull_request: - branches: - - main permissions: contents: read diff --git a/cli/Cargo.lock b/cli/Cargo.lock index 6578820a..f7b302fc 100644 --- a/cli/Cargo.lock +++ b/cli/Cargo.lock @@ -94,6 +94,39 @@ dependencies = [ "rustversion", ] +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -106,6 +139,53 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "axum" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +dependencies = [ + "async-trait", 
+ "axum-core", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower 0.5.3", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", +] + [[package]] name = "base64" version = "0.22.1" @@ -441,6 +521,12 @@ dependencies = [ "syn", ] +[[package]] +name = "downcast-rs" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" + [[package]] name = "dyn-clone" version = "1.0.20" @@ -522,7 +608,7 @@ checksum = "4e7f34442dbe69c60fe8eaf58a8cafff81a1f278816d8ab4db255b3bef4ac3c4" dependencies = [ "getrandom 0.3.4", "libm", - "rand", + "rand 0.9.2", "siphasher", ] @@ -532,6 +618,17 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "filedescriptor" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e40758ed24c9b2eeb76c35fb0aebc66c626084edd827e07e1552279814c6682d" +dependencies = [ + "libc", + "thiserror 1.0.69", + "winapi", +] + [[package]] name = "find-msvc-tools" version = "0.1.9" @@ -549,6 +646,12 @@ dependencies = [ "serde", ] +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + [[package]] name = 
"foldhash" version = "0.1.5" @@ -590,12 +693,34 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + [[package]] name = "futures-io" version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "futures-sink" version = "0.3.32" @@ -616,6 +741,7 @@ checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-core", "futures-io", + "futures-macro", "futures-sink", "futures-task", "memchr", @@ -731,6 +857,31 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap 2.13.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hashbrown" version 
= "0.15.5" @@ -821,6 +972,12 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + [[package]] name = "hyper" version = "1.8.1" @@ -831,9 +988,11 @@ dependencies = [ "bytes", "futures-channel", "futures-core", + "h2", "http", "http-body", "httparse", + "httpdate", "itoa", "pin-project-lite", "pin-utils", @@ -842,6 +1001,19 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-timeout" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +dependencies = [ + "hyper", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + [[package]] name = "hyper-util" version = "0.1.20" @@ -859,7 +1031,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2", + "socket2 0.6.2", "tokio", "tower-service", "tracing", @@ -997,6 +1169,16 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + [[package]] name = "indexmap" version = "2.13.0" @@ -1041,7 +1223,7 @@ version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86" dependencies = [ - "memoffset", + "memoffset 0.9.1", ] [[package]] @@ -1055,6 +1237,15 @@ dependencies = [ "libc", ] +[[package]] +name = "ioctl-rs" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f7970510895cee30b3e9128319f2cefd4bde883a39f38baa279567ba3a7eb97d" +dependencies = [ + "libc", +] + [[package]] name = "ipnet" version = "2.12.0" @@ -1264,12 +1455,27 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + [[package]] name = "memchr" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + [[package]] name = "memoffset" version = "0.9.1" @@ -1310,6 +1516,12 @@ dependencies = [ "libmimalloc-sys", ] +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -1348,6 +1560,20 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "nix" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f346ff70e7dbfd675fe90590b92d59ef2de15a8779ae305ebcbfd3f0caf59be4" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if", + "libc", + "memoffset 0.6.5", + "pin-utils", +] + [[package]] name = "nom" version = "7.1.3" @@ -1464,6 +1690,88 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "opentelemetry" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "236e667b670a5cdf90c258f5a55794ec5ac5027e960c224bff8367a59e1e6426" 
+dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "opentelemetry-http" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8863faf2910030d139fb48715ad5ff2f35029fc5f244f6d5f689ddcf4d26253" +dependencies = [ + "async-trait", + "bytes", + "http", + "opentelemetry", + "reqwest", + "tracing", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bef114c6d41bea83d6dc60eb41720eedd0261a67af57b66dd2b84ac46c01d91" +dependencies = [ + "async-trait", + "futures-core", + "http", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-proto", + "opentelemetry_sdk", + "prost 0.13.5", + "reqwest", + "thiserror 2.0.18", + "tokio", + "tonic", + "tracing", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f8870d3024727e99212eb3bb1762ec16e255e3e6f58eeb3dc8db1aa226746d" +dependencies = [ + "opentelemetry", + "opentelemetry_sdk", + "prost 0.13.5", + "tonic", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84dfad6042089c7fc1f6118b7040dc2eb4ab520abbf410b79dc481032af39570" +dependencies = [ + "async-trait", + "futures-channel", + "futures-executor", + "futures-util", + "glob", + "opentelemetry", + "percent-encoding", + "rand 0.8.5", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tracing", +] + [[package]] name = "outref" version = "0.5.2" @@ -1514,6 +1822,26 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" +[[package]] +name = "pin-project" +version = "1.1.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1749c7ed4bcaf4c3d0a3efc28538844fb29bcdd7d2b67b2be7e20ba861ff517" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b20ed30f105399776b9c883e68e536ef602a16ae6f596d2c473591d6ad64c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "pin-project-lite" version = "0.2.17" @@ -1558,6 +1886,27 @@ dependencies = [ "universal-hash", ] +[[package]] +name = "portable-pty" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "806ee80c2a03dbe1a9fb9534f8d19e4c0546b790cde8fd1fea9d6390644cb0be" +dependencies = [ + "anyhow", + "bitflags 1.3.2", + "downcast-rs", + "filedescriptor", + "lazy_static", + "libc", + "log", + "nix", + "serial", + "shared_library", + "shell-words", + "winapi", + "winreg", +] + [[package]] name = "potential_utf" version = "0.1.4" @@ -1601,6 +1950,16 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive 0.13.5", +] + [[package]] name = "prost" version = "0.14.3" @@ -1608,7 +1967,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568" dependencies = [ "bytes", - "prost-derive", + "prost-derive 0.14.3", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools 0.14.0", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -1639,16 +2011,37 @@ version = "5.3.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + [[package]] name = "rand" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ - "rand_chacha", + "rand_chacha 0.9.0", "rand_core 0.9.5", ] +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + [[package]] name = "rand_chacha" version = "0.9.0" @@ -1783,7 +2176,7 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "tokio", - "tower", + "tower 0.5.3", "tower-http", "tower-service", "url", @@ -1870,9 +2263,18 @@ dependencies = [ "inquire", "jsonschema", "lexopt", + "libc", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry_sdk", + "portable-pty", + "regex", "serde_json", "sha2", "tokio", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", "turso", ] @@ -1943,6 +2345,48 @@ dependencies = [ "serde", ] +[[package]] +name = "serial" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1237a96570fc377c13baa1b88c7589ab66edced652e43ffb17088f003db3e86" +dependencies = [ + "serial-core", + "serial-unix", + "serial-windows", +] + +[[package]] +name = "serial-core" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f46209b345401737ae2125fe5b19a77acce90cd53e1658cda928e4fe9a64581" +dependencies = [ + "libc", +] + +[[package]] +name = 
"serial-unix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f03fbca4c9d866e24a459cbca71283f545a37f8e3e002ad8c70593871453cab7" +dependencies = [ + "ioctl-rs", + "libc", + "serial-core", + "termios", +] + +[[package]] +name = "serial-windows" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15c6d3b776267a75d31bbdfd5d36c0ca051251caafc285827052bc53bcdc8162" +dependencies = [ + "libc", + "serial-core", +] + [[package]] name = "sha1_smol" version = "1.0.1" @@ -1969,6 +2413,22 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "shared_library" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a9e7e0f2bfae24d8a5b5a66c5b257a83c7412304311512a0c054cd5e619da11" +dependencies = [ + "lazy_static", + "libc", +] + +[[package]] +name = "shell-words" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc6fe69c597f9c37bfeeeeeb33da3530379845f10be461a66d16d03eca2ded77" + [[package]] name = "shlex" version = "1.3.0" @@ -2033,6 +2493,16 @@ version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "socket2" version = "0.6.2" @@ -2127,13 +2597,42 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "termios" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5d9cf598a6d7ce700a4e6a9199da127e6819a61e64b68609683cc9a01b5683a" +dependencies = [ + "libc", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + [[package]] name = "thiserror" version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" dependencies = [ - "thiserror-impl", + "thiserror-impl 2.0.18", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -2203,13 +2702,100 @@ version = "1.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" dependencies = [ + "bytes", "libc", "mio 1.1.1", "pin-project-lite", - "socket2", + "socket2 0.6.2", + "tokio-macros", "windows-sys 0.61.2", ] +[[package]] +name = "tokio-macros" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-stream" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tonic" +version = "0.12.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64", + "bytes", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost 0.13.5", + "socket2 0.5.10", + "tokio", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.5.3" @@ -2238,7 +2824,7 @@ dependencies = [ "http-body", "iri-string", "pin-project-lite", - "tower", + "tower 0.5.3", "tower-layer", "tower-service", ] @@ -2273,7 +2859,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" dependencies = [ "crossbeam-channel", - "thiserror", + "thiserror 2.0.18", "time", "tracing-subscriber", ] @@ -2310,6 +2896,24 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-opentelemetry" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "721f2d2569dce9f3dfbbddee5906941e953bfcdf736a62da3377f5751650cc36" +dependencies = [ + "js-sys", + "once_cell", + "opentelemetry", + "opentelemetry_sdk", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", + "web-time", +] + [[package]] name = "tracing-subscriber" version = "0.3.22" @@ -2341,7 +2945,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1f2fe423c2c954948babb36edda12b737e321d8541d4eae519694f7d512ecab6" dependencies = [ "mimalloc", - "thiserror", + "thiserror 2.0.18", "tracing", "tracing-subscriber", "turso_sdk_kit", @@ -2380,7 +2984,7 @@ dependencies = [ "parking_lot", "paste", "polling", - "rand", + "rand 0.9.2", "rapidhash", "regex", "regex-syntax", @@ -2392,7 +2996,7 @@ dependencies = [ "strum", "strum_macros", "tempfile", - "thiserror", + "thiserror 2.0.18", "tracing", "tracing-subscriber", "turso_ext", @@ -2436,7 +3040,7 @@ dependencies = [ "miette", "strum", "strum_macros", - "thiserror", + "thiserror 2.0.18", "turso_macros", ] @@ -2477,11 +3081,11 @@ dependencies = [ "genawaiter", "http", "libc", - "prost", + "prost 0.14.3", "roaring", "serde", "serde_json", - "thiserror", + "thiserror 2.0.18", "tracing", "turso_core", "turso_parser", @@ -2513,7 +3117,7 @@ version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ea3136b675547379c4bd395ca6b938e5ad3c3d20fad76e7fe85f9e0d011419c" dependencies = [ - "rand", + "rand 0.9.2", ] [[package]] @@ -2739,7 +3343,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" dependencies = [ "anyhow", - "indexmap", + "indexmap 2.13.0", "wasm-encoder", "wasmparser", ] @@ -2752,7 +3356,7 @@ checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" dependencies = [ "bitflags 2.11.0", "hashbrown 0.15.5", - "indexmap", + "indexmap 2.13.0", "semver", ] @@ -2766,6 +3370,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "which" version = "4.4.2" @@ -2868,6 +3482,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-sys" +version = "0.52.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-sys" version = "0.59.0" @@ -3081,6 +3704,15 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi", +] + [[package]] name = "wit-bindgen" version = "0.51.0" @@ -3109,7 +3741,7 @@ checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" dependencies = [ "anyhow", "heck", - "indexmap", + "indexmap 2.13.0", "prettyplease", "syn", "wasm-metadata", @@ -3140,7 +3772,7 @@ checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" dependencies = [ "anyhow", "bitflags 2.11.0", - "indexmap", + "indexmap 2.13.0", "log", "serde", "serde_derive", @@ -3159,7 +3791,7 @@ checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" dependencies = [ "anyhow", "id-arena", - "indexmap", + "indexmap 2.13.0", "log", "semver", "serde", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 7eb23c70..9e461920 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -20,7 +20,18 @@ lexopt = "0.3" serde_json = "1" sha2 = "0.10" tokio = { version = "1", default-features = false, features = ["rt"] } +tracing = "0.1" +tracing-opentelemetry = "0.29" +tracing-subscriber = { version = "0.3", features = ["registry"] } turso = "0" +opentelemetry = { version = "0.28", features = ["trace"] } +opentelemetry_sdk = { version = "0.28", features = ["rt-tokio"] } +opentelemetry-otlp = { version = "0.28", features = ["grpc-tonic", "http-proto", "trace"] } [dev-dependencies] jsonschema = "0.33" +portable-pty = 
"0.8" +regex = "1" + +[target.'cfg(unix)'.dev-dependencies] +libc = "0.2" diff --git a/cli/README.md b/cli/README.md index 44af2d45..2c9d6327 100644 --- a/cli/README.md +++ b/cli/README.md @@ -14,11 +14,28 @@ still deferred. ```bash cargo run --manifest-path cli/Cargo.toml -- --help cargo run --manifest-path cli/Cargo.toml -- setup +cargo run --manifest-path cli/Cargo.toml -- setup --opencode --non-interactive --hooks +cargo run --manifest-path cli/Cargo.toml -- setup --hooks --repo ../demo-repo cargo run --manifest-path cli/Cargo.toml -- doctor +cargo run --manifest-path cli/Cargo.toml -- doctor --format json cargo run --manifest-path cli/Cargo.toml -- mcp cargo run --manifest-path cli/Cargo.toml -- hooks pre-commit cargo run --manifest-path cli/Cargo.toml -- hooks commit-msg .git/COMMIT_EDITMSG cargo run --manifest-path cli/Cargo.toml -- sync +cargo run --manifest-path cli/Cargo.toml -- version --format json +cargo run --manifest-path cli/Cargo.toml -- completion --shell bash +``` + +### Agent-oriented command examples + +Canonical examples mirrored from `sce --help` and `sce setup --help`: + +```bash +sce setup +sce setup --opencode --non-interactive --hooks +sce setup --hooks --repo ../demo-repo +sce setup --opencode --non-interactive --hooks && sce doctor --format json +sce version --format json ``` ## Install and release paths @@ -71,6 +88,11 @@ Crates.io is prepared but intentionally disabled in this phase. - reports actionable diagnostics for missing or misconfigured hooks - `mcp` is a placeholder for future file-cache tooling contracts (`cache-put`/`cache-get`). 
+- `completion` is implemented and generates deterministic shell completion + scripts: + - `sce completion --shell bash` + - `sce completion --shell zsh` + - `sce completion --shell fish` - `hooks` is implemented for local Git hook execution: - `sce hooks pre-commit` captures staged-only checkpoint attribution - `sce hooks commit-msg ` enforces canonical co-author trailer @@ -83,6 +105,30 @@ Crates.io is prepared but intentionally disabled in this phase. - `sync` is a placeholder that runs a local Turso smoke check, then reports a deferred cloud-sync plan. +### Shell completion install examples + +```bash +# Bash (current shell) +eval "$(sce completion --shell bash)" + +# Zsh (persist for future shells) +sce completion --shell zsh > ~/.zsh/completions/_sce + +# Fish (persist for future shells) +sce completion --shell fish > ~/.config/fish/completions/sce.fish +``` + +### Observability baseline + +- Lifecycle logs remain stderr-only and deterministic through + `SCE_LOG_LEVEL` (`error|warn|info|debug`) and `SCE_LOG_FORMAT` (`text|json`). +- OpenTelemetry export can be enabled with `SCE_OTEL_ENABLED=true`. +- OTLP exporter configuration is env-addressable: + - `OTEL_EXPORTER_OTLP_ENDPOINT` (default: `http://127.0.0.1:4317`) + - `OTEL_EXPORTER_OTLP_PROTOCOL` (`grpc` or `http/protobuf`, default `grpc`) +- Invalid OpenTelemetry configuration fails invocation validation with + actionable error messages. 
+ ## Safety and limitations - `mcp` and `sync` remain placeholders and do not perform MCP transport or @@ -110,9 +156,15 @@ cargo test --manifest-path cli/Cargo.toml cargo build --manifest-path cli/Cargo.toml ``` -Run repository flake checks (includes targeted setup command-surface checks from -`cli/`): +Run repository flake checks (includes targeted setup command-surface and setup +integration checks from `cli/`): ```bash nix flake check ``` + +Run the setup integration suite through the repository flake app entrypoint: + +```bash +nix run .#cli-integration-tests +``` diff --git a/cli/flake.nix b/cli/flake.nix index 6fb1c1d9..eedd5511 100644 --- a/cli/flake.nix +++ b/cli/flake.nix @@ -50,7 +50,11 @@ lockFile = ../cli/Cargo.lock; }; - nativeBuildInputs = [ rustToolchain ]; + nativeBuildInputs = [ + rustToolchain + ]; + + nativeCheckInputs = [ pkgs.git ]; doCheck = false; }; in @@ -115,6 +119,39 @@ ''; }; + checks.cli-setup-integration = rustPlatform.buildRustPackage { + pname = "sce-cli-setup-integration-check"; + version = "0.1.0"; + inherit src; + sourceRoot = "source/cli"; + + cargoLock = { + lockFile = ../cli/Cargo.lock; + }; + + nativeBuildInputs = [ rustToolchain ]; + + buildPhase = '' + runHook preBuild + runHook postBuild + ''; + + checkPhase = '' + runHook preCheck + + export PATH="${pkgs.git}/bin:$PATH" + cargo test --test setup_integration + + runHook postCheck + ''; + + installPhase = '' + runHook preInstall + mkdir -p "$out" + runHook postInstall + ''; + }; + checks.cli-clippy = rustPlatform.buildRustPackage { pname = "sce-cli-clippy-check"; version = "0.1.0"; diff --git a/cli/src/app.rs b/cli/src/app.rs index 799903d0..40a3f2db 100644 --- a/cli/src/app.rs +++ b/cli/src/app.rs @@ -1,44 +1,290 @@ +use std::io::{self, Write}; use std::process::ExitCode; use crate::{command_surface, dependency_contract, services}; -use anyhow::{bail, Context, Result}; +use anyhow::Context; use lexopt::ValueExt; +const EXIT_CODE_PARSE_FAILURE: u8 = 2; +const 
EXIT_CODE_VALIDATION_FAILURE: u8 = 3; +const EXIT_CODE_RUNTIME_FAILURE: u8 = 4; +const EXIT_CODE_DEPENDENCY_FAILURE: u8 = 5; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum FailureClass { + Parse, + Validation, + Runtime, + Dependency, +} + +impl FailureClass { + fn exit_code(self) -> u8 { + match self { + Self::Parse => EXIT_CODE_PARSE_FAILURE, + Self::Validation => EXIT_CODE_VALIDATION_FAILURE, + Self::Runtime => EXIT_CODE_RUNTIME_FAILURE, + Self::Dependency => EXIT_CODE_DEPENDENCY_FAILURE, + } + } + + fn as_str(self) -> &'static str { + match self { + Self::Parse => "parse", + Self::Validation => "validation", + Self::Runtime => "runtime", + Self::Dependency => "dependency", + } + } +} + +#[derive(Debug)] +struct ClassifiedError { + class: FailureClass, + code: &'static str, + message: String, +} + +impl ClassifiedError { + fn parse(message: impl Into) -> Self { + Self { + class: FailureClass::Parse, + code: "SCE-ERR-PARSE", + message: message.into(), + } + } + + fn validation(message: impl Into) -> Self { + Self { + class: FailureClass::Validation, + code: "SCE-ERR-VALIDATION", + message: message.into(), + } + } + + fn runtime(message: impl Into) -> Self { + Self { + class: FailureClass::Runtime, + code: "SCE-ERR-RUNTIME", + message: message.into(), + } + } + + fn dependency(message: impl Into) -> Self { + Self { + class: FailureClass::Dependency, + code: "SCE-ERR-DEPENDENCY", + message: message.into(), + } + } +} + +impl FailureClass { + fn default_try_guidance(self) -> &'static str { + match self { + Self::Parse => "run 'sce --help' to see valid usage.", + Self::Validation => { + "run the command-specific '--help' usage shown in the error and retry." + } + Self::Runtime => "inspect the runtime diagnostic details, then retry.", + Self::Dependency => { + "verify required runtime dependencies and environment setup, then retry." 
+ } + } + } +} + +impl std::fmt::Display for ClassifiedError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.message) + } +} + +impl std::error::Error for ClassifiedError {} + #[derive(Clone, Debug, Eq, PartialEq)] enum Command { Help, - Setup(services::setup::SetupMode), - SetupHooks(Option), + Completion(services::completion::CompletionRequest), + CompletionHelp, + Config(services::config::ConfigSubcommand), + Setup(services::setup::SetupRequest), SetupHelp, - Doctor, - Mcp, + Doctor(services::doctor::DoctorRequest), + DoctorHelp, + Mcp(services::mcp::McpRequest), + McpHelp, Hooks(services::hooks::HookSubcommand), - Sync, + HooksHelp, + Sync(services::sync::SyncRequest), + SyncHelp, + Version(services::version::VersionRequest), + VersionHelp, +} + +impl Command { + fn name(&self) -> &'static str { + match self { + Self::Help => "help", + Self::Completion(_) | Self::CompletionHelp => services::completion::NAME, + Self::Config(_) => services::config::NAME, + Self::Setup(_) | Self::SetupHelp => services::setup::NAME, + Self::Doctor(_) | Self::DoctorHelp => services::doctor::NAME, + Self::Mcp(_) | Self::McpHelp => services::mcp::NAME, + Self::Hooks(_) | Self::HooksHelp => services::hooks::NAME, + Self::Sync(_) | Self::SyncHelp => services::sync::NAME, + Self::Version(_) | Self::VersionHelp => services::version::NAME, + } + } } pub fn run(args: I) -> ExitCode where I: IntoIterator, { - match try_run(args) { - Ok(()) => ExitCode::SUCCESS, + run_with_dependency_check(args, || { + dependency_contract::dependency_contract_snapshot().0 + }) +} + +fn run_with_dependency_check(args: I, dependency_check: F) -> ExitCode +where + I: IntoIterator, + F: FnOnce() -> anyhow::Result<()>, +{ + let mut stdout = io::stdout(); + let mut stderr = io::stderr(); + run_with_dependency_check_and_streams(args, dependency_check, &mut stdout, &mut stderr) +} + +fn run_with_dependency_check_and_streams( + args: I, + dependency_check: F, + stdout: 
&mut StdoutW, + stderr: &mut StderrW, +) -> ExitCode +where + I: IntoIterator, + F: FnOnce() -> anyhow::Result<()>, + StdoutW: Write, + StderrW: Write, +{ + match try_run_with_dependency_check(args, dependency_check) { + Ok(payload) => { + if let Err(error) = write_stdout_payload(stdout, &payload) { + write_error_diagnostic(stderr, &error); + ExitCode::from(error.class.exit_code()) + } else { + ExitCode::SUCCESS + } + } Err(error) => { - eprintln!("Error: {error}"); - ExitCode::from(2) + write_error_diagnostic(stderr, &error); + ExitCode::from(error.class.exit_code()) } } } -fn try_run(args: I) -> Result<()> +fn write_stdout_payload(writer: &mut W, payload: &str) -> Result<(), ClassifiedError> +where + W: Write, +{ + writeln!(writer, "{payload}").map_err(|error| { + ClassifiedError::runtime(format!("Failed to write command output to stdout: {error}")) + }) +} + +fn write_error_diagnostic(writer: &mut W, error: &ClassifiedError) +where + W: Write, +{ + let rendered = if error.message.contains("Try:") { + error.message.clone() + } else { + format!( + "{} Try: {}", + error.message, + error.class.default_try_guidance() + ) + }; + + let _ = writeln!( + writer, + "Error [{}]: {}", + error.code, + services::security::redact_sensitive_text(&rendered) + ); +} + +fn try_run_with_dependency_check( + args: I, + dependency_check: F, +) -> Result where I: IntoIterator, + F: FnOnce() -> anyhow::Result<()>, { - let _ = dependency_contract::dependency_contract_snapshot(); - let command = parse_command(args)?; - dispatch(command) + dependency_check().map_err(|error| { + ClassifiedError::dependency(format!("Failed to initialize dependency contract: {error}")) + })?; + + let logger = services::observability::Logger::from_env().map_err(|error| { + ClassifiedError::validation(format!("Invalid observability configuration: {error}")) + })?; + let telemetry = services::observability::TelemetryRuntime::from_env().map_err(|error| { + ClassifiedError::validation(format!("Invalid observability 
configuration: {error}")) + })?; + + telemetry.with_default_subscriber(|| { + logger.info( + "sce.app.start", + "Starting command dispatch", + &[("component", services::observability::NAME)], + ); + + let command = match parse_command(args) { + Ok(command) => command, + Err(error) => { + logger.error( + "sce.command.parse_failed", + "Command parse failed", + &[("failure_class", error.class.as_str())], + ); + return Err(error); + } + }; + + logger.info( + "sce.command.parsed", + "Command parsed", + &[("command", command.name())], + ); + + match dispatch(&command) { + Ok(payload) => { + logger.info( + "sce.command.completed", + "Command completed", + &[("command", command.name())], + ); + Ok(payload) + } + Err(error) => { + logger.error( + "sce.command.failed", + "Command failed", + &[ + ("command", command.name()), + ("failure_class", error.class.as_str()), + ], + ); + Err(error) + } + } + }) } -fn parse_command(args: I) -> Result +fn parse_command(args: I) -> Result where I: IntoIterator, { @@ -53,29 +299,37 @@ where } let mut parser = lexopt::Parser::from_args(tail_args.iter().map(String::as_str)); - match parser.next()? { + match parser.next().map_err(|error| { + ClassifiedError::parse(format!( + "Failed to parse arguments: {error}. Try: run 'sce --help' to list valid commands, then retry with a supported form such as 'sce version' or 'sce setup --help'." + )) + })? 
{ Some(lexopt::Arg::Long("help")) => { if tail_args.len() == 1 { Ok(Command::Help) } else { - bail!("{}", unknown_option_message("--help")) + Err(ClassifiedError::parse(unknown_option_message("--help"))) } } Some(lexopt::Arg::Short('h')) => { if tail_args.len() == 1 { Ok(Command::Help) } else { - bail!("{}", unknown_option_message("-h")) + Err(ClassifiedError::parse(unknown_option_message("-h"))) } } - Some(lexopt::Arg::Long(option)) => { - bail!("{}", unknown_option_message(&format!("--{option}"))) - } - Some(lexopt::Arg::Short(option)) => { - bail!("{}", unknown_option_message(&format!("-{option}"))) - } + Some(lexopt::Arg::Long(option)) => Err(ClassifiedError::parse(unknown_option_message( + &format!("--{option}"), + ))), + Some(lexopt::Arg::Short(option)) => Err(ClassifiedError::parse(unknown_option_message( + &format!("-{option}"), + ))), Some(lexopt::Arg::Value(value)) => { - let subcommand = value.string()?; + let subcommand = value.string().map_err(|error| { + ClassifiedError::parse(format!( + "Failed to parse command token: {error}. Try: run 'sce --help' to list valid commands, then rerun with one of them." + )) + })?; parse_subcommand(subcommand, tail_args.into_iter().skip(1).collect()) } None => Ok(Command::Help), @@ -84,117 +338,276 @@ where fn unknown_option_message(option: &str) -> String { format!( - "Unknown option '{}'. Run 'sce --help' to see valid usage.", + "Unknown option '{}'. 
Try: run 'sce --help' to see top-level usage, or use 'sce --help' for command-specific options.", option ) } -fn parse_subcommand(value: String, tail_args: Vec) -> Result { +fn parse_subcommand(value: String, tail_args: Vec) -> Result { match value.as_str() { "help" => Ok(Command::Help), + "completion" => parse_completion_subcommand(tail_args), + "config" => parse_config_subcommand(tail_args), "setup" => parse_setup_subcommand(tail_args), - "doctor" => parse_non_setup_subcommand(Command::Doctor, tail_args), - "mcp" => parse_non_setup_subcommand(Command::Mcp, tail_args), + "doctor" => parse_doctor_subcommand(tail_args), + "mcp" => parse_mcp_subcommand(tail_args), "hooks" => parse_hooks_subcommand(tail_args), - "sync" => parse_non_setup_subcommand(Command::Sync, tail_args), + "sync" => parse_sync_subcommand(tail_args), + "version" => parse_version_subcommand(tail_args), _ => { if command_surface::is_known_command(&value) { - bail!( - "Command '{}' is currently unavailable in this build.", + return Err(ClassifiedError::parse(format!( + "Command '{}' is currently unavailable in this build. Try: run 'sce --help' to see available commands in this build.", value, - ); + ))); } - bail!( - "Unknown command '{}'. Run 'sce --help' to see the current command surface.", + Err(ClassifiedError::parse(format!( + "Unknown command '{}'. 
Try: run 'sce --help' to list valid commands, then rerun with a valid command such as 'sce version' or 'sce setup --help'.", value, - ); + ))) } } } -fn parse_setup_subcommand(args: Vec) -> Result { - let options = services::setup::parse_setup_cli_options(args)?; +fn parse_config_subcommand(args: Vec) -> Result { + let subcommand = services::config::parse_config_subcommand(args) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; + Ok(Command::Config(subcommand)) +} + +fn parse_completion_subcommand(args: Vec) -> Result { + if args.len() == 1 && (args[0] == "--help" || args[0] == "-h") { + return Ok(Command::CompletionHelp); + } + + let request = services::completion::parse_completion_request(args) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; + Ok(Command::Completion(request)) +} + +fn parse_setup_subcommand(args: Vec) -> Result { + let options = services::setup::parse_setup_cli_options(args) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; if options.help { return Ok(Command::SetupHelp); } - if options.hooks { - let repo_path = services::setup::resolve_setup_hooks_repository(&options)?; - return Ok(Command::SetupHooks(repo_path)); + let request = services::setup::resolve_setup_request(options) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; + Ok(Command::Setup(request)) +} + +fn parse_doctor_subcommand(args: Vec) -> Result { + if args.len() == 1 && (args[0] == "--help" || args[0] == "-h") { + return Ok(Command::DoctorHelp); } - services::setup::resolve_setup_hooks_repository(&options)?; + let request = services::doctor::parse_doctor_request(args) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; + Ok(Command::Doctor(request)) +} + +fn parse_mcp_subcommand(args: Vec) -> Result { + if args.len() == 1 && (args[0] == "--help" || args[0] == "-h") { + return Ok(Command::McpHelp); + } - let mode = services::setup::resolve_setup_mode(options)?; - Ok(Command::Setup(mode)) + 
let request = services::mcp::parse_mcp_request(args) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; + Ok(Command::Mcp(request)) } -fn parse_non_setup_subcommand(command: Command, tail_args: Vec) -> Result { - if tail_args.is_empty() { - return Ok(command); +fn parse_sync_subcommand(args: Vec) -> Result { + if args.len() == 1 && (args[0] == "--help" || args[0] == "-h") { + return Ok(Command::SyncHelp); } - bail!( - "Unexpected extra argument '{}'. Run 'sce --help' to see valid usage.", - tail_args[0] - ); + let request = services::sync::parse_sync_request(args) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; + Ok(Command::Sync(request)) } -fn parse_hooks_subcommand(args: Vec) -> Result { - let subcommand = services::hooks::parse_hooks_subcommand(args)?; +fn parse_hooks_subcommand(args: Vec) -> Result { + if args.len() == 1 && (args[0] == "--help" || args[0] == "-h") { + return Ok(Command::HooksHelp); + } + + let subcommand = services::hooks::parse_hooks_subcommand(args) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; Ok(Command::Hooks(subcommand)) } -fn dispatch(command: Command) -> Result<()> { +fn parse_version_subcommand(args: Vec) -> Result { + if args.len() == 1 && (args[0] == "--help" || args[0] == "-h") { + return Ok(Command::VersionHelp); + } + + let request = services::version::parse_version_request(args) + .map_err(|error| ClassifiedError::validation(error.to_string()))?; + Ok(Command::Version(request)) +} + +fn dispatch(command: &Command) -> Result { match command { - Command::Help => println!("{}", command_surface::help_text()), - Command::Setup(mode) => { - let dispatch = services::setup::resolve_setup_dispatch( - mode, - &services::setup::InquireSetupTargetPrompter, - )?; - - match dispatch { - services::setup::SetupDispatch::Proceed(mode) => { - let repository_root = - std::env::current_dir().context("Failed to determine current directory")?; - println!( - "{}", - 
services::setup::run_setup_for_mode(&repository_root, mode)? - ); - } - services::setup::SetupDispatch::Cancelled => { - println!("{}", services::setup::setup_cancelled_text()); + Command::Help => Ok(command_surface::help_text()), + Command::CompletionHelp => Ok(services::completion::completion_usage_text().to_string()), + Command::Completion(request) => Ok(services::completion::render_completion(*request)), + Command::Config(subcommand) => services::config::run_config_subcommand(subcommand.clone()) + .map_err(|error| ClassifiedError::runtime(error.to_string())), + Command::Setup(request) => { + let current_dir = std::env::current_dir() + .context("Failed to determine current directory") + .map_err(|error| ClassifiedError::runtime(error.to_string()))?; + + let mut sections = Vec::new(); + + if let Some(mode) = request.config_mode { + let dispatch = services::setup::resolve_setup_dispatch( + mode, + &services::setup::InquireSetupTargetPrompter, + ) + .map_err(|error| ClassifiedError::runtime(error.to_string()))?; + + match dispatch { + services::setup::SetupDispatch::Proceed(resolved_mode) => { + let setup_message = + services::setup::run_setup_for_mode(¤t_dir, resolved_mode) + .map_err(|error| ClassifiedError::runtime(error.to_string()))?; + sections.push(setup_message); + } + services::setup::SetupDispatch::Cancelled => { + return Ok(services::setup::setup_cancelled_text().to_string()); + } } } + + if request.install_hooks { + let repository_root = request + .hooks_repo_path + .as_deref() + .unwrap_or(current_dir.as_path()); + let hooks_message = services::setup::run_setup_hooks(repository_root) + .map_err(|error| ClassifiedError::runtime(error.to_string()))?; + sections.push(hooks_message); + } + + Ok(sections.join("\n\n")) } - Command::SetupHooks(repo_path) => { - let current_dir = - std::env::current_dir().context("Failed to determine current directory")?; - let repository_root = repo_path.as_deref().unwrap_or(current_dir.as_path()); - println!("{}", 
services::setup::run_setup_hooks(repository_root)?); - } - Command::SetupHelp => println!("{}", services::setup::setup_usage_text()), - Command::Doctor => println!("{}", services::doctor::run_doctor()?), - Command::Mcp => println!("{}", services::mcp::run_placeholder_mcp()?), - Command::Hooks(subcommand) => { - println!("{}", services::hooks::run_hooks_subcommand(subcommand)?) - } - Command::Sync => println!("{}", services::sync::run_placeholder_sync()?), + Command::SetupHelp => Ok(services::setup::setup_usage_text().to_string()), + Command::DoctorHelp => Ok(services::doctor::doctor_usage_text().to_string()), + Command::Doctor(request) => services::doctor::run_doctor(*request) + .map_err(|error| ClassifiedError::runtime(error.to_string())), + Command::McpHelp => Ok(services::mcp::mcp_usage_text().to_string()), + Command::Mcp(request) => services::mcp::run_placeholder_mcp(*request) + .map_err(|error| ClassifiedError::runtime(error.to_string())), + Command::HooksHelp => Ok(services::hooks::hooks_usage_text().to_string()), + Command::Hooks(subcommand) => services::hooks::run_hooks_subcommand(subcommand.clone()) + .map_err(|error| ClassifiedError::runtime(error.to_string())), + Command::SyncHelp => Ok(services::sync::sync_usage_text().to_string()), + Command::Sync(request) => services::sync::run_placeholder_sync(*request) + .map_err(|error| ClassifiedError::runtime(error.to_string())), + Command::VersionHelp => Ok(services::version::version_usage_text().to_string()), + Command::Version(request) => services::version::render_version(*request) + .map_err(|error| ClassifiedError::runtime(error.to_string())), } - - Ok(()) } #[cfg(test)] mod tests { use std::process::ExitCode; - use crate::services::setup::{SetupMode, SetupTarget}; + use crate::services::setup::{SetupMode, SetupRequest, SetupTarget}; + + use super::{ + parse_command, run, run_with_dependency_check, run_with_dependency_check_and_streams, + Command, EXIT_CODE_DEPENDENCY_FAILURE, EXIT_CODE_PARSE_FAILURE, 
EXIT_CODE_RUNTIME_FAILURE, + EXIT_CODE_VALIDATION_FAILURE, + }; + + #[test] + fn successful_output_is_written_to_stdout() { + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + let code = run_with_dependency_check_and_streams( + vec!["sce".to_string(), "--help".to_string()], + || Ok(()), + &mut stdout, + &mut stderr, + ); + assert_eq!(code, ExitCode::SUCCESS); + + let stdout = String::from_utf8(stdout).expect("stdout should be utf-8"); + assert!(stdout.contains("Usage:")); + } + + #[test] + fn parse_failure_keeps_stdout_empty_and_reports_stderr() { + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + let code = run_with_dependency_check_and_streams( + vec!["sce".to_string(), "does-not-exist".to_string()], + || Ok(()), + &mut stdout, + &mut stderr, + ); + assert_eq!(code, ExitCode::from(EXIT_CODE_PARSE_FAILURE)); + assert!(stdout.is_empty()); + + let stderr = String::from_utf8(stderr).expect("stderr should be utf-8"); + assert!(stderr.contains("Error [SCE-ERR-PARSE]: Unknown command 'does-not-exist'.")); + assert!(stderr.contains("Try:")); + } + + #[test] + fn parse_failure_stderr_contract_is_exact_and_deterministic() { + let mut first_stdout = Vec::new(); + let mut first_stderr = Vec::new(); + let first_code = run_with_dependency_check_and_streams( + vec!["sce".to_string(), "does-not-exist".to_string()], + || Ok(()), + &mut first_stdout, + &mut first_stderr, + ); + assert_eq!(first_code, ExitCode::from(EXIT_CODE_PARSE_FAILURE)); + assert!(first_stdout.is_empty()); + + let mut second_stdout = Vec::new(); + let mut second_stderr = Vec::new(); + let second_code = run_with_dependency_check_and_streams( + vec!["sce".to_string(), "does-not-exist".to_string()], + || Ok(()), + &mut second_stdout, + &mut second_stderr, + ); + assert_eq!(second_code, ExitCode::from(EXIT_CODE_PARSE_FAILURE)); + assert!(second_stdout.is_empty()); + + let expected = "Error [SCE-ERR-PARSE]: Unknown command 'does-not-exist'. 
Try: run 'sce --help' to list valid commands, then rerun with a valid command such as 'sce version' or 'sce setup --help'.\n"; + let first_stderr = String::from_utf8(first_stderr).expect("stderr should be utf-8"); + let second_stderr = String::from_utf8(second_stderr).expect("stderr should be utf-8"); + assert_eq!(first_stderr, expected); + assert_eq!(second_stderr, expected); + } + + #[test] + fn dependency_failure_reports_stable_error_code_and_try_guidance() { + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + let code = run_with_dependency_check_and_streams( + vec!["sce".to_string(), "--help".to_string()], + || anyhow::bail!("simulated dependency check failure"), + &mut stdout, + &mut stderr, + ); + assert_eq!(code, ExitCode::from(EXIT_CODE_DEPENDENCY_FAILURE)); + assert!(stdout.is_empty()); - use super::{parse_command, run, Command}; + let stderr = String::from_utf8(stderr).expect("stderr should be utf-8"); + assert!(stderr.contains("Error [SCE-ERR-DEPENDENCY]:")); + assert!(stderr.contains("Try:")); + } #[test] fn help_path_exits_success() { @@ -205,7 +618,7 @@ mod tests { #[test] fn hooks_command_without_subcommand_exits_non_zero() { let code = run(vec!["sce".to_string(), "hooks".to_string()]); - assert_eq!(code, ExitCode::from(2)); + assert_eq!(code, ExitCode::from(EXIT_CODE_VALIDATION_FAILURE)); } #[test] @@ -214,6 +627,16 @@ mod tests { assert_eq!(code, ExitCode::SUCCESS); } + #[test] + fn config_show_command_exits_success() { + let code = run(vec![ + "sce".to_string(), + "config".to_string(), + "show".to_string(), + ]); + assert_eq!(code, ExitCode::SUCCESS); + } + #[test] fn setup_help_exits_success() { let code = run(vec![ @@ -224,16 +647,123 @@ mod tests { assert_eq!(code, ExitCode::SUCCESS); } + #[test] + fn completion_command_exits_success() { + let code = run(vec![ + "sce".to_string(), + "completion".to_string(), + "--shell".to_string(), + "bash".to_string(), + ]); + assert_eq!(code, ExitCode::SUCCESS); + } + + #[test] + fn 
completion_help_exits_success() { + let code = run(vec![ + "sce".to_string(), + "completion".to_string(), + "--help".to_string(), + ]); + assert_eq!(code, ExitCode::SUCCESS); + } + #[test] fn sync_command_exits_success() { let code = run(vec!["sce".to_string(), "sync".to_string()]); assert_eq!(code, ExitCode::SUCCESS); } + #[test] + fn doctor_help_exits_success() { + let code = run(vec![ + "sce".to_string(), + "doctor".to_string(), + "--help".to_string(), + ]); + assert_eq!(code, ExitCode::SUCCESS); + } + + #[test] + fn mcp_help_exits_success() { + let code = run(vec![ + "sce".to_string(), + "mcp".to_string(), + "--help".to_string(), + ]); + assert_eq!(code, ExitCode::SUCCESS); + } + + #[test] + fn hooks_help_exits_success() { + let code = run(vec![ + "sce".to_string(), + "hooks".to_string(), + "--help".to_string(), + ]); + assert_eq!(code, ExitCode::SUCCESS); + } + + #[test] + fn sync_help_exits_success() { + let code = run(vec![ + "sce".to_string(), + "sync".to_string(), + "--help".to_string(), + ]); + assert_eq!(code, ExitCode::SUCCESS); + } + + #[test] + fn version_command_exits_success() { + let code = run(vec!["sce".to_string(), "version".to_string()]); + assert_eq!(code, ExitCode::SUCCESS); + } + + #[test] + fn version_help_exits_success() { + let code = run(vec![ + "sce".to_string(), + "version".to_string(), + "--help".to_string(), + ]); + assert_eq!(code, ExitCode::SUCCESS); + } + #[test] fn unknown_command_exits_non_zero() { let code = run(vec!["sce".to_string(), "does-not-exist".to_string()]); - assert_eq!(code, ExitCode::from(2)); + assert_eq!(code, ExitCode::from(EXIT_CODE_PARSE_FAILURE)); + } + + #[test] + fn setup_validation_failure_uses_validation_exit_code() { + let code = run(vec![ + "sce".to_string(), + "setup".to_string(), + "--repo".to_string(), + "../demo-repo".to_string(), + ]); + assert_eq!(code, ExitCode::from(EXIT_CODE_VALIDATION_FAILURE)); + } + + #[test] + fn runtime_failure_uses_runtime_exit_code() { + let code = run(vec![ + 
"sce".to_string(), + "hooks".to_string(), + "commit-msg".to_string(), + "/definitely/missing/COMMIT_EDITMSG".to_string(), + ]); + assert_eq!(code, ExitCode::from(EXIT_CODE_RUNTIME_FAILURE)); + } + + #[test] + fn dependency_failure_uses_dependency_exit_code() { + let code = run_with_dependency_check(vec!["sce".to_string(), "--help".to_string()], || { + anyhow::bail!("simulated dependency check failure") + }); + assert_eq!(code, ExitCode::from(EXIT_CODE_DEPENDENCY_FAILURE)); } #[test] @@ -283,7 +813,7 @@ mod tests { .expect_err("unknown hook subcommand should fail"); assert_eq!( error.to_string(), - "Unknown hook subcommand 'unknown'. Run 'sce hooks --help' to see valid usage." + "Unknown hook subcommand 'unknown'. Try: run 'sce hooks --help' and use one of 'pre-commit', 'commit-msg', 'post-commit', or 'post-rewrite'." ); } @@ -297,7 +827,11 @@ mod tests { .expect("command should parse"); assert_eq!( command, - Command::Setup(SetupMode::NonInteractive(SetupTarget::OpenCode,)) + Command::Setup(SetupRequest { + config_mode: Some(SetupMode::NonInteractive(SetupTarget::OpenCode,)), + install_hooks: false, + hooks_repo_path: None, + }) ); } @@ -311,7 +845,11 @@ mod tests { .expect("command should parse"); assert_eq!( command, - Command::Setup(SetupMode::NonInteractive(SetupTarget::Claude,)) + Command::Setup(SetupRequest { + config_mode: Some(SetupMode::NonInteractive(SetupTarget::Claude,)), + install_hooks: false, + hooks_repo_path: None, + }) ); } @@ -325,7 +863,11 @@ mod tests { .expect("command should parse"); assert_eq!( command, - Command::Setup(SetupMode::NonInteractive(SetupTarget::Both,)) + Command::Setup(SetupRequest { + config_mode: Some(SetupMode::NonInteractive(SetupTarget::Both,)), + install_hooks: false, + hooks_repo_path: None, + }) ); } @@ -333,7 +875,33 @@ mod tests { fn parser_routes_setup_without_flags_to_interactive_mode() { let command = parse_command(vec!["sce".to_string(), "setup".to_string()]) .expect("command should parse"); - assert_eq!(command, 
Command::Setup(SetupMode::Interactive)); + assert_eq!( + command, + Command::Setup(SetupRequest { + config_mode: Some(SetupMode::Interactive), + install_hooks: true, + hooks_repo_path: None, + }) + ); + } + + #[test] + fn parser_routes_setup_target_with_non_interactive_flag() { + let command = parse_command(vec![ + "sce".to_string(), + "setup".to_string(), + "--opencode".to_string(), + "--non-interactive".to_string(), + ]) + .expect("command should parse"); + assert_eq!( + command, + Command::Setup(SetupRequest { + config_mode: Some(SetupMode::NonInteractive(SetupTarget::OpenCode,)), + install_hooks: false, + hooks_repo_path: None, + }) + ); } #[test] @@ -344,7 +912,14 @@ mod tests { "--hooks".to_string(), ]) .expect("command should parse"); - assert_eq!(command, Command::SetupHooks(None)); + assert_eq!( + command, + Command::Setup(SetupRequest { + config_mode: None, + install_hooks: true, + hooks_repo_path: None, + }) + ); } #[test] @@ -359,10 +934,168 @@ mod tests { .expect("command should parse"); assert_eq!( command, - Command::SetupHooks(Some(std::path::PathBuf::from("../demo-repo"))) + Command::Setup(SetupRequest { + config_mode: None, + install_hooks: true, + hooks_repo_path: Some(std::path::PathBuf::from("../demo-repo")), + }) + ); + } + + #[test] + fn parser_routes_setup_target_plus_hooks_in_single_request() { + let command = parse_command(vec![ + "sce".to_string(), + "setup".to_string(), + "--opencode".to_string(), + "--hooks".to_string(), + ]) + .expect("command should parse"); + assert_eq!( + command, + Command::Setup(SetupRequest { + config_mode: Some(SetupMode::NonInteractive(SetupTarget::OpenCode,)), + install_hooks: true, + hooks_repo_path: None, + }) + ); + } + + #[test] + fn parser_routes_doctor_help() { + let command = parse_command(vec![ + "sce".to_string(), + "doctor".to_string(), + "--help".to_string(), + ]) + .expect("command should parse"); + assert_eq!(command, Command::DoctorHelp); + } + + #[test] + fn parser_routes_doctor_json_format() { 
+ let command = parse_command(vec![ + "sce".to_string(), + "doctor".to_string(), + "--format".to_string(), + "json".to_string(), + ]) + .expect("command should parse"); + assert_eq!( + command, + Command::Doctor(crate::services::doctor::DoctorRequest { + format: crate::services::doctor::DoctorFormat::Json, + }) + ); + } + + #[test] + fn parser_routes_mcp_help() { + let command = parse_command(vec![ + "sce".to_string(), + "mcp".to_string(), + "--help".to_string(), + ]) + .expect("command should parse"); + assert_eq!(command, Command::McpHelp); + } + + #[test] + fn parser_routes_mcp_json_format() { + let command = parse_command(vec![ + "sce".to_string(), + "mcp".to_string(), + "--format".to_string(), + "json".to_string(), + ]) + .expect("command should parse"); + assert_eq!( + command, + Command::Mcp(crate::services::mcp::McpRequest { + format: crate::services::mcp::McpFormat::Json, + }) + ); + } + + #[test] + fn parser_routes_hooks_help() { + let command = parse_command(vec![ + "sce".to_string(), + "hooks".to_string(), + "--help".to_string(), + ]) + .expect("command should parse"); + assert_eq!(command, Command::HooksHelp); + } + + #[test] + fn parser_routes_sync_help() { + let command = parse_command(vec![ + "sce".to_string(), + "sync".to_string(), + "--help".to_string(), + ]) + .expect("command should parse"); + assert_eq!(command, Command::SyncHelp); + } + + #[test] + fn parser_routes_sync_json_format() { + let command = parse_command(vec![ + "sce".to_string(), + "sync".to_string(), + "--format".to_string(), + "json".to_string(), + ]) + .expect("command should parse"); + assert_eq!( + command, + Command::Sync(crate::services::sync::SyncRequest { + format: crate::services::sync::SyncFormat::Json, + }) + ); + } + + #[test] + fn parser_routes_version_text_by_default() { + let command = parse_command(vec!["sce".to_string(), "version".to_string()]) + .expect("command should parse"); + assert_eq!( + command, + Command::Version(crate::services::version::VersionRequest { 
+ format: crate::services::version::VersionFormat::Text, + }) + ); + } + + #[test] + fn parser_routes_version_json_format() { + let command = parse_command(vec![ + "sce".to_string(), + "version".to_string(), + "--format".to_string(), + "json".to_string(), + ]) + .expect("command should parse"); + assert_eq!( + command, + Command::Version(crate::services::version::VersionRequest { + format: crate::services::version::VersionFormat::Json, + }) ); } + #[test] + fn parser_routes_version_help() { + let command = parse_command(vec![ + "sce".to_string(), + "version".to_string(), + "--help".to_string(), + ]) + .expect("command should parse"); + assert_eq!(command, Command::VersionHelp); + } + #[test] fn parser_rejects_setup_mutually_exclusive_flags() { let error = parse_command(vec![ @@ -374,7 +1107,7 @@ mod tests { .expect_err("mutually exclusive flags should fail"); assert_eq!( error.to_string(), - "Options '--opencode', '--claude', and '--both' are mutually exclusive. Choose exactly one target flag or none for interactive mode." + "Options '--opencode', '--claude', and '--both' are mutually exclusive. Try: choose exactly one target flag (for example 'sce setup --opencode --non-interactive') or omit all target flags for interactive mode." ); } @@ -389,22 +1122,21 @@ mod tests { .expect_err("--repo without --hooks should fail"); assert_eq!( error.to_string(), - "Option '--repo' requires '--hooks'. Run 'sce setup --help' to see valid usage." + "Option '--repo' requires '--hooks'. Try: run 'sce setup --hooks --repo ' or remove '--repo'." 
); } #[test] - fn parser_rejects_hooks_with_target_flag() { + fn parser_rejects_setup_non_interactive_without_target() { let error = parse_command(vec![ "sce".to_string(), "setup".to_string(), - "--hooks".to_string(), - "--opencode".to_string(), + "--non-interactive".to_string(), ]) - .expect_err("--hooks with target flag should fail"); + .expect_err("--non-interactive without a target should fail"); assert_eq!( error.to_string(), - "Option '--hooks' cannot be combined with '--opencode', '--claude', or '--both'. Run 'sce setup --help' to see valid usage." + "Option '--non-interactive' requires a target flag. Try: 'sce setup --opencode --non-interactive', 'sce setup --claude --non-interactive', or 'sce setup --both --non-interactive'." ); } @@ -414,7 +1146,7 @@ mod tests { .expect_err("unknown command should fail"); assert_eq!( error.to_string(), - "Unknown command 'nope'. Run 'sce --help' to see the current command surface." + "Unknown command 'nope'. Try: run 'sce --help' to list valid commands, then rerun with a valid command such as 'sce version' or 'sce setup --help'." ); } @@ -424,7 +1156,7 @@ mod tests { .expect_err("unknown option should fail"); assert_eq!( error.to_string(), - "Unknown option '--verbose'. Run 'sce --help' to see valid usage." + "Unknown option '--verbose'. Try: run 'sce --help' to see top-level usage, or use 'sce --help' for command-specific options." ); } @@ -438,7 +1170,56 @@ mod tests { .expect_err("extra argument should fail"); assert_eq!( error.to_string(), - "Unexpected setup argument 'extra'. Run 'sce setup --help' to see valid usage." + "Unexpected setup argument 'extra'. Try: remove the extra argument and use 'sce setup --help' for supported forms." 
+ ); + } + + #[test] + fn parser_routes_config_show_subcommand() { + let command = parse_command(vec![ + "sce".to_string(), + "config".to_string(), + "show".to_string(), + ]) + .expect("command should parse"); + assert_eq!( + command, + Command::Config(crate::services::config::ConfigSubcommand::Show( + crate::services::config::ConfigRequest { + report_format: crate::services::config::ReportFormat::Text, + config_path: None, + log_level: None, + timeout_ms: None, + } + )) + ); + } + + #[test] + fn parser_routes_completion_bash_shell() { + let command = parse_command(vec![ + "sce".to_string(), + "completion".to_string(), + "--shell".to_string(), + "bash".to_string(), + ]) + .expect("command should parse"); + assert_eq!( + command, + Command::Completion(crate::services::completion::CompletionRequest { + shell: crate::services::completion::CompletionShell::Bash, + }) ); } + + #[test] + fn parser_routes_completion_help() { + let command = parse_command(vec![ + "sce".to_string(), + "completion".to_string(), + "--help".to_string(), + ]) + .expect("command should parse"); + assert_eq!(command, Command::CompletionHelp); + } } diff --git a/cli/src/command_surface.rs b/cli/src/command_surface.rs index 7cca2c46..aab329ab 100644 --- a/cli/src/command_surface.rs +++ b/cli/src/command_surface.rs @@ -19,6 +19,11 @@ pub const COMMANDS: &[CommandContract] = &[ status: ImplementationStatus::Implemented, purpose: "Print the current placeholder command surface", }, + CommandContract { + name: services::config::NAME, + status: ImplementationStatus::Implemented, + purpose: "Inspect and validate resolved CLI configuration", + }, CommandContract { name: services::setup::NAME, status: ImplementationStatus::Implemented, @@ -44,6 +49,16 @@ pub const COMMANDS: &[CommandContract] = &[ status: ImplementationStatus::Placeholder, purpose: "Coordinate future cloud sync workflows", }, + CommandContract { + name: services::version::NAME, + status: ImplementationStatus::Implemented, + purpose: "Print 
deterministic runtime version metadata", + }, + CommandContract { + name: services::completion::NAME, + status: ImplementationStatus::Implemented, + purpose: "Generate deterministic shell completion scripts", + }, ]; pub fn is_known_command(name: &str) -> bool { @@ -67,9 +82,13 @@ pub fn help_text() -> String { format!( "sce - Shared Context Engineering CLI (placeholder foundation)\n\n\ Usage:\n sce [command]\n\n\ -Setup usage:\n sce setup [--opencode|--claude|--both]\n sce setup --hooks [--repo ]\n\n\ +Config usage:\n sce config [--format ] [options]\n\n\ +Setup usage:\n sce setup [--opencode|--claude|--both] [--non-interactive] [--hooks] [--repo ]\n\n\ +Completion usage:\n sce completion --shell \n\n\ +Output format contract:\n Supported commands accept --format \n\n\ +Examples:\n sce setup\n sce setup --opencode --non-interactive --hooks\n sce setup --hooks --repo ../demo-repo\n sce doctor --format json\n sce version --format json\n\n\ Commands:\n{}\n\n\ -Setup defaults to interactive target selection when no setup target flag is passed.\n\ +Setup defaults to interactive target selection when no setup target flag is passed, and installs hooks in the same run.\n\ Use '--hooks' to install required git hooks for the current repository or '--repo ' for a specific repository.\n\ `setup`, `doctor`, and `hooks` are implemented; `mcp` and `sync` remain placeholder-oriented.\n", command_rows @@ -94,7 +113,32 @@ mod tests { #[test] fn help_text_mentions_setup_target_flags() { let help = help_text(); - assert!(help.contains("sce setup [--opencode|--claude|--both]")); - assert!(help.contains("sce setup --hooks [--repo ]")); + assert!(help.contains( + "sce setup [--opencode|--claude|--both] [--non-interactive] [--hooks] [--repo ]" + )); + assert!(help.contains("installs hooks in the same run")); + assert!(help.contains("sce setup --opencode --non-interactive --hooks")); + } + + #[test] + fn help_text_mentions_version_command() { + let help = help_text(); + 
assert!(help.contains("version")); + } + + #[test] + fn help_text_mentions_completion_command() { + let help = help_text(); + assert!(help.contains("completion")); + assert!(help.contains("sce completion --shell ")); + } + + #[test] + fn help_text_mentions_shared_output_format_contract() { + let help = help_text(); + assert!(help.contains("Output format contract:")); + assert!(help.contains("--format ")); + assert!(help.contains("sce doctor --format json")); + assert!(help.contains("sce version --format json")); } } diff --git a/cli/src/dependency_contract.rs b/cli/src/dependency_contract.rs index 2af40095..4e586c4f 100644 --- a/cli/src/dependency_contract.rs +++ b/cli/src/dependency_contract.rs @@ -7,15 +7,32 @@ pub fn dependency_contract_snapshot() -> ( &'static str, &'static str, &'static str, + &'static str, + &'static str, + &'static str, + &'static str, + &'static str, + &'static str, ) { ( Ok(()), std::any::type_name::>(), std::any::type_name::(), std::any::type_name::(), + std::any::type_name::(), + std::any::type_name::(), + std::any::type_name::(), std::any::type_name::(), std::any::type_name::(), std::any::type_name::(), + std::any::type_name::(), + std::any::type_name::< + tracing_opentelemetry::OpenTelemetryLayer< + tracing_subscriber::Registry, + opentelemetry_sdk::trace::Tracer, + >, + >(), + std::any::type_name::(), std::any::type_name::(), ) } @@ -26,15 +43,35 @@ mod tests { #[test] fn dependency_contract_snapshot_references_agreed_crates() { - let (result, hmac_ty, inquire_ty, lexopt_ty, serde_json_ty, sha2_ty, tokio_ty, turso_ty) = - dependency_contract_snapshot(); + let ( + result, + hmac_ty, + inquire_ty, + lexopt_ty, + opentelemetry_ty, + opentelemetry_otlp_ty, + opentelemetry_sdk_ty, + serde_json_ty, + sha2_ty, + tokio_ty, + tracing_ty, + tracing_opentelemetry_ty, + tracing_subscriber_ty, + turso_ty, + ) = dependency_contract_snapshot(); assert!(result.is_ok()); assert!(hmac_ty.contains("hmac::")); assert!(inquire_ty.contains("inquire::")); 
assert!(lexopt_ty.contains("lexopt::")); + assert!(opentelemetry_ty.contains("opentelemetry::")); + assert!(opentelemetry_otlp_ty.contains("opentelemetry_otlp::")); + assert!(opentelemetry_sdk_ty.contains("opentelemetry_sdk::")); assert!(serde_json_ty.contains("serde_json::")); assert!(sha2_ty.contains("sha2::")); assert!(tokio_ty.contains("tokio::")); + assert!(tracing_ty.contains("tracing")); + assert!(tracing_opentelemetry_ty.contains("tracing_opentelemetry::")); + assert!(tracing_subscriber_ty.contains("tracing_subscriber::")); assert!(turso_ty.contains("turso::")); } } diff --git a/cli/src/services/completion.rs b/cli/src/services/completion.rs new file mode 100644 index 00000000..f0ef4551 --- /dev/null +++ b/cli/src/services/completion.rs @@ -0,0 +1,329 @@ +use anyhow::{bail, Result}; +use lexopt::Arg; +use lexopt::ValueExt; + +pub const NAME: &str = "completion"; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum CompletionShell { + Bash, + Zsh, + Fish, +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct CompletionRequest { + pub shell: CompletionShell, +} + +pub fn completion_usage_text() -> &'static str { + "Usage:\n sce completion --shell \n\nExamples:\n sce completion --shell bash > ./sce.bash\n sce completion --shell zsh > ./_sce\n sce completion --shell fish > ~/.config/fish/completions/sce.fish" +} + +pub fn parse_completion_request(args: Vec) -> Result { + let mut parser = lexopt::Parser::from_args(args); + let mut shell = None; + + while let Some(arg) = parser.next()? { + match arg { + Arg::Long("shell") => { + if shell.is_some() { + bail!( + "Option '--shell' may only be provided once. Run 'sce completion --help' to see valid usage." + ); + } + let value = parser.value()?; + let raw = value.string()?; + shell = Some(parse_shell(&raw)?); + } + Arg::Long("help") | Arg::Short('h') => { + bail!("Use 'sce completion --help' for completion usage."); + } + Arg::Long(option) => { + bail!( + "Unknown completion option '--{}'. 
Run 'sce completion --help' to see valid usage.", + option + ); + } + Arg::Short(option) => { + bail!( + "Unknown completion option '-{}'. Run 'sce completion --help' to see valid usage.", + option + ); + } + Arg::Value(value) => { + bail!( + "Unexpected completion argument '{}'. Run 'sce completion --help' to see valid usage.", + value.string()? + ); + } + } + } + + let Some(shell) = shell else { + bail!( + "Missing required option '--shell '. Run 'sce completion --help' to see valid usage." + ); + }; + + Ok(CompletionRequest { shell }) +} + +fn parse_shell(raw: &str) -> Result { + match raw { + "bash" => Ok(CompletionShell::Bash), + "zsh" => Ok(CompletionShell::Zsh), + "fish" => Ok(CompletionShell::Fish), + _ => bail!( + "Unsupported shell '{}'. Valid values: bash, zsh, fish.", + raw + ), + } +} + +pub fn render_completion(request: CompletionRequest) -> String { + match request.shell { + CompletionShell::Bash => bash_completion_script().to_string(), + CompletionShell::Zsh => zsh_completion_script().to_string(), + CompletionShell::Fish => fish_completion_script().to_string(), + } +} + +fn bash_completion_script() -> &'static str { + r#"_sce_complete() { + local cur prev cmd subcmd + cur="${COMP_WORDS[COMP_CWORD]}" + prev="${COMP_WORDS[COMP_CWORD-1]}" + cmd="${COMP_WORDS[1]}" + subcmd="${COMP_WORDS[2]}" + + if [[ ${COMP_CWORD} -eq 1 ]]; then + COMPREPLY=( $(compgen -W "help config setup doctor mcp hooks sync version completion" -- "${cur}") ) + return + fi + + case "${cmd}" in + config) + if [[ ${COMP_CWORD} -eq 2 ]]; then + COMPREPLY=( $(compgen -W "show validate --help -h" -- "${cur}") ) + return + fi + if [[ "${prev}" == "--format" ]]; then + COMPREPLY=( $(compgen -W "text json" -- "${cur}") ) + return + fi + if [[ "${prev}" == "--log-level" ]]; then + COMPREPLY=( $(compgen -W "error warn info debug" -- "${cur}") ) + return + fi + COMPREPLY=( $(compgen -W "--config --log-level --timeout-ms --format --help -h" -- "${cur}") ) + ;; + setup) + if [[ "${prev}" == 
"--repo" ]]; then + COMPREPLY=( $(compgen -d -- "${cur}") ) + return + fi + COMPREPLY=( $(compgen -W "--opencode --claude --both --non-interactive --hooks --repo --help -h" -- "${cur}") ) + ;; + doctor) + COMPREPLY=( $(compgen -W "--help -h" -- "${cur}") ) + ;; + mcp) + COMPREPLY=( $(compgen -W "--help -h" -- "${cur}") ) + ;; + hooks) + if [[ ${COMP_CWORD} -eq 2 ]]; then + COMPREPLY=( $(compgen -W "pre-commit commit-msg post-commit post-rewrite --help -h" -- "${cur}") ) + return + fi + if [[ "${subcmd}" == "post-rewrite" && ${COMP_CWORD} -eq 3 ]]; then + COMPREPLY=( $(compgen -W "amend rebase other" -- "${cur}") ) + return + fi + ;; + sync) + COMPREPLY=( $(compgen -W "--help -h" -- "${cur}") ) + ;; + version) + if [[ "${prev}" == "--format" ]]; then + COMPREPLY=( $(compgen -W "text json" -- "${cur}") ) + return + fi + COMPREPLY=( $(compgen -W "--format --help -h" -- "${cur}") ) + ;; + completion) + if [[ "${prev}" == "--shell" ]]; then + COMPREPLY=( $(compgen -W "bash zsh fish" -- "${cur}") ) + return + fi + COMPREPLY=( $(compgen -W "--shell --help -h" -- "${cur}") ) + ;; + help) + ;; + esac +} + +complete -F _sce_complete sce +"# +} + +fn zsh_completion_script() -> &'static str { + r#"#compdef sce + +local -a commands +commands=(help config setup doctor mcp hooks sync version completion) + +if (( CURRENT == 2 )); then + compadd -- $commands + return +fi + +case "${words[2]}" in + config) + if (( CURRENT == 3 )); then + compadd -- show validate --help -h + return + fi + case "${words[CURRENT-1]}" in + --format) + compadd -- text json + return + ;; + --log-level) + compadd -- error warn info debug + return + ;; + esac + compadd -- --config --log-level --timeout-ms --format --help -h + ;; + setup) + if [[ "${words[CURRENT-1]}" == "--repo" ]]; then + _files -/ + return + fi + compadd -- --opencode --claude --both --non-interactive --hooks --repo --help -h + ;; + doctor) + compadd -- --help -h + ;; + mcp) + compadd -- --help -h + ;; + hooks) + if (( CURRENT == 3 )); 
then + compadd -- pre-commit commit-msg post-commit post-rewrite --help -h + return + fi + if [[ "${words[3]}" == "post-rewrite" && CURRENT == 4 ]]; then + compadd -- amend rebase other + return + fi + ;; + sync) + compadd -- --help -h + ;; + version) + if [[ "${words[CURRENT-1]}" == "--format" ]]; then + compadd -- text json + return + fi + compadd -- --format --help -h + ;; + completion) + if [[ "${words[CURRENT-1]}" == "--shell" ]]; then + compadd -- bash zsh fish + return + fi + compadd -- --shell --help -h + ;; +esac +"# +} + +fn fish_completion_script() -> &'static str { + r#"complete -c sce -f + +complete -c sce -n "__fish_use_subcommand" -a "help config setup doctor mcp hooks sync version completion" + +complete -c sce -n "__fish_seen_subcommand_from config" -a "show validate" +complete -c sce -n "__fish_seen_subcommand_from config" -l config -r +complete -c sce -n "__fish_seen_subcommand_from config" -l log-level -r -a "error warn info debug" +complete -c sce -n "__fish_seen_subcommand_from config" -l timeout-ms -r +complete -c sce -n "__fish_seen_subcommand_from config" -l format -r -a "text json" + +complete -c sce -n "__fish_seen_subcommand_from setup" -l opencode +complete -c sce -n "__fish_seen_subcommand_from setup" -l claude +complete -c sce -n "__fish_seen_subcommand_from setup" -l both +complete -c sce -n "__fish_seen_subcommand_from setup" -l non-interactive +complete -c sce -n "__fish_seen_subcommand_from setup" -l hooks +complete -c sce -n "__fish_seen_subcommand_from setup" -l repo -r -a "(__fish_complete_directories)" + +complete -c sce -n "__fish_seen_subcommand_from hooks" -a "pre-commit commit-msg post-commit post-rewrite" +complete -c sce -n "__fish_seen_subcommand_from hooks post-rewrite" -a "amend rebase other" + +complete -c sce -n "__fish_seen_subcommand_from version" -l format -r -a "text json" + +complete -c sce -n "__fish_seen_subcommand_from completion" -l shell -r -a "bash zsh fish" +"# +} + +#[cfg(test)] +mod tests { + use 
super::{parse_completion_request, render_completion, CompletionRequest, CompletionShell}; + + #[test] + fn parse_requires_shell() { + let error = parse_completion_request(vec![]).expect_err("missing --shell should fail"); + assert!(error + .to_string() + .contains("Missing required option '--shell")); + } + + #[test] + fn parse_accepts_shell_value() { + let request = parse_completion_request(vec!["--shell".to_string(), "zsh".to_string()]) + .expect("request should parse"); + assert_eq!(request.shell, CompletionShell::Zsh); + } + + #[test] + fn parse_rejects_duplicate_shell_option() { + let error = parse_completion_request(vec![ + "--shell".to_string(), + "bash".to_string(), + "--shell".to_string(), + "zsh".to_string(), + ]) + .expect_err("duplicate --shell should fail"); + assert!(error + .to_string() + .contains("Option '--shell' may only be provided once")); + } + + #[test] + fn render_bash_completion_is_deterministic() { + let output = render_completion(CompletionRequest { + shell: CompletionShell::Bash, + }); + assert!(output.contains("complete -F _sce_complete sce")); + assert!(output.contains("help config setup doctor mcp hooks sync version completion")); + } + + #[test] + fn render_zsh_completion_has_compdef_header() { + let output = render_completion(CompletionRequest { + shell: CompletionShell::Zsh, + }); + assert!(output.contains("#compdef sce")); + assert!(output.contains("completion")); + } + + #[test] + fn render_fish_completion_has_completion_command() { + let output = render_completion(CompletionRequest { + shell: CompletionShell::Fish, + }); + assert!(output.contains("complete -c sce -f")); + assert!(output.contains("completion")); + } +} diff --git a/cli/src/services/config.rs b/cli/src/services/config.rs new file mode 100644 index 00000000..a033f5f6 --- /dev/null +++ b/cli/src/services/config.rs @@ -0,0 +1,883 @@ +use std::path::{Path, PathBuf}; + +use anyhow::{anyhow, bail, Context, Result}; +use lexopt::{Arg, ValueExt}; +use serde_json::{json, 
Value}; + +use crate::services::output_format::OutputFormat; + +pub const NAME: &str = "config"; + +const DEFAULT_TIMEOUT_MS: u64 = 30000; + +pub type ReportFormat = OutputFormat; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum LogLevel { + Error, + Warn, + Info, + Debug, +} + +impl LogLevel { + fn parse(raw: &str, source: &str) -> Result { + match raw { + "error" => Ok(Self::Error), + "warn" => Ok(Self::Warn), + "info" => Ok(Self::Info), + "debug" => Ok(Self::Debug), + _ => bail!( + "Invalid log level '{}' from {}. Valid values: error, warn, info, debug.", + raw, + source + ), + } + } + + fn as_str(self) -> &'static str { + match self { + Self::Error => "error", + Self::Warn => "warn", + Self::Info => "info", + Self::Debug => "debug", + } + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum ValueSource { + Flag, + Env, + ConfigFile(ConfigPathSource), + Default, +} + +impl ValueSource { + fn as_str(self) -> &'static str { + match self { + Self::Flag => "flag", + Self::Env => "env", + Self::ConfigFile(_) => "config_file", + Self::Default => "default", + } + } + + fn config_source(self) -> Option { + match self { + Self::ConfigFile(source) => Some(source), + _ => None, + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ConfigSubcommand { + Help, + Show(ConfigRequest), + Validate(ConfigRequest), +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ConfigRequest { + pub report_format: ReportFormat, + pub config_path: Option, + pub log_level: Option, + pub timeout_ms: Option, +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum ConfigPathSource { + Flag, + Env, + DefaultDiscoveredGlobal, + DefaultDiscoveredLocal, +} + +impl ConfigPathSource { + fn as_str(self) -> &'static str { + match self { + Self::Flag => "flag", + Self::Env => "env", + Self::DefaultDiscoveredGlobal => "default_discovered_global", + Self::DefaultDiscoveredLocal => "default_discovered_local", + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct 
ResolvedValue { + value: T, + source: ValueSource, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct LoadedConfigPath { + path: PathBuf, + source: ConfigPathSource, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct RuntimeConfig { + loaded_config_paths: Vec, + log_level: ResolvedValue, + timeout_ms: ResolvedValue, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct FileConfig { + log_level: Option>, + timeout_ms: Option>, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct FileConfigValue { + value: T, + source: ConfigPathSource, +} + +pub fn parse_config_subcommand(mut args: Vec) -> Result { + if args.is_empty() { + bail!("Missing config subcommand. Run 'sce config --help' to see valid usage."); + } + + if let [only] = args.as_slice() { + if only == "--help" || only == "-h" { + return Ok(ConfigSubcommand::Help); + } + } + + let subcommand = args.remove(0); + let tail = args; + match subcommand.as_str() { + "show" => Ok(ConfigSubcommand::Show(parse_config_request(tail)?)), + "validate" => Ok(ConfigSubcommand::Validate(parse_config_request(tail)?)), + _ => bail!( + "Unknown config subcommand '{}'. Run 'sce config --help' to see valid usage.", + subcommand + ), + } +} + +fn parse_config_request(args: Vec) -> Result { + let mut parser = lexopt::Parser::from_args(args); + let mut request = ConfigRequest { + report_format: ReportFormat::Text, + config_path: None, + log_level: None, + timeout_ms: None, + }; + + while let Some(arg) = parser.next()? { + match arg { + Arg::Long("format") => { + let value = parser + .value() + .context("Option '--format' requires a value")?; + let raw = value.string()?; + request.report_format = ReportFormat::parse(&raw, "sce config --help")?; + } + Arg::Long("config") => { + let value = parser + .value() + .context("Option '--config' requires a path value")?; + if request.config_path.is_some() { + bail!( + "Option '--config' may only be provided once. Run 'sce config --help' to see valid usage." 
+ ); + } + request.config_path = Some(PathBuf::from(value.string()?)); + } + Arg::Long("log-level") => { + let value = parser + .value() + .context("Option '--log-level' requires a value")?; + let raw = value.string()?; + request.log_level = Some(LogLevel::parse(&raw, "--log-level")?); + } + Arg::Long("timeout-ms") => { + let value = parser + .value() + .context("Option '--timeout-ms' requires a numeric value")?; + let raw = value.string()?; + let timeout = raw + .parse::() + .map_err(|_| anyhow!("Invalid timeout '{}' from --timeout-ms.", raw))?; + request.timeout_ms = Some(timeout); + } + Arg::Long("help") | Arg::Short('h') => { + bail!( + "Use 'sce config --help' for config usage. Command-local help does not accept additional arguments." + ); + } + Arg::Long(option) => { + bail!( + "Unknown config option '--{}'. Run 'sce config --help' to see valid usage.", + option + ); + } + Arg::Short(option) => { + bail!( + "Unknown config option '-{}'. Run 'sce config --help' to see valid usage.", + option + ); + } + Arg::Value(value) => { + let raw = value.string()?; + bail!( + "Unexpected config argument '{}'. 
Run 'sce config --help' to see valid usage.", + raw + ); + } + } + } + + Ok(request) +} + +pub fn run_config_subcommand(subcommand: ConfigSubcommand) -> Result { + match subcommand { + ConfigSubcommand::Help => Ok(config_usage_text().to_string()), + ConfigSubcommand::Show(request) => { + let cwd = std::env::current_dir().context("Failed to determine current directory")?; + let runtime = resolve_runtime_config(&request, &cwd)?; + Ok(format_show_output(&runtime, request.report_format)) + } + ConfigSubcommand::Validate(request) => { + let cwd = std::env::current_dir().context("Failed to determine current directory")?; + let runtime = resolve_runtime_config(&request, &cwd)?; + Ok(format_validate_output(&runtime, request.report_format)) + } + } +} + +pub fn config_usage_text() -> &'static str { + "Usage:\n sce config show [--config ] [--log-level ] [--timeout-ms ] [--format ]\n sce config validate [--config ] [--log-level ] [--timeout-ms ] [--format ]\n\nResolution precedence: flags > env > config file > defaults\nConfig discovery order: --config, SCE_CONFIG_FILE, then discovered global+local defaults (global merged first, local overrides per key)\nEnvironment keys: SCE_CONFIG_FILE, SCE_LOG_LEVEL, SCE_TIMEOUT_MS" +} + +fn resolve_runtime_config(request: &ConfigRequest, cwd: &Path) -> Result { + resolve_runtime_config_with( + request, + cwd, + |key| std::env::var(key).ok(), + |path| { + std::fs::read_to_string(path) + .with_context(|| format!("Failed to read config file '{}'.", path.display())) + }, + Path::exists, + resolve_default_global_config_path, + ) +} + +fn resolve_runtime_config_with( + request: &ConfigRequest, + cwd: &Path, + env_lookup: FEnv, + read_file: FRead, + path_exists: fn(&Path) -> bool, + resolve_global_config_path: FGlobalPath, +) -> Result +where + FEnv: Fn(&str) -> Option, + FRead: Fn(&Path) -> Result, + FGlobalPath: Fn() -> Result, +{ + let loaded_config_paths = resolve_config_paths( + request, + cwd, + &env_lookup, + path_exists, + 
resolve_global_config_path, + )?; + + let mut file_config = FileConfig { + log_level: None, + timeout_ms: None, + }; + for loaded_path in &loaded_config_paths { + let raw = read_file(&loaded_path.path)?; + let layer = parse_file_config(&raw, &loaded_path.path, loaded_path.source)?; + if let Some(log_level) = layer.log_level { + file_config.log_level = Some(log_level); + } + if let Some(timeout_ms) = layer.timeout_ms { + file_config.timeout_ms = Some(timeout_ms); + } + } + + let mut resolved_log_level = ResolvedValue { + value: LogLevel::Info, + source: ValueSource::Default, + }; + if let Some(value) = file_config.log_level { + resolved_log_level = ResolvedValue { + value: value.value, + source: ValueSource::ConfigFile(value.source), + }; + } + if let Some(raw) = env_lookup("SCE_LOG_LEVEL") { + resolved_log_level = ResolvedValue { + value: LogLevel::parse(&raw, "SCE_LOG_LEVEL")?, + source: ValueSource::Env, + }; + } + if let Some(value) = request.log_level { + resolved_log_level = ResolvedValue { + value, + source: ValueSource::Flag, + }; + } + + let mut resolved_timeout_ms = ResolvedValue { + value: DEFAULT_TIMEOUT_MS, + source: ValueSource::Default, + }; + if let Some(value) = file_config.timeout_ms { + resolved_timeout_ms = ResolvedValue { + value: value.value, + source: ValueSource::ConfigFile(value.source), + }; + } + if let Some(raw) = env_lookup("SCE_TIMEOUT_MS") { + let value = raw + .parse::() + .map_err(|_| anyhow!("Invalid timeout '{}' from SCE_TIMEOUT_MS.", raw))?; + resolved_timeout_ms = ResolvedValue { + value, + source: ValueSource::Env, + }; + } + if let Some(value) = request.timeout_ms { + resolved_timeout_ms = ResolvedValue { + value, + source: ValueSource::Flag, + }; + } + + Ok(RuntimeConfig { + loaded_config_paths, + log_level: resolved_log_level, + timeout_ms: resolved_timeout_ms, + }) +} + +fn resolve_config_paths( + request: &ConfigRequest, + cwd: &Path, + env_lookup: &FEnv, + path_exists: fn(&Path) -> bool, + resolve_global_config_path: 
FGlobalPath, +) -> Result> +where + FEnv: Fn(&str) -> Option, + FGlobalPath: Fn() -> Result, +{ + if let Some(path) = request.config_path.as_ref() { + if !path_exists(path) { + bail!( + "Config file '{}' was provided via --config but does not exist.", + path.display() + ); + } + return Ok(vec![LoadedConfigPath { + path: path.clone(), + source: ConfigPathSource::Flag, + }]); + } + + if let Some(raw) = env_lookup("SCE_CONFIG_FILE") { + let path = PathBuf::from(raw); + if !path_exists(&path) { + bail!( + "Config file '{}' was provided via SCE_CONFIG_FILE but does not exist.", + path.display() + ); + } + return Ok(vec![LoadedConfigPath { + path, + source: ConfigPathSource::Env, + }]); + } + + let mut discovered_paths = Vec::new(); + + let global_path = resolve_global_config_path()? + .join("sce") + .join("config.json"); + if path_exists(&global_path) { + discovered_paths.push(LoadedConfigPath { + path: global_path, + source: ConfigPathSource::DefaultDiscoveredGlobal, + }); + } + + let local_path = cwd.join(".sce").join("config.json"); + if path_exists(&local_path) { + discovered_paths.push(LoadedConfigPath { + path: local_path, + source: ConfigPathSource::DefaultDiscoveredLocal, + }); + } + + Ok(discovered_paths) +} + +fn resolve_default_global_config_path() -> Result { + crate::services::local_db::resolve_state_data_root() +} + +fn parse_file_config(raw: &str, path: &Path, source: ConfigPathSource) -> Result { + let parsed: Value = serde_json::from_str(raw) + .with_context(|| format!("Config file '{}' must contain valid JSON.", path.display()))?; + + let object = parsed.as_object().with_context(|| { + format!( + "Config file '{}' must contain a top-level JSON object.", + path.display() + ) + })?; + + for key in object.keys() { + if key != "log_level" && key != "timeout_ms" { + bail!( + "Config file '{}' contains unknown key '{}'. 
Allowed keys: log_level, timeout_ms.", + path.display(), + key + ); + } + } + + let log_level = match object.get("log_level") { + Some(value) => { + let raw = value.as_str().with_context(|| { + format!( + "Config key 'log_level' in '{}' must be a string.", + path.display() + ) + })?; + Some(FileConfigValue { + value: LogLevel::parse(raw, &format!("config file '{}'", path.display()))?, + source, + }) + } + None => None, + }; + + let timeout_ms = match object.get("timeout_ms") { + Some(value) => { + let parsed = value.as_u64().with_context(|| { + format!( + "Config key 'timeout_ms' in '{}' must be an unsigned integer.", + path.display() + ) + })?; + Some(FileConfigValue { + value: parsed, + source, + }) + } + None => None, + }; + + Ok(FileConfig { + log_level, + timeout_ms, + }) +} + +fn format_show_output(runtime: &RuntimeConfig, report_format: ReportFormat) -> String { + match report_format { + ReportFormat::Text => { + let lines = vec![ + "SCE config: resolved".to_string(), + "Precedence: flags > env > config file > defaults".to_string(), + format_config_paths_text(runtime), + format_resolved_value_text( + "log_level", + runtime.log_level.value.as_str(), + runtime.log_level.source, + ), + format_resolved_value_text( + "timeout_ms", + &runtime.timeout_ms.value.to_string(), + runtime.timeout_ms.source, + ), + ]; + lines.join("\n") + } + ReportFormat::Json => { + let payload = json!({ + "status": "ok", + "result": { + "command": "config_show", + "precedence": "flags > env > config file > defaults", + "config_paths": format_config_paths_json(runtime), + "resolved": { + "log_level": { + "value": runtime.log_level.value.as_str(), + "source": runtime.log_level.source.as_str(), + "config_source": runtime.log_level.source.config_source().map(ConfigPathSource::as_str), + }, + "timeout_ms": { + "value": runtime.timeout_ms.value, + "source": runtime.timeout_ms.source.as_str(), + "config_source": runtime.timeout_ms.source.config_source().map(ConfigPathSource::as_str), + } + } 
+ } + }); + serde_json::to_string_pretty(&payload).expect("config show payload should serialize") + } + } +} + +fn format_validate_output(runtime: &RuntimeConfig, report_format: ReportFormat) -> String { + match report_format { + ReportFormat::Text => { + let lines = vec![ + "SCE config validation: valid".to_string(), + "Precedence: flags > env > config file > defaults".to_string(), + format_config_paths_text(runtime), + "Validation issues: none".to_string(), + ]; + lines.join("\n") + } + ReportFormat::Json => { + let payload = json!({ + "status": "ok", + "result": { + "command": "config_validate", + "valid": true, + "precedence": "flags > env > config file > defaults", + "config_paths": format_config_paths_json(runtime), + "issues": [] + } + }); + serde_json::to_string_pretty(&payload) + .expect("config validate payload should serialize") + } + } +} + +fn format_config_paths_text(runtime: &RuntimeConfig) -> String { + if runtime.loaded_config_paths.is_empty() { + return "Config files: (none discovered)".to_string(); + } + + let mut lines = vec!["Config files:".to_string()]; + for path in &runtime.loaded_config_paths { + lines.push(format!( + "- {} (source: {})", + path.path.display(), + path.source.as_str() + )); + } + lines.join("\n") +} + +fn format_config_paths_json(runtime: &RuntimeConfig) -> Value { + Value::Array( + runtime + .loaded_config_paths + .iter() + .map(|path| { + json!({ + "path": path.path.display().to_string(), + "source": path.source.as_str(), + }) + }) + .collect(), + ) +} + +fn format_resolved_value_text(key: &str, value: &str, source: ValueSource) -> String { + match source.config_source() { + Some(config_source) => format!( + "- {}: {} (source: {}, config_source: {})", + key, + value, + source.as_str(), + config_source.as_str() + ), + None => format!("- {}: {} (source: {})", key, value, source.as_str()), + } +} + +#[cfg(test)] +mod tests { + use super::{ + format_show_output, format_validate_output, parse_config_subcommand, + 
resolve_runtime_config_with, ConfigPathSource, ConfigRequest, ConfigSubcommand, + LoadedConfigPath, LogLevel, ReportFormat, ResolvedValue, RuntimeConfig, ValueSource, + }; + use anyhow::Result; + use serde_json::Value; + use std::path::{Path, PathBuf}; + + fn request() -> ConfigRequest { + ConfigRequest { + report_format: ReportFormat::Text, + config_path: None, + log_level: None, + timeout_ms: None, + } + } + + #[test] + fn parser_routes_show_subcommand() -> Result<()> { + let parsed = parse_config_subcommand(vec!["show".to_string()])?; + assert_eq!(parsed, ConfigSubcommand::Show(request())); + Ok(()) + } + + #[test] + fn parser_routes_validate_subcommand_with_options() -> Result<()> { + let parsed = parse_config_subcommand(vec![ + "validate".to_string(), + "--format".to_string(), + "json".to_string(), + "--log-level".to_string(), + "debug".to_string(), + "--timeout-ms".to_string(), + "100".to_string(), + "--config".to_string(), + "./demo.json".to_string(), + ])?; + assert_eq!( + parsed, + ConfigSubcommand::Validate(ConfigRequest { + report_format: ReportFormat::Json, + config_path: Some(PathBuf::from("./demo.json")), + log_level: Some(LogLevel::Debug), + timeout_ms: Some(100), + }) + ); + Ok(()) + } + + #[test] + fn parser_rejects_invalid_format_with_help_guidance() { + let error = parse_config_subcommand(vec![ + "show".to_string(), + "--format".to_string(), + "yaml".to_string(), + ]) + .expect_err("invalid format should fail"); + assert_eq!( + error.to_string(), + "Invalid --format value 'yaml'. Valid values: text, json. Run 'sce config --help' to see valid usage." 
+ ); + } + + #[test] + fn resolver_applies_precedence_flag_then_env_then_config_then_default() -> Result<()> { + let req = ConfigRequest { + report_format: ReportFormat::Text, + config_path: Some(PathBuf::from("/tmp/config.json")), + log_level: Some(LogLevel::Warn), + timeout_ms: Some(900), + }; + let resolved = resolve_runtime_config_with( + &req, + Path::new("/workspace"), + |key| match key { + "SCE_LOG_LEVEL" => Some("debug".to_string()), + "SCE_TIMEOUT_MS" => Some("700".to_string()), + _ => None, + }, + |_| Ok("{\"log_level\":\"error\",\"timeout_ms\":500}".to_string()), + |_| true, + || Ok(PathBuf::from("/state")), + )?; + + assert_eq!(resolved.log_level.value, LogLevel::Warn); + assert_eq!(resolved.log_level.source.as_str(), "flag"); + assert_eq!(resolved.timeout_ms.value, 900); + assert_eq!(resolved.timeout_ms.source.as_str(), "flag"); + Ok(()) + } + + #[test] + fn resolver_uses_env_when_flags_absent() -> Result<()> { + let req = ConfigRequest { + report_format: ReportFormat::Text, + config_path: Some(PathBuf::from("/tmp/config.json")), + log_level: None, + timeout_ms: None, + }; + let resolved = resolve_runtime_config_with( + &req, + Path::new("/workspace"), + |key| match key { + "SCE_LOG_LEVEL" => Some("warn".to_string()), + "SCE_TIMEOUT_MS" => Some("1200".to_string()), + _ => None, + }, + |_| Ok("{\"log_level\":\"error\",\"timeout_ms\":500}".to_string()), + |_| true, + || Ok(PathBuf::from("/state")), + )?; + + assert_eq!(resolved.log_level.value, LogLevel::Warn); + assert_eq!(resolved.log_level.source.as_str(), "env"); + assert_eq!(resolved.timeout_ms.value, 1200); + assert_eq!(resolved.timeout_ms.source.as_str(), "env"); + Ok(()) + } + + #[test] + fn resolver_uses_defaults_when_no_inputs_present() -> Result<()> { + let req = request(); + let resolved = resolve_runtime_config_with( + &req, + Path::new("/workspace"), + |_| None, + |_| Ok("{}".to_string()), + |_| false, + || Ok(PathBuf::from("/state")), + )?; + + assert_eq!(resolved.log_level.value, 
LogLevel::Info); + assert_eq!(resolved.log_level.source.as_str(), "default"); + assert_eq!(resolved.timeout_ms.value, 30000); + assert_eq!(resolved.timeout_ms.source.as_str(), "default"); + Ok(()) + } + + #[test] + fn resolver_rejects_unknown_config_keys() { + let req = ConfigRequest { + report_format: ReportFormat::Text, + config_path: Some(PathBuf::from("/tmp/config.json")), + log_level: None, + timeout_ms: None, + }; + let error = resolve_runtime_config_with( + &req, + Path::new("/workspace"), + |_| None, + |_| Ok("{\"unknown\":true}".to_string()), + |_| true, + || Ok(PathBuf::from("/state")), + ) + .expect_err("unknown config keys should fail"); + assert!(error.to_string().contains("contains unknown key 'unknown'")); + } + + #[test] + fn resolver_merges_discovered_global_and_local_configs() -> Result<()> { + let req = request(); + let resolved = resolve_runtime_config_with( + &req, + Path::new("/workspace"), + |_| None, + |path| { + if path == Path::new("/state/sce/config.json") { + return Ok("{\"log_level\":\"error\",\"timeout_ms\":500}".to_string()); + } + if path == Path::new("/workspace/.sce/config.json") { + return Ok("{\"timeout_ms\":700}".to_string()); + } + Err(anyhow::anyhow!( + "unexpected config path: {}", + path.display() + )) + }, + |path| { + path == Path::new("/state/sce/config.json") + || path == Path::new("/workspace/.sce/config.json") + }, + || Ok(PathBuf::from("/state")), + )?; + + assert_eq!(resolved.loaded_config_paths.len(), 2); + assert_eq!( + resolved.loaded_config_paths[0].source.as_str(), + "default_discovered_global" + ); + assert_eq!( + resolved.loaded_config_paths[1].source.as_str(), + "default_discovered_local" + ); + + assert_eq!(resolved.log_level.value, LogLevel::Error); + assert_eq!(resolved.log_level.source.as_str(), "config_file"); + assert_eq!( + resolved + .log_level + .source + .config_source() + .map(|source| source.as_str()), + Some("default_discovered_global") + ); + + assert_eq!(resolved.timeout_ms.value, 700); + 
assert_eq!(resolved.timeout_ms.source.as_str(), "config_file"); + assert_eq!( + resolved + .timeout_ms + .source + .config_source() + .map(|source| source.as_str()), + Some("default_discovered_local") + ); + Ok(()) + } + + fn sample_runtime() -> RuntimeConfig { + RuntimeConfig { + loaded_config_paths: vec![ + LoadedConfigPath { + path: PathBuf::from("/state/sce/config.json"), + source: ConfigPathSource::DefaultDiscoveredGlobal, + }, + LoadedConfigPath { + path: PathBuf::from("/workspace/.sce/config.json"), + source: ConfigPathSource::DefaultDiscoveredLocal, + }, + ], + log_level: ResolvedValue { + value: LogLevel::Warn, + source: ValueSource::Env, + }, + timeout_ms: ResolvedValue { + value: 1200, + source: ValueSource::Flag, + }, + } + } + + #[test] + fn show_json_output_is_deterministic_for_same_runtime() -> Result<()> { + let runtime = sample_runtime(); + let first = format_show_output(&runtime, ReportFormat::Json); + let second = format_show_output(&runtime, ReportFormat::Json); + assert_eq!(first, second); + + let parsed: Value = serde_json::from_str(&first)?; + assert_eq!(parsed["status"], "ok"); + assert_eq!(parsed["result"]["command"], "config_show"); + assert_eq!( + parsed["result"]["precedence"], + "flags > env > config file > defaults" + ); + assert_eq!(parsed["result"]["resolved"]["log_level"]["source"], "env"); + assert_eq!(parsed["result"]["resolved"]["timeout_ms"]["source"], "flag"); + Ok(()) + } + + #[test] + fn validate_json_output_is_deterministic_for_same_runtime() -> Result<()> { + let runtime = sample_runtime(); + let first = format_validate_output(&runtime, ReportFormat::Json); + let second = format_validate_output(&runtime, ReportFormat::Json); + assert_eq!(first, second); + + let parsed: Value = serde_json::from_str(&first)?; + assert_eq!(parsed["status"], "ok"); + assert_eq!(parsed["result"]["command"], "config_validate"); + assert_eq!(parsed["result"]["valid"], true); + assert!(parsed["result"]["issues"].as_array().is_some()); + Ok(()) + } 
+} diff --git a/cli/src/services/doctor.rs b/cli/src/services/doctor.rs index 49ae763c..1f661cc7 100644 --- a/cli/src/services/doctor.rs +++ b/cli/src/services/doctor.rs @@ -2,12 +2,24 @@ use std::fs; use std::path::{Path, PathBuf}; use std::process::Command; -use anyhow::{Context, Result}; +use anyhow::{bail, Context, Result}; +use lexopt::Arg; +use lexopt::ValueExt; +use serde_json::json; + +use crate::services::output_format::OutputFormat; pub const NAME: &str = "doctor"; const REQUIRED_HOOKS: [&str; 3] = ["pre-commit", "commit-msg", "post-commit"]; +pub type DoctorFormat = OutputFormat; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct DoctorRequest { + pub format: DoctorFormat, +} + #[derive(Clone, Copy, Debug, Eq, PartialEq)] enum Readiness { Ready, @@ -39,11 +51,55 @@ struct HookDoctorReport { diagnostics: Vec, } -pub fn run_doctor() -> Result { +pub fn run_doctor(request: DoctorRequest) -> Result { let repository_root = std::env::current_dir().context("Failed to determine current directory")?; let report = build_report(&repository_root); - Ok(format_report(&report)) + render_report(request, &report) +} + +pub fn doctor_usage_text() -> &'static str { + "Usage:\n sce doctor [--format ]\n\nExamples:\n sce doctor\n sce doctor --format json\n sce doctor | rg 'not ready'" +} + +pub fn parse_doctor_request(args: Vec) -> Result { + let mut parser = lexopt::Parser::from_args(args); + let mut format = DoctorFormat::Text; + + while let Some(arg) = parser.next()? { + match arg { + Arg::Long("format") => { + let value = parser + .value() + .context("Option '--format' requires a value")?; + let raw = value.string()?; + format = DoctorFormat::parse(&raw, "sce doctor --help")?; + } + Arg::Long("help") | Arg::Short('h') => { + bail!("Use 'sce doctor --help' for doctor usage."); + } + Arg::Long(option) => { + bail!( + "Unknown doctor option '--{}'. 
Run 'sce doctor --help' to see valid usage.", + option + ); + } + Arg::Short(option) => { + bail!( + "Unknown doctor option '-{}'. Run 'sce doctor --help' to see valid usage.", + option + ); + } + Arg::Value(value) => { + bail!( + "Unexpected doctor argument '{}'. Run 'sce doctor --help' to see valid usage.", + value.string()? + ); + } + } + } + + Ok(DoctorRequest { format }) } fn build_report(repository_root: &Path) -> HookDoctorReport { @@ -244,6 +300,65 @@ fn format_report(report: &HookDoctorReport) -> String { lines.join("\n") } +fn render_report(request: DoctorRequest, report: &HookDoctorReport) -> Result { + match request.format { + DoctorFormat::Text => Ok(format_report(report)), + DoctorFormat::Json => render_report_json(report), + } +} + +fn render_report_json(report: &HookDoctorReport) -> Result { + let hooks = report + .hooks + .iter() + .map(|hook| { + json!({ + "name": hook.name, + "path": hook.path.display().to_string(), + "exists": hook.exists, + "executable": hook.executable, + "state": hook_state(hook), + }) + }) + .collect::>(); + + let payload = json!({ + "status": "ok", + "command": NAME, + "readiness": match report.readiness { + Readiness::Ready => "ready", + Readiness::NotReady => "not_ready", + }, + "hook_path_source": match report.hook_path_source { + HookPathSource::Default => "default", + HookPathSource::LocalConfig => "local_config", + HookPathSource::GlobalConfig => "global_config", + }, + "repository_root": report + .repository_root + .as_ref() + .map(|path| path.display().to_string()), + "hooks_directory": report + .hooks_directory + .as_ref() + .map(|path| path.display().to_string()), + "hooks": hooks, + "diagnostics": report.diagnostics, + }); + + serde_json::to_string_pretty(&payload).context("failed to serialize doctor report to JSON") +} + +fn hook_state(hook: &HookFileHealth) -> &'static str { + if hook.exists && hook.executable { + "ok" + } else if !hook.exists { + "missing" + } else { + "misconfigured" + } +} + #[cfg(test)] mod 
tests { use std::fs; @@ -252,15 +367,39 @@ mod tests { use std::process::Command; use anyhow::Result; + use serde_json::Value; use crate::services::setup::install_required_git_hooks; use crate::test_support::TestTempDir; use super::{ - build_report, collect_hook_health, format_report, HookDoctorReport, HookPathSource, - Readiness, + build_report, collect_hook_health, format_report, parse_doctor_request, render_report, + DoctorFormat, DoctorRequest, HookDoctorReport, HookPathSource, Readiness, NAME, }; + #[test] + fn parse_defaults_to_text_format() { + let request = parse_doctor_request(vec![]).expect("doctor request should parse"); + assert_eq!(request.format, DoctorFormat::Text); + } + + #[test] + fn parse_accepts_json_format() { + let request = parse_doctor_request(vec!["--format".to_string(), "json".to_string()]) + .expect("doctor request should parse"); + assert_eq!(request.format, DoctorFormat::Json); + } + + #[test] + fn parse_rejects_invalid_format_with_help_guidance() { + let error = parse_doctor_request(vec!["--format".to_string(), "yaml".to_string()]) + .expect_err("invalid doctor format should fail"); + assert_eq!( + error.to_string(), + "Invalid --format value 'yaml'. Valid values: text, json. Run 'sce doctor --help' to see valid usage." 
+ ); + } + #[test] fn doctor_output_reports_healthy_state_when_all_required_hooks_exist() -> Result<()> { let temp_dir = TestTempDir::new("doctor-healthy")?; @@ -396,6 +535,50 @@ mod tests { Ok(()) } + #[test] + fn render_json_includes_stable_fields() -> Result<()> { + let temp_dir = TestTempDir::new("doctor-json-shape")?; + let report = build_report(temp_dir.path()); + + let output = render_report( + DoctorRequest { + format: DoctorFormat::Json, + }, + &report, + )?; + + let parsed: Value = serde_json::from_str(&output)?; + assert_eq!(parsed["status"], "ok"); + assert_eq!(parsed["command"], NAME); + assert!(parsed["readiness"].as_str().is_some()); + assert!(parsed["hook_path_source"].as_str().is_some()); + assert!(parsed["hooks"].is_array()); + assert!(parsed["diagnostics"].is_array()); + Ok(()) + } + + #[test] + fn render_json_is_deterministic_for_same_report() -> Result<()> { + let temp_dir = TestTempDir::new("doctor-json-determinism")?; + let report = build_report(temp_dir.path()); + + let first = render_report( + DoctorRequest { + format: DoctorFormat::Json, + }, + &report, + )?; + let second = render_report( + DoctorRequest { + format: DoctorFormat::Json, + }, + &report, + )?; + + assert_eq!(first, second); + Ok(()) + } + fn init_git_repo(repository_root: &Path) -> Result<()> { run_git_in_repo(repository_root, &["init", "-q"]) } diff --git a/cli/src/services/hooks.rs b/cli/src/services/hooks.rs index c86bac66..163eead6 100644 --- a/cli/src/services/hooks.rs +++ b/cli/src/services/hooks.rs @@ -31,12 +31,14 @@ pub enum HookSubcommand { } pub fn hooks_usage_text() -> &'static str { - "Usage:\n sce hooks pre-commit\n sce hooks commit-msg \n sce hooks post-commit\n sce hooks post-rewrite \n\nGit executes hook scripts with these subcommands. `post-rewrite` reads rewrite pairs from STDIN." 
+ "Usage:\n sce hooks pre-commit\n sce hooks commit-msg \n sce hooks post-commit\n sce hooks post-rewrite \n\nExamples:\n sce hooks pre-commit\n sce hooks commit-msg .git/COMMIT_EDITMSG\n sce hooks post-commit\n printf 'oldsha newsha\\n' | sce hooks post-rewrite amend\n\nGit executes hook scripts with these subcommands. `post-rewrite` reads rewrite pairs from STDIN." } pub fn parse_hooks_subcommand(args: Vec) -> Result { if args.is_empty() { - bail!("Missing hook subcommand. Run 'sce hooks --help' to see valid usage."); + bail!( + "Missing hook subcommand. Try: run 'sce hooks --help' and use one of 'pre-commit', 'commit-msg', 'post-commit', or 'post-rewrite'." + ); } if args.len() == 1 && (args[0] == "--help" || args[0] == "-h") { @@ -51,13 +53,13 @@ pub fn parse_hooks_subcommand(args: Vec) -> Result { "commit-msg" => { if args.len() < 2 { bail!( - "Missing required argument '' for 'commit-msg'. Run 'sce hooks --help' to see valid usage." + "Missing required argument '' for 'commit-msg'. Try: run 'sce hooks commit-msg .git/COMMIT_EDITMSG'." ); } if args.len() > 2 { bail!( - "Unexpected extra argument '{}' for 'commit-msg'. Run 'sce hooks --help' to see valid usage.", + "Unexpected extra argument '{}' for 'commit-msg'. Try: pass exactly one path, for example 'sce hooks commit-msg .git/COMMIT_EDITMSG'.", args[2] ); } @@ -73,13 +75,13 @@ pub fn parse_hooks_subcommand(args: Vec) -> Result { "post-rewrite" => { if args.len() < 2 { bail!( - "Missing required argument '' for 'post-rewrite'. Run 'sce hooks --help' to see valid usage." + "Missing required argument '' for 'post-rewrite'. Try: run 'printf \"oldsha newsha\\n\" | sce hooks post-rewrite amend'." ); } if args.len() > 2 { bail!( - "Unexpected extra argument '{}' for 'post-rewrite'. Run 'sce hooks --help' to see valid usage.", + "Unexpected extra argument '{}' for 'post-rewrite'. 
Try: pass exactly one rewrite method (for example 'amend').", args[2] ); } @@ -89,7 +91,7 @@ pub fn parse_hooks_subcommand(args: Vec) -> Result { }) } unknown => bail!( - "Unknown hook subcommand '{}'. Run 'sce hooks --help' to see valid usage.", + "Unknown hook subcommand '{}'. Try: run 'sce hooks --help' and use one of 'pre-commit', 'commit-msg', 'post-commit', or 'post-rewrite'.", unknown ), } @@ -101,7 +103,7 @@ fn ensure_no_extra_hook_args(hook: &str, args: &[String]) -> Result<()> { } bail!( - "Unexpected extra argument '{}' for '{}'. Run 'sce hooks --help' to see valid usage.", + "Unexpected extra argument '{}' for '{}'. Try: remove extra arguments and run 'sce hooks --help' for exact syntax.", args[0], hook ) diff --git a/cli/src/services/hooks/tests.rs b/cli/src/services/hooks/tests.rs index 7c8bd76e..8892a856 100644 --- a/cli/src/services/hooks/tests.rs +++ b/cli/src/services/hooks/tests.rs @@ -1260,7 +1260,7 @@ fn parse_hooks_subcommand_rejects_missing_hook_name() { .expect_err("missing hook subcommand should return usage error"); assert_eq!( error.to_string(), - "Missing hook subcommand. Run 'sce hooks --help' to see valid usage." + "Missing hook subcommand. Try: run 'sce hooks --help' and use one of 'pre-commit', 'commit-msg', 'post-commit', or 'post-rewrite'." ); } @@ -1270,7 +1270,7 @@ fn parse_hooks_subcommand_requires_commit_msg_path() { .expect_err("commit-msg requires "); assert_eq!( error.to_string(), - "Missing required argument '' for 'commit-msg'. Run 'sce hooks --help' to see valid usage." + "Missing required argument '' for 'commit-msg'. Try: run 'sce hooks commit-msg .git/COMMIT_EDITMSG'." 
); } diff --git a/cli/src/services/local_db.rs b/cli/src/services/local_db.rs index f98d1657..3ada65e2 100644 --- a/cli/src/services/local_db.rs +++ b/cli/src/services/local_db.rs @@ -3,6 +3,8 @@ use std::path::{Path, PathBuf}; use anyhow::{anyhow, ensure, Context, Result}; use turso::Builder; +use crate::services::resilience::{run_with_retry, RetryPolicy}; + const CORE_SCHEMA_STATEMENTS: &[&str] = &[ "CREATE TABLE IF NOT EXISTS repositories (\ id INTEGER PRIMARY KEY,\ @@ -126,6 +128,13 @@ const CORE_SCHEMA_STATEMENTS: &[&str] = &[ "CREATE INDEX IF NOT EXISTS idx_reconciliation_metrics_created_at ON reconciliation_metrics(created_at)", ]; +const CORE_SCHEMA_RETRY_POLICY: RetryPolicy = RetryPolicy { + max_attempts: 3, + timeout_ms: 5_000, + initial_backoff_ms: 150, + max_backoff_ms: 600, +}; + #[derive(Clone, Copy, Debug)] #[allow(dead_code)] pub enum LocalDatabaseTarget<'a> { @@ -159,10 +168,15 @@ pub fn ensure_agent_trace_local_db_ready_blocking() -> Result { })?; } - let runtime = tokio::runtime::Builder::new_current_thread().build()?; - runtime.block_on(apply_core_schema_migrations(LocalDatabaseTarget::Path( - &db_path, - )))?; + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_time() + .build()?; + runtime.block_on(run_with_retry( + CORE_SCHEMA_RETRY_POLICY, + "local_db.apply_core_schema_migrations", + "retry the command; if it persists, verify state-directory permissions and available disk space.", + |_| apply_core_schema_migrations(LocalDatabaseTarget::Path(&db_path)), + ))?; Ok(db_path) } @@ -183,7 +197,7 @@ fn target_location(target: LocalDatabaseTarget<'_>) -> Result<&str> { } } -fn resolve_state_data_root() -> Result { +pub(crate) fn resolve_state_data_root() -> Result { #[cfg(target_os = "windows")] { if let Some(local_app_data) = std::env::var_os("LOCALAPPDATA") { @@ -192,6 +206,8 @@ fn resolve_state_data_root() -> Result { if let Some(app_data) = std::env::var_os("APPDATA") { return Ok(PathBuf::from(app_data)); } + + return 
Ok(resolve_home_dir()?.join("AppData").join("Local")); } #[cfg(target_os = "macos")] diff --git a/cli/src/services/mcp.rs b/cli/src/services/mcp.rs index b59b43da..4102e2f4 100644 --- a/cli/src/services/mcp.rs +++ b/cli/src/services/mcp.rs @@ -1,7 +1,63 @@ -use anyhow::Result; +use anyhow::{bail, Context, Result}; +use lexopt::Arg; +use lexopt::ValueExt; +use serde_json::json; + +use crate::services::output_format::OutputFormat; pub const NAME: &str = "mcp"; +pub type McpFormat = OutputFormat; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct McpRequest { + pub format: McpFormat, +} + +pub fn mcp_usage_text() -> &'static str { + "Usage:\n sce mcp [--format ]\n\nExamples:\n sce mcp\n sce mcp --format json" +} + +pub fn parse_mcp_request(args: Vec) -> Result { + let mut parser = lexopt::Parser::from_args(args); + let mut format = McpFormat::Text; + + while let Some(arg) = parser.next()? { + match arg { + Arg::Long("format") => { + let value = parser + .value() + .context("Option '--format' requires a value")?; + let raw = value.string()?; + format = McpFormat::parse(&raw, "sce mcp --help")?; + } + Arg::Long("help") | Arg::Short('h') => { + bail!("Use 'sce mcp --help' for mcp usage."); + } + Arg::Long(option) => { + bail!( + "Unknown mcp option '--{}'. Run 'sce mcp --help' to see valid usage.", + option + ); + } + Arg::Short(option) => { + bail!( + "Unknown mcp option '-{}'. Run 'sce mcp --help' to see valid usage.", + option + ); + } + Arg::Value(value) => { + bail!( + "Unexpected mcp argument '{}'. Run 'sce mcp --help' to see valid usage.", + value.string()? 
+ ); + } + } + } + + Ok(McpRequest { format }) +} + #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum McpTransport { Stdio, @@ -63,23 +119,89 @@ impl McpService for PlaceholderMcpService { } } -pub fn run_placeholder_mcp() -> Result { +pub fn run_placeholder_mcp(request: McpRequest) -> Result { let service = PlaceholderMcpService; let snapshot = service.capability_snapshot(); let policy = service.cache_policy(); - Ok(format!( - "TODO: '{NAME}' is planned and not implemented yet. MCP file-cache surface defines {} placeholder tool contract(s) with max {} entries.", - snapshot.contracts.len(), - policy.max_entries - )) + match request.format { + McpFormat::Text => Ok(format!( + "TODO: '{NAME}' is planned and not implemented yet. MCP file-cache surface defines {} placeholder tool contract(s) with max {} entries. Next step: run 'sce mcp --help' for current placeholder usage while runtime execution remains disabled.", + snapshot.contracts.len(), + policy.max_entries + )), + McpFormat::Json => { + let payload = json!({ + "status": "ok", + "command": NAME, + "placeholder_state": "planned", + "runnable": snapshot.runnable, + "transport": transport_name(snapshot.transport), + "supported_transports": snapshot + .supported_transports + .iter() + .map(|transport| transport_name(*transport)) + .collect::>(), + "capabilities": snapshot + .contracts + .iter() + .map(|contract| json!({ + "tool_name": contract.tool_name, + "purpose": contract.purpose, + })) + .collect::>(), + "cache_policy": { + "max_entries": policy.max_entries, + "content_hashing": policy.content_hashing, + }, + "next_step": "Run 'sce mcp --help' for current placeholder usage while runtime execution remains disabled.", + }); + + serde_json::to_string_pretty(&payload) + .context("failed to serialize mcp placeholder report to JSON") + } + } +} + +fn transport_name(transport: McpTransport) -> &'static str { + match transport { + McpTransport::Stdio => "stdio", + McpTransport::LocalSocket => "local_socket", + } } 
#[cfg(test)] mod tests { use anyhow::Result; + use serde_json::Value; + + use super::{ + parse_mcp_request, run_placeholder_mcp, McpFormat, McpRequest, McpService, + PlaceholderMcpService, NAME, + }; + + #[test] + fn parse_defaults_to_text_format() { + let request = parse_mcp_request(vec![]).expect("mcp request should parse"); + assert_eq!(request.format, McpFormat::Text); + } - use super::{run_placeholder_mcp, McpService, PlaceholderMcpService}; + #[test] + fn parse_accepts_json_format() { + let request = parse_mcp_request(vec!["--format".to_string(), "json".to_string()]) + .expect("mcp request should parse"); + assert_eq!(request.format, McpFormat::Json); + } + + #[test] + fn parse_rejects_invalid_format_with_help_guidance() { + let error = parse_mcp_request(vec!["--format".to_string(), "yaml".to_string()]) + .expect_err("invalid mcp format should fail"); + assert_eq!( + error.to_string(), + "Invalid --format value 'yaml'. Valid values: text, json. Run 'sce mcp --help' to see valid usage." 
+ ); + } #[test] fn mcp_placeholder_snapshot_is_non_runnable() { @@ -96,8 +218,40 @@ mod tests { #[test] fn mcp_placeholder_message_mentions_contracts() -> Result<()> { - let message = run_placeholder_mcp()?; + let message = run_placeholder_mcp(McpRequest { + format: McpFormat::Text, + })?; assert!(message.contains("file-cache surface")); Ok(()) } + + #[test] + fn mcp_json_output_includes_stable_fields() -> Result<()> { + let output = run_placeholder_mcp(McpRequest { + format: McpFormat::Json, + })?; + let parsed: Value = serde_json::from_str(&output)?; + assert_eq!(parsed["status"], "ok"); + assert_eq!(parsed["command"], NAME); + assert_eq!(parsed["placeholder_state"], "planned"); + assert!(parsed["runnable"].is_boolean()); + assert!(parsed["supported_transports"].is_array()); + assert!(parsed["capabilities"].is_array()); + assert!(parsed["cache_policy"].is_object()); + assert!(parsed["next_step"].as_str().is_some()); + Ok(()) + } + + #[test] + fn mcp_json_output_is_deterministic_for_same_request() -> Result<()> { + let first = run_placeholder_mcp(McpRequest { + format: McpFormat::Json, + })?; + let second = run_placeholder_mcp(McpRequest { + format: McpFormat::Json, + })?; + + assert_eq!(first, second); + Ok(()) + } } diff --git a/cli/src/services/mod.rs b/cli/src/services/mod.rs index a6337780..f55f6d50 100644 --- a/cli/src/services/mod.rs +++ b/cli/src/services/mod.rs @@ -1,8 +1,15 @@ pub mod agent_trace; +pub mod completion; +pub mod config; pub mod doctor; pub mod hooks; pub mod hosted_reconciliation; pub mod local_db; pub mod mcp; +pub mod observability; +pub mod output_format; +pub mod resilience; +pub mod security; pub mod setup; pub mod sync; +pub mod version; diff --git a/cli/src/services/observability.rs b/cli/src/services/observability.rs new file mode 100644 index 00000000..eb4e44b9 --- /dev/null +++ b/cli/src/services/observability.rs @@ -0,0 +1,827 @@ +use std::fs::{File, OpenOptions}; +use std::io::Write; +#[cfg(unix)] +use 
std::os::unix::fs::{MetadataExt, OpenOptionsExt, PermissionsExt}; +#[cfg(unix)] +use std::path::Path; +use std::path::PathBuf; +use std::sync::{Arc, Mutex}; + +use anyhow::{anyhow, bail, Result}; +use opentelemetry::trace::TracerProvider; +use opentelemetry_otlp::WithExportConfig; +use opentelemetry_sdk::trace::SdkTracerProvider; +use serde_json::json; +use tracing_subscriber::prelude::*; + +use crate::services::security::redact_sensitive_text; + +pub const NAME: &str = "observability"; + +const ENV_LOG_LEVEL: &str = "SCE_LOG_LEVEL"; +const ENV_LOG_FORMAT: &str = "SCE_LOG_FORMAT"; +const ENV_LOG_FILE: &str = "SCE_LOG_FILE"; +const ENV_LOG_FILE_MODE: &str = "SCE_LOG_FILE_MODE"; +const ENV_OTEL_ENABLED: &str = "SCE_OTEL_ENABLED"; +const ENV_OTEL_ENDPOINT: &str = "OTEL_EXPORTER_OTLP_ENDPOINT"; +const ENV_OTEL_PROTOCOL: &str = "OTEL_EXPORTER_OTLP_PROTOCOL"; + +const DEFAULT_OTEL_ENDPOINT: &str = "http://127.0.0.1:4317"; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum OtlpProtocol { + Grpc, + HttpProtobuf, +} + +impl OtlpProtocol { + fn parse(raw: &str) -> Result { + match raw { + "grpc" => Ok(Self::Grpc), + "http/protobuf" => Ok(Self::HttpProtobuf), + _ => bail!( + "Invalid {} '{}'. 
Valid values: grpc, http/protobuf.", + ENV_OTEL_PROTOCOL, + raw + ), + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct TelemetryConfig { + enabled: bool, + endpoint: String, + protocol: OtlpProtocol, +} + +impl Default for TelemetryConfig { + fn default() -> Self { + Self { + enabled: false, + endpoint: DEFAULT_OTEL_ENDPOINT.to_string(), + protocol: OtlpProtocol::Grpc, + } + } +} + +impl TelemetryConfig { + fn from_env_lookup(lookup: F) -> Result + where + F: Fn(&str) -> Option, + { + let mut config = Self::default(); + + if let Some(raw) = lookup(ENV_OTEL_ENABLED) { + config.enabled = parse_bool_env(ENV_OTEL_ENABLED, &raw)?; + } + + if !config.enabled { + return Ok(config); + } + + if let Some(raw) = lookup(ENV_OTEL_PROTOCOL) { + config.protocol = OtlpProtocol::parse(&raw)?; + } + + if let Some(raw) = lookup(ENV_OTEL_ENDPOINT) { + config.endpoint = raw; + } + + validate_otlp_endpoint(&config.endpoint)?; + + Ok(config) + } +} + +pub struct TelemetryRuntime { + provider: Option, +} + +impl TelemetryRuntime { + pub fn from_env() -> Result { + Self::from_env_lookup(|key| std::env::var(key).ok()) + } + + fn from_env_lookup(lookup: F) -> Result + where + F: Fn(&str) -> Option, + { + let config = TelemetryConfig::from_env_lookup(lookup)?; + Self::from_config(config) + } + + fn from_config(config: TelemetryConfig) -> Result { + if !config.enabled { + return Ok(Self { provider: None }); + } + + let exporter = match config.protocol { + OtlpProtocol::Grpc => opentelemetry_otlp::SpanExporter::builder() + .with_tonic() + .with_endpoint(config.endpoint.clone()) + .build() + .map_err(|error| anyhow!("Failed to initialize OTLP gRPC exporter: {error}"))?, + OtlpProtocol::HttpProtobuf => opentelemetry_otlp::SpanExporter::builder() + .with_http() + .with_endpoint(config.endpoint.clone()) + .build() + .map_err(|error| anyhow!("Failed to initialize OTLP HTTP exporter: {error}"))?, + }; + + let provider = SdkTracerProvider::builder() + .with_simple_exporter(exporter) + 
.build(); + + Ok(Self { + provider: Some(provider), + }) + } + + pub fn with_default_subscriber(&self, action: F) -> T + where + F: FnOnce() -> T, + { + if let Some(provider) = &self.provider { + let tracer = provider.tracer("sce-cli"); + let subscriber = tracing_subscriber::registry() + .with(tracing_opentelemetry::layer().with_tracer(tracer)); + return tracing::subscriber::with_default(subscriber, action); + } + + action() + } +} + +impl Drop for TelemetryRuntime { + fn drop(&mut self) { + if let Some(provider) = self.provider.take() { + let _ = provider.shutdown(); + } + } +} + +fn parse_bool_env(key: &str, raw: &str) -> Result { + match raw { + "1" | "true" => Ok(true), + "0" | "false" => Ok(false), + _ => bail!( + "Invalid {} '{}'. Valid values: true, false, 1, 0.", + key, + raw + ), + } +} + +fn validate_otlp_endpoint(endpoint: &str) -> Result<()> { + if endpoint.is_empty() { + bail!( + "Invalid {} ''. Try: set it to an absolute http(s) URL, for example {}.", + ENV_OTEL_ENDPOINT, + DEFAULT_OTEL_ENDPOINT + ); + } + + if endpoint.starts_with("http://") || endpoint.starts_with("https://") { + return Ok(()); + } + + bail!( + "Invalid {} '{}'. Try: set it to an absolute http(s) URL, for example {}.", + ENV_OTEL_ENDPOINT, + endpoint, + DEFAULT_OTEL_ENDPOINT + ) +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum LogFormat { + Text, + Json, +} + +impl LogFormat { + fn parse(raw: &str) -> Result { + match raw { + "text" => Ok(Self::Text), + "json" => Ok(Self::Json), + _ => bail!( + "Invalid {} '{}'. 
Valid values: text, json.", + ENV_LOG_FORMAT, + raw + ), + } + } + + fn as_str(self) -> &'static str { + match self { + Self::Text => "text", + Self::Json => "json", + } + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum LogLevel { + Error, + Warn, + Info, + Debug, +} + +impl LogLevel { + fn parse(raw: &str) -> Result { + match raw { + "error" => Ok(Self::Error), + "warn" => Ok(Self::Warn), + "info" => Ok(Self::Info), + "debug" => Ok(Self::Debug), + _ => bail!( + "Invalid {} '{}'. Valid values: error, warn, info, debug.", + ENV_LOG_LEVEL, + raw + ), + } + } + + fn as_str(self) -> &'static str { + match self { + Self::Error => "error", + Self::Warn => "warn", + Self::Info => "info", + Self::Debug => "debug", + } + } + + fn severity(self) -> u8 { + match self { + Self::Error => 1, + Self::Warn => 2, + Self::Info => 3, + Self::Debug => 4, + } + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct ObservabilityConfig { + pub level: LogLevel, + pub format: LogFormat, +} + +impl Default for ObservabilityConfig { + fn default() -> Self { + Self { + level: LogLevel::Info, + format: LogFormat::Text, + } + } +} + +#[derive(Debug)] +pub struct Logger { + config: ObservabilityConfig, + file_sink: Option, +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum LogFileMode { + Truncate, + Append, +} + +impl LogFileMode { + fn parse(raw: &str) -> Result { + match raw { + "truncate" => Ok(Self::Truncate), + "append" => Ok(Self::Append), + _ => bail!( + "Invalid {} '{}'. Valid values: truncate, append.", + ENV_LOG_FILE_MODE, + raw + ), + } + } +} + +#[derive(Debug)] +struct LogFileSink { + path: PathBuf, + writer: Arc>, +} + +impl LogFileSink { + fn open(path: PathBuf, mode: LogFileMode) -> Result { + if path.as_os_str().is_empty() { + bail!( + "Invalid {} ''. 
Try: set it to an absolute or relative file path, for example .sce/sce.log.", + ENV_LOG_FILE + ); + } + + if let Some(parent) = path.parent() { + if !parent.as_os_str().is_empty() { + std::fs::create_dir_all(parent).map_err(|error| { + anyhow!( + "Failed to prepare log directory '{}': {}", + parent.display(), + error + ) + })?; + } + } + + let mut options = OpenOptions::new(); + options.create(true).write(true); + match mode { + LogFileMode::Truncate => { + options.truncate(true); + } + LogFileMode::Append => { + options.append(true); + } + } + + #[cfg(unix)] + { + options.mode(0o600); + } + + let file = options.open(&path).map_err(|error| { + anyhow!( + "Failed to open {} '{}': {}. Try: verify the path is writable or unset {}.", + ENV_LOG_FILE, + path.display(), + error, + ENV_LOG_FILE + ) + })?; + + #[cfg(unix)] + enforce_unix_log_file_permissions(&path)?; + + Ok(Self { + path, + writer: Arc::new(Mutex::new(file)), + }) + } + + fn write_line(&self, line: &str) -> Result<()> { + let mut writer = self + .writer + .lock() + .map_err(|_| anyhow!("Log file writer lock poisoned"))?; + writer.write_all(line.as_bytes())?; + writer.write_all(b"\n")?; + writer.flush()?; + Ok(()) + } +} + +impl Logger { + pub fn from_env() -> Result { + Self::from_env_lookup(|key| std::env::var(key).ok()) + } + + fn from_env_lookup(lookup: F) -> Result + where + F: Fn(&str) -> Option, + { + let mut config = ObservabilityConfig::default(); + let mut file_path = None; + let mut file_mode_raw_seen = false; + let mut file_mode = LogFileMode::Truncate; + + if let Some(raw) = lookup(ENV_LOG_LEVEL) { + config.level = LogLevel::parse(&raw)?; + } + + if let Some(raw) = lookup(ENV_LOG_FORMAT) { + config.format = LogFormat::parse(&raw)?; + } + + if let Some(raw) = lookup(ENV_LOG_FILE) { + file_path = Some(PathBuf::from(raw)); + } + + if let Some(raw) = lookup(ENV_LOG_FILE_MODE) { + file_mode_raw_seen = true; + file_mode = LogFileMode::parse(&raw)?; + } + + if file_path.is_none() && file_mode_raw_seen 
{ + bail!( + "{} requires {}. Try: set {} to a file path or unset {}.", + ENV_LOG_FILE_MODE, + ENV_LOG_FILE, + ENV_LOG_FILE, + ENV_LOG_FILE_MODE + ); + } + + let file_sink = match file_path { + Some(path) => Some(LogFileSink::open(path, file_mode)?), + None => None, + }; + + Ok(Self { config, file_sink }) + } + + pub fn info(&self, event_id: &str, message: &str, fields: &[(&str, &str)]) { + self.log(LogLevel::Info, event_id, message, fields); + } + + pub fn error(&self, event_id: &str, message: &str, fields: &[(&str, &str)]) { + self.log(LogLevel::Error, event_id, message, fields); + } + + fn log(&self, level: LogLevel, event_id: &str, message: &str, fields: &[(&str, &str)]) { + if !self.enabled(level) { + return; + } + + emit_tracing_event(level, event_id, message, fields); + + let line = self.render_line(level, event_id, message, fields); + let redacted_line = redact_sensitive_text(&line); + eprintln!("{}", redacted_line); + + if let Some(file_sink) = &self.file_sink { + if let Err(error) = file_sink.write_line(&redacted_line) { + eprintln!( + "Error: {}", + redact_sensitive_text(&format!( + "Failed to write log file '{}': {}. 
Try: verify the file is writable or unset {}.", + file_sink.path.display(), + error, + ENV_LOG_FILE + )) + ); + } + } + } + + fn enabled(&self, level: LogLevel) -> bool { + level.severity() <= self.config.level.severity() + } + + fn render_line( + &self, + level: LogLevel, + event_id: &str, + message: &str, + fields: &[(&str, &str)], + ) -> String { + match self.config.format { + LogFormat::Text => { + let mut line = format!( + "log_format={} level={} event_id={} message={}", + self.config.format.as_str(), + level.as_str(), + event_id, + message + ); + + for (key, value) in fields { + line.push(' '); + line.push_str(key); + line.push('='); + line.push_str(value); + } + + line + } + LogFormat::Json => { + let details = fields + .iter() + .map(|(key, value)| { + ( + (*key).to_string(), + serde_json::Value::String((*value).to_string()), + ) + }) + .collect::>(); + json!({ + "log_format": self.config.format.as_str(), + "level": level.as_str(), + "event_id": event_id, + "message": message, + "fields": details, + }) + .to_string() + } + } + } +} + +fn emit_tracing_event(level: LogLevel, event_id: &str, message: &str, fields: &[(&str, &str)]) { + let detail_fields = fields + .iter() + .map(|(key, value)| { + ( + (*key).to_string(), + serde_json::Value::String((*value).to_string()), + ) + }) + .collect::>(); + let fields_json = serde_json::Value::Object(detail_fields).to_string(); + + match level { + LogLevel::Error => tracing::error!( + target: "sce", + event_id = event_id, + event_message = message, + fields = %fields_json + ), + LogLevel::Warn => tracing::warn!( + target: "sce", + event_id = event_id, + event_message = message, + fields = %fields_json + ), + LogLevel::Info => tracing::info!( + target: "sce", + event_id = event_id, + event_message = message, + fields = %fields_json + ), + LogLevel::Debug => tracing::debug!( + target: "sce", + event_id = event_id, + event_message = message, + fields = %fields_json + ), + } +} + +#[cfg(unix)] +fn 
enforce_unix_log_file_permissions(path: &Path) -> Result<()> { + let metadata = std::fs::metadata(path).map_err(|error| { + anyhow!( + "Failed to inspect permissions for log file '{}': {}", + path.display(), + error + ) + })?; + + let mode = metadata.mode() & 0o777; + if mode & 0o077 != 0 { + std::fs::set_permissions(path, std::fs::Permissions::from_mode(0o600)).map_err( + |error| { + anyhow!( + "Failed to secure permissions for {} '{}': {}. Try: run 'chmod 600 {}' and retry.", + ENV_LOG_FILE, + path.display(), + error, + path.display() + ) + }, + )?; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + #[cfg(unix)] + use std::os::unix::fs::PermissionsExt; + use std::path::PathBuf; + + use super::{ + validate_otlp_endpoint, LogFormat, LogLevel, Logger, TelemetryConfig, TelemetryRuntime, + }; + + fn unique_temp_log_path(label: &str) -> PathBuf { + let nanos = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .expect("time should be monotonic") + .as_nanos(); + std::env::temp_dir().join(format!("sce-observability-{label}-{nanos}.log")) + } + + #[test] + fn logger_defaults_to_info_text() { + let logger = Logger::from_env_lookup(|_| None).expect("logger should parse defaults"); + let line = logger.render_line(LogLevel::Info, "sce.test.event", "hello", &[]); + assert_eq!( + line, + "log_format=text level=info event_id=sce.test.event message=hello" + ); + } + + #[test] + fn logger_parses_env_level_and_format() { + let logger = Logger::from_env_lookup(|key| match key { + "SCE_LOG_LEVEL" => Some("debug".to_string()), + "SCE_LOG_FORMAT" => Some("json".to_string()), + _ => None, + }) + .expect("logger should parse env"); + + let line = logger.render_line( + LogLevel::Info, + "sce.test.event", + "hello", + &[("command", "setup")], + ); + assert_eq!( + line, + "{\"event_id\":\"sce.test.event\",\"fields\":{\"command\":\"setup\"},\"level\":\"info\",\"log_format\":\"json\",\"message\":\"hello\"}" + ); + } + + #[test] + fn logger_rejects_invalid_level() { + let 
error = Logger::from_env_lookup(|key| { + if key == "SCE_LOG_LEVEL" { + return Some("trace".to_string()); + } + None + }) + .expect_err("invalid level should fail"); + + assert_eq!( + error.to_string(), + "Invalid SCE_LOG_LEVEL 'trace'. Valid values: error, warn, info, debug." + ); + } + + #[test] + fn logger_rejects_log_file_mode_without_path() { + let error = Logger::from_env_lookup(|key| { + if key == "SCE_LOG_FILE_MODE" { + return Some("append".to_string()); + } + None + }) + .expect_err("log file mode without path should fail"); + + assert_eq!( + error.to_string(), + "SCE_LOG_FILE_MODE requires SCE_LOG_FILE. Try: set SCE_LOG_FILE to a file path or unset SCE_LOG_FILE_MODE." + ); + } + + #[test] + fn logger_rejects_invalid_log_file_mode() { + let error = Logger::from_env_lookup(|key| match key { + "SCE_LOG_FILE" => Some(".sce/sce.log".to_string()), + "SCE_LOG_FILE_MODE" => Some("rotate".to_string()), + _ => None, + }) + .expect_err("invalid log file mode should fail"); + + assert_eq!( + error.to_string(), + "Invalid SCE_LOG_FILE_MODE 'rotate'. Valid values: truncate, append." 
+ ); + } + + #[test] + fn logger_file_sink_truncates_by_default() { + let log_path = unique_temp_log_path("truncate-default"); + std::fs::write(&log_path, "old-data\n").expect("should write prior content"); + + let logger = Logger::from_env_lookup(|key| { + if key == "SCE_LOG_FILE" { + return Some(log_path.display().to_string()); + } + None + }) + .expect("logger should initialize with file sink"); + + logger.info("sce.test.event", "hello", &[("command", "setup")]); + + let content = std::fs::read_to_string(&log_path).expect("should read log file"); + assert!(content.contains("event_id=sce.test.event")); + assert!(!content.contains("old-data")); + + let _ = std::fs::remove_file(log_path); + } + + #[test] + fn logger_file_sink_appends_when_requested() { + let log_path = unique_temp_log_path("append"); + std::fs::write(&log_path, "first\n").expect("should write prior content"); + + let logger = Logger::from_env_lookup(|key| match key { + "SCE_LOG_FILE" => Some(log_path.display().to_string()), + "SCE_LOG_FILE_MODE" => Some("append".to_string()), + _ => None, + }) + .expect("logger should initialize with append sink"); + + logger.info("sce.test.event", "hello", &[]); + + let content = std::fs::read_to_string(&log_path).expect("should read log file"); + assert!(content.starts_with("first\n")); + assert!(content.contains("event_id=sce.test.event")); + + let _ = std::fs::remove_file(log_path); + } + + #[cfg(unix)] + #[test] + fn logger_tightens_world_readable_log_file_permissions() { + let log_path = unique_temp_log_path("permissions"); + std::fs::write(&log_path, "seed\n").expect("should write seed file"); + std::fs::set_permissions(&log_path, std::fs::Permissions::from_mode(0o644)) + .expect("should set loose mode"); + + let logger = Logger::from_env_lookup(|key| { + if key == "SCE_LOG_FILE" { + return Some(log_path.display().to_string()); + } + None + }) + .expect("logger should repair loose permissions"); + + logger.info("sce.test.event", "hello", &[]); + + let mode = 
std::fs::metadata(&log_path) + .expect("metadata should be readable") + .permissions() + .mode() + & 0o777; + assert_eq!(mode, 0o600); + + let _ = std::fs::remove_file(log_path); + } + + #[test] + fn logger_level_filtering_is_threshold_based() { + let logger = Logger::from_env_lookup(|key| { + if key == "SCE_LOG_LEVEL" { + return Some("warn".to_string()); + } + None + }) + .expect("logger should parse warn level"); + + assert!(logger.enabled(LogLevel::Error)); + assert!(logger.enabled(LogLevel::Warn)); + assert!(!logger.enabled(LogLevel::Info)); + assert!(!logger.enabled(LogLevel::Debug)); + } + + #[test] + fn log_format_parser_accepts_documented_values() { + assert_eq!( + LogFormat::parse("text").expect("text should parse"), + LogFormat::Text + ); + assert_eq!( + LogFormat::parse("json").expect("json should parse"), + LogFormat::Json + ); + } + + #[test] + fn telemetry_defaults_to_disabled() { + let runtime = TelemetryRuntime::from_env_lookup(|_| None) + .expect("telemetry runtime should parse default config"); + assert!(runtime.provider.is_none()); + } + + #[test] + fn telemetry_rejects_invalid_enabled_value() { + let error = TelemetryConfig::from_env_lookup(|key| { + if key == "SCE_OTEL_ENABLED" { + return Some("maybe".to_string()); + } + None + }) + .expect_err("invalid enabled value should fail"); + + assert_eq!( + error.to_string(), + "Invalid SCE_OTEL_ENABLED 'maybe'. Valid values: true, false, 1, 0." + ); + } + + #[test] + fn telemetry_rejects_invalid_protocol_when_enabled() { + let error = TelemetryConfig::from_env_lookup(|key| match key { + "SCE_OTEL_ENABLED" => Some("true".to_string()), + "OTEL_EXPORTER_OTLP_PROTOCOL" => Some("udp".to_string()), + _ => None, + }) + .expect_err("invalid protocol should fail"); + + assert_eq!( + error.to_string(), + "Invalid OTEL_EXPORTER_OTLP_PROTOCOL 'udp'. Valid values: grpc, http/protobuf." 
+ ); + } + + #[test] + fn telemetry_rejects_invalid_endpoint_when_enabled() { + let error = validate_otlp_endpoint("collector:4317") + .expect_err("non-URL endpoint should fail validation"); + assert_eq!( + error.to_string(), + "Invalid OTEL_EXPORTER_OTLP_ENDPOINT 'collector:4317'. Try: set it to an absolute http(s) URL, for example http://127.0.0.1:4317." + ); + } +} diff --git a/cli/src/services/output_format.rs b/cli/src/services/output_format.rs new file mode 100644 index 00000000..fba4cc3e --- /dev/null +++ b/cli/src/services/output_format.rs @@ -0,0 +1,48 @@ +use anyhow::{bail, Result}; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum OutputFormat { + Text, + Json, +} + +impl OutputFormat { + pub fn parse(raw: &str, help_command: &str) -> Result { + match raw { + "text" => Ok(Self::Text), + "json" => Ok(Self::Json), + _ => bail!( + "Invalid --format value '{}'. Valid values: text, json. Run '{}' to see valid usage.", + raw, + help_command + ), + } + } +} + +#[cfg(test)] +mod tests { + use super::OutputFormat; + + #[test] + fn parser_accepts_text_and_json() { + assert_eq!( + OutputFormat::parse("text", "sce version --help").expect("text should parse"), + OutputFormat::Text + ); + assert_eq!( + OutputFormat::parse("json", "sce version --help").expect("json should parse"), + OutputFormat::Json + ); + } + + #[test] + fn parser_rejects_unknown_format_with_help_guidance() { + let error = OutputFormat::parse("xml", "sce config --help") + .expect_err("unknown format should fail"); + assert_eq!( + error.to_string(), + "Invalid --format value 'xml'. Valid values: text, json. Run 'sce config --help' to see valid usage." 
+ ); + } +} diff --git a/cli/src/services/resilience.rs b/cli/src/services/resilience.rs new file mode 100644 index 00000000..8cb71a1a --- /dev/null +++ b/cli/src/services/resilience.rs @@ -0,0 +1,198 @@ +use std::future::Future; +use std::time::Duration; + +use anyhow::{anyhow, ensure, Result}; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct RetryPolicy { + pub max_attempts: u32, + pub timeout_ms: u64, + pub initial_backoff_ms: u64, + pub max_backoff_ms: u64, +} + +impl RetryPolicy { + fn timeout(self) -> Duration { + Duration::from_millis(self.timeout_ms) + } + + fn backoff_for_attempt(self, attempt: u32) -> Duration { + if attempt <= 1 { + return Duration::from_millis(0); + } + + let exponent = (attempt - 2).min(20); + let multiplier = 1_u64 << exponent; + let backoff_ms = self + .initial_backoff_ms + .saturating_mul(multiplier) + .min(self.max_backoff_ms); + Duration::from_millis(backoff_ms) + } +} + +pub async fn run_with_retry( + policy: RetryPolicy, + operation_name: &str, + retry_hint: &str, + mut operation: Op, +) -> Result +where + Op: FnMut(u32) -> Fut, + Fut: Future>, +{ + ensure!( + policy.max_attempts > 0, + "Retry policy requires max_attempts >= 1" + ); + ensure!( + policy.timeout_ms > 0, + "Retry policy requires timeout_ms >= 1" + ); + ensure!( + policy.max_backoff_ms >= policy.initial_backoff_ms, + "Retry policy requires max_backoff_ms >= initial_backoff_ms" + ); + + let mut last_error = String::new(); + + for attempt in 1..=policy.max_attempts { + let outcome = tokio::time::timeout(policy.timeout(), operation(attempt)).await; + match outcome { + Ok(Ok(value)) => return Ok(value), + Ok(Err(error)) => { + last_error = error.to_string(); + } + Err(_) => { + last_error = format!("attempt {attempt} timed out after {}ms", policy.timeout_ms); + } + } + + if attempt == policy.max_attempts { + break; + } + + let backoff = policy.backoff_for_attempt(attempt + 1); + tracing::warn!( + event_id = "sce.resilience.retry", + operation = 
operation_name, + attempt, + max_attempts = policy.max_attempts, + timeout_ms = policy.timeout_ms, + backoff_ms = backoff.as_millis() as u64, + error = %last_error, + "Retrying operation after transient failure" + ); + tokio::time::sleep(backoff).await; + } + + Err(anyhow!( + "Operation '{operation_name}' failed after {} attempt(s) (timeout={}ms, backoff={}..{}ms). Last error: {}. Try: {}", + policy.max_attempts, + policy.timeout_ms, + policy.initial_backoff_ms, + policy.max_backoff_ms, + last_error, + retry_hint + )) +} + +#[cfg(test)] +mod tests { + use std::sync::atomic::{AtomicU32, Ordering}; + use std::sync::Arc; + + use anyhow::{anyhow, Result}; + + use super::{run_with_retry, RetryPolicy}; + + #[test] + fn run_with_retry_succeeds_after_transient_failures() -> Result<()> { + let attempts = Arc::new(AtomicU32::new(0)); + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_time() + .build()?; + + let value = runtime.block_on(run_with_retry( + RetryPolicy { + max_attempts: 3, + timeout_ms: 200, + initial_backoff_ms: 1, + max_backoff_ms: 2, + }, + "test.transient", + "rerun the command", + { + let attempts = Arc::clone(&attempts); + move |_| { + let attempts = Arc::clone(&attempts); + async move { + let current = attempts.fetch_add(1, Ordering::SeqCst) + 1; + if current < 3 { + return Err(anyhow!("transient failure {current}")); + } + Ok(current) + } + } + }, + ))?; + + assert_eq!(value, 3); + assert_eq!(attempts.load(Ordering::SeqCst), 3); + Ok(()) + } + + #[test] + fn run_with_retry_returns_actionable_error_after_exhaustion() -> Result<()> { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_time() + .build()?; + + let error = runtime + .block_on(run_with_retry::<(), _, _>( + RetryPolicy { + max_attempts: 2, + timeout_ms: 200, + initial_backoff_ms: 1, + max_backoff_ms: 1, + }, + "test.exhaustion", + "check permissions and retry", + |_| async { Err::<(), anyhow::Error>(anyhow!("permanent failure")) }, + )) + 
.expect_err("retry exhaustion should fail"); + + let message = error.to_string(); + assert!(message.contains("test.exhaustion")); + assert!(message.contains("failed after 2 attempt(s)")); + assert!(message.contains("Try: check permissions and retry")); + Ok(()) + } + + #[test] + fn run_with_retry_treats_timeout_as_retryable_failure() -> Result<()> { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_time() + .build()?; + + let error = runtime + .block_on(run_with_retry( + RetryPolicy { + max_attempts: 2, + timeout_ms: 5, + initial_backoff_ms: 1, + max_backoff_ms: 1, + }, + "test.timeout", + "reduce load and retry", + |_| async { + tokio::time::sleep(std::time::Duration::from_millis(30)).await; + Ok(()) + }, + )) + .expect_err("timeout exhaustion should fail"); + + assert!(error.to_string().contains("timed out")); + Ok(()) + } +} diff --git a/cli/src/services/security.rs b/cli/src/services/security.rs new file mode 100644 index 00000000..8ceb3152 --- /dev/null +++ b/cli/src/services/security.rs @@ -0,0 +1,210 @@ +use anyhow::Result; + +const REDACTED: &str = "[REDACTED]"; +const SENSITIVE_KEYS: &[&str] = &["password", "passwd", "secret", "token", "api_key", "apikey"]; + +pub fn redact_sensitive_text(input: &str) -> String { + let mut output = input.to_string(); + output = redact_bearer_token(&output); + output = redact_json_value(&output, "authorization"); + output = redact_authorization_value(&output); + + for key in SENSITIVE_KEYS { + output = redact_json_value(&output, key); + output = redact_assignment_value(&output, key, '='); + output = redact_assignment_value(&output, key, ':'); + } + + output +} + +fn redact_authorization_value(input: &str) -> String { + let mut output = input.to_string(); + output = redact_assignment_value(&output, "authorization", '='); + output = redact_assignment_value(&output, "authorization", ':'); + output +} + +fn redact_bearer_token(input: &str) -> String { + let mut output = input.to_string(); + let mut 
search_start = 0usize; + + loop { + let lower = output.to_lowercase(); + let Some(relative_start) = lower[search_start..].find("bearer ") else { + break; + }; + let token_start = search_start + relative_start + "bearer ".len(); + let token_end = find_token_end(&output, token_start); + if token_end == token_start { + break; + } + + output.replace_range(token_start..token_end, REDACTED); + search_start = token_start + REDACTED.len(); + } + + output +} + +fn redact_json_value(input: &str, key: &str) -> String { + let mut output = input.to_string(); + let mut search_start = 0usize; + let needle = format!("\"{key}\":\""); + let needle_len = needle.len(); + + loop { + let lower = output.to_lowercase(); + let Some(relative_start) = lower[search_start..].find(&needle) else { + break; + }; + let value_start = search_start + relative_start + needle_len; + let Some(relative_end) = output[value_start..].find('"') else { + break; + }; + let value_end = value_start + relative_end; + output.replace_range(value_start..value_end, REDACTED); + search_start = value_start + REDACTED.len(); + } + + output +} + +fn redact_assignment_value(input: &str, key: &str, separator: char) -> String { + let mut output = input.to_string(); + let mut search_start = 0usize; + let needle = format!("{key}{separator}"); + + loop { + let lower = output.to_lowercase(); + let Some(relative_start) = lower[search_start..].find(&needle) else { + break; + }; + + let mut value_start = search_start + relative_start + needle.len(); + while let Some(next) = output[value_start..].chars().next() { + if next == ' ' || next == '\t' { + value_start += next.len_utf8(); + continue; + } + break; + } + + let Some(first_char) = output[value_start..].chars().next() else { + break; + }; + + let (replace_start, replace_end) = if first_char == '"' || first_char == '\'' { + let quote = first_char; + let quoted_value_start = value_start + quote.len_utf8(); + let Some(relative_end) = output[quoted_value_start..].find(quote) else { 
+ break; + }; + let quoted_value_end = quoted_value_start + relative_end; + (quoted_value_start, quoted_value_end) + } else { + if key.eq_ignore_ascii_case("authorization") + && output[value_start..].to_lowercase().starts_with("bearer ") + { + let bearer_token_start = value_start + "bearer ".len(); + let bearer_token_end = find_token_end(&output, bearer_token_start); + (bearer_token_start, bearer_token_end) + } else { + let plain_value_end = find_token_end(&output, value_start); + (value_start, plain_value_end) + } + }; + + if replace_end == replace_start { + break; + } + + output.replace_range(replace_start..replace_end, REDACTED); + search_start = replace_start + REDACTED.len(); + } + + output +} + +fn find_token_end(input: &str, start: usize) -> usize { + for (offset, ch) in input[start..].char_indices() { + if ch.is_whitespace() || matches!(ch, ',' | ';' | ')') { + return start + offset; + } + } + + input.len() +} + +pub fn ensure_directory_is_writable(path: &std::path::Path, context: &str) -> Result<()> { + let metadata = std::fs::metadata(path).map_err(|error| { + anyhow::anyhow!( + "Failed to inspect {} '{}': {}", + context, + path.display(), + error + ) + })?; + + if !metadata.is_dir() { + anyhow::bail!("{} '{}' is not a directory", context, path.display()); + } + + let probe_name = format!( + ".sce-write-probe-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map_err(|error| anyhow::anyhow!("System clock is before UNIX_EPOCH: {}", error))? + .as_nanos() + ); + + let probe_path = path.join(probe_name); + std::fs::OpenOptions::new() + .write(true) + .create_new(true) + .open(&probe_path) + .map_err(|error| { + anyhow::anyhow!( + "Failed to verify write permissions for {} '{}': {}. 
Try: grant write access and retry.", + context, + path.display(), + error + ) + })?; + + let _ = std::fs::remove_file(&probe_path); + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::redact_sensitive_text; + + #[test] + fn redacts_assignment_values() { + let message = "failed password=hunter2 token=abc123"; + assert_eq!( + redact_sensitive_text(message), + "failed password=[REDACTED] token=[REDACTED]" + ); + } + + #[test] + fn redacts_json_values() { + let message = "{\"api_key\":\"secret-value\",\"status\":\"ok\"}"; + assert_eq!( + redact_sensitive_text(message), + "{\"api_key\":\"[REDACTED]\",\"status\":\"ok\"}" + ); + } + + #[test] + fn redacts_bearer_tokens() { + let message = "authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"; + assert_eq!( + redact_sensitive_text(message), + "authorization: Bearer [REDACTED]" + ); + } +} diff --git a/cli/src/services/setup.rs b/cli/src/services/setup.rs index 02223ed1..b84eaffb 100644 --- a/cli/src/services/setup.rs +++ b/cli/src/services/setup.rs @@ -8,6 +8,8 @@ use std::{ time::{SystemTime, UNIX_EPOCH}, }; +use crate::services::security::{ensure_directory_is_writable, redact_sensitive_text}; + pub const NAME: &str = "setup"; #[derive(Clone, Copy, Debug, Eq, PartialEq)] @@ -98,6 +100,7 @@ pub enum SetupDispatch { #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct SetupCliOptions { pub help: bool, + pub non_interactive: bool, pub opencode: bool, pub claude: bool, pub both: bool, @@ -105,22 +108,58 @@ pub struct SetupCliOptions { pub repo_path: Option, } -pub fn resolve_setup_hooks_repository(options: &SetupCliOptions) -> Result> { - if options.hooks { - if options.opencode || options.claude || options.both { - bail!( - "Option '--hooks' cannot be combined with '--opencode', '--claude', or '--both'. Run 'sce setup --help' to see valid usage." 
- ); - } +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct SetupRequest { + pub config_mode: Option, + pub install_hooks: bool, + pub hooks_repo_path: Option, +} - return Ok(options.repo_path.clone()); +pub fn resolve_setup_request(options: SetupCliOptions) -> Result { + if options.repo_path.is_some() && !options.hooks { + bail!( + "Option '--repo' requires '--hooks'. Try: run 'sce setup --hooks --repo ' or remove '--repo'." + ); } - if options.repo_path.is_some() { - bail!("Option '--repo' requires '--hooks'. Run 'sce setup --help' to see valid usage."); + let mut selected_targets = Vec::new(); + + if options.opencode { + selected_targets.push(SetupTarget::OpenCode); + } + if options.claude { + selected_targets.push(SetupTarget::Claude); + } + if options.both { + selected_targets.push(SetupTarget::Both); } - Ok(None) + if selected_targets.len() > 1 { + bail!( + "Options '--opencode', '--claude', and '--both' are mutually exclusive. Try: choose exactly one target flag (for example 'sce setup --opencode --non-interactive') or omit all target flags for interactive mode." + ); + } + + if options.non_interactive && selected_targets.is_empty() && !options.hooks { + bail!( + "Option '--non-interactive' requires a target flag. Try: 'sce setup --opencode --non-interactive', 'sce setup --claude --non-interactive', or 'sce setup --both --non-interactive'." 
+ ); + } + + let config_mode = match selected_targets.as_slice() { + [target] => Some(SetupMode::NonInteractive(*target)), + [] if options.hooks => None, + [] => Some(SetupMode::Interactive), + _ => unreachable!("target count already validated"), + }; + + let install_hooks = options.hooks || (config_mode == Some(SetupMode::Interactive)); + + Ok(SetupRequest { + config_mode, + install_hooks, + hooks_repo_path: options.repo_path, + }) } pub fn run_setup_for_mode(repository_root: &Path, mode: SetupMode) -> Result { @@ -142,11 +181,41 @@ pub fn run_setup_for_mode(repository_root: &Path, mode: SetupMode) -> Result Result { - let outcome = install_required_git_hooks(repository_root) + let normalized_repository_root = normalize_user_repository_path(repository_root)?; + let outcome = install_required_git_hooks(&normalized_repository_root) .context("Hook setup failed while installing required git hooks")?; Ok(format_required_hook_install_success_message(&outcome)) } +fn normalize_user_repository_path(repository_root: &Path) -> Result { + if repository_root.as_os_str().is_empty() { + bail!("Option '--repo' must not be empty. Try: pass a path to an existing git repository."); + } + + let canonical_repository_root = fs::canonicalize(repository_root).with_context(|| { + format!( + "Failed to resolve repository path '{}'. Try: pass a path to an existing git repository.", + repository_root.display() + ) + })?; + + let metadata = fs::metadata(&canonical_repository_root).with_context(|| { + format!( + "Failed to inspect repository path '{}'.", + canonical_repository_root.display() + ) + })?; + + if !metadata.is_dir() { + bail!( + "Repository path '{}' is not a directory. 
Try: pass a path to an existing git repository.", + canonical_repository_root.display() + ); + } + + Ok(canonical_repository_root) +} + fn format_setup_install_success_message(outcome: &SetupInstallOutcome) -> String { let selected_targets = outcome .target_results @@ -267,6 +336,7 @@ where F: FnMut(&Path, &Path) -> io::Result<()>, { let resolved_repository_root = resolve_git_repository_root(repository_root)?; + ensure_directory_is_writable(&resolved_repository_root, "repository root")?; let hooks_directory = resolve_git_hooks_directory(&resolved_repository_root)?; fs::create_dir_all(&hooks_directory).with_context(|| { format!( @@ -274,6 +344,7 @@ where hooks_directory.display() ) })?; + ensure_directory_is_writable(&hooks_directory, "git hooks directory")?; let mut hook_results = Vec::new(); for hook_asset in iter_required_hook_assets() { @@ -485,7 +556,7 @@ fn run_git_command_in_directory( let diagnostic = if stderr.is_empty() { "git command exited with a non-zero status".to_string() } else { - stderr + redact_sensitive_text(&stderr) }; bail!("{} {}", context_message, diagnostic); } @@ -556,6 +627,8 @@ fn install_embedded_setup_assets_with_rename( where F: FnMut(&Path, &Path) -> io::Result<()>, { + ensure_directory_is_writable(repository_root, "setup repository root")?; + let mut target_results = Vec::new(); for concrete_target in concrete_targets_for(target) { @@ -795,7 +868,7 @@ impl SetupTargetPrompter for InquireSetupTargetPrompter { Ok(SetupDispatch::Cancelled) } Err(InquireError::NotTTY) => bail!( - "Interactive setup requires a TTY. Re-run with '--opencode', '--claude', or '--both' for non-interactive automation." + "Interactive setup requires a TTY. Re-run with '--non-interactive' and one of '--opencode', '--claude', or '--both'." 
), Err(error) => Err(error.into()), } @@ -838,7 +911,7 @@ pub fn setup_cancelled_text() -> &'static str { } pub fn setup_usage_text() -> &'static str { - "Usage:\n sce setup [--opencode|--claude|--both]\n sce setup --hooks [--repo ]\n\nWithout a target flag, setup defaults to interactive target selection.\nTarget flags are mutually exclusive and intended for non-interactive automation.\n'--hooks' installs required git hooks for the current repository by default, or for '--repo ' when provided." + "Usage:\n sce setup [--opencode|--claude|--both] [--non-interactive] [--hooks] [--repo ]\n\nExamples:\n sce setup\n sce setup --opencode --non-interactive --hooks\n sce setup --both --non-interactive\n sce setup --hooks\n sce setup --hooks --repo ../demo-repo\n sce setup --opencode --non-interactive --hooks && sce doctor --format json\n\nWithout a target flag, setup defaults to interactive target selection.\nDefault interactive setup installs selected config assets and required hooks in one run.\nUse '--non-interactive' to fail fast instead of prompting; it requires '--opencode', '--claude', or '--both' when running config setup.\nTarget flags are mutually exclusive and intended for non-interactive automation.\n'--hooks' installs required git hooks for the current repository by default, or for '--repo ' when provided.\nLegacy one-purpose invocations remain supported: target-only runs install config assets, and '--hooks' without a target installs hooks only." } pub fn parse_setup_cli_options(args: I) -> Result @@ -850,6 +923,7 @@ where while let Some(arg) = parser.next()? { match arg { + Arg::Long("non-interactive") => options.non_interactive = true, Arg::Long("opencode") => options.opencode = true, Arg::Long("claude") => options.claude = true, Arg::Long("both") => options.both = true, @@ -857,10 +931,12 @@ where Arg::Long("repo") => { let value = parser .value() - .context("Option '--repo' requires a path value")?; + .context( + "Option '--repo' requires a path value. 
Try: 'sce setup --hooks --repo ../demo-repo'.", + )?; if options.repo_path.is_some() { bail!( - "Option '--repo' may only be provided once. Run 'sce setup --help' to see valid usage." + "Option '--repo' may only be provided once. Try: keep a single '--repo ' value and rerun." ); } options.repo_path = Some(PathBuf::from(value.string()?)); @@ -868,20 +944,20 @@ where Arg::Long("help") | Arg::Short('h') => options.help = true, Arg::Long(option) => { bail!( - "Unknown setup option '--{}'. Run 'sce setup --help' to see valid usage.", + "Unknown setup option '--{}'. Try: run 'sce setup --help' to see supported setup options.", option ); } Arg::Short(option) => { bail!( - "Unknown setup option '-{}'. Run 'sce setup --help' to see valid usage.", + "Unknown setup option '-{}'. Try: run 'sce setup --help' to see supported setup options.", option ); } Arg::Value(value) => { let value = value.string()?; bail!( - "Unexpected setup argument '{}'. Run 'sce setup --help' to see valid usage.", + "Unexpected setup argument '{}'. Try: remove the extra argument and use 'sce setup --help' for supported forms.", value ); } @@ -891,27 +967,5 @@ where Ok(options) } -pub fn resolve_setup_mode(options: SetupCliOptions) -> Result { - let mut selected_targets = Vec::new(); - - if options.opencode { - selected_targets.push(SetupTarget::OpenCode); - } - if options.claude { - selected_targets.push(SetupTarget::Claude); - } - if options.both { - selected_targets.push(SetupTarget::Both); - } - - match selected_targets.as_slice() { - [] => Ok(SetupMode::Interactive), - [target] => Ok(SetupMode::NonInteractive(*target)), - _ => bail!( - "Options '--opencode', '--claude', and '--both' are mutually exclusive. Choose exactly one target flag or none for interactive mode." 
- ), - } -} - #[cfg(test)] mod tests; diff --git a/cli/src/services/setup/tests.rs b/cli/src/services/setup/tests.rs index 20ee5fdf..43bdfae5 100644 --- a/cli/src/services/setup/tests.rs +++ b/cli/src/services/setup/tests.rs @@ -13,9 +13,9 @@ use super::{ install_embedded_setup_assets_with_rename, install_required_git_hooks, install_required_git_hooks_with_rename, iter_embedded_assets_for_setup_target, iter_required_hook_assets, parse_setup_cli_options, resolve_setup_dispatch, - resolve_setup_hooks_repository, resolve_setup_mode, run_setup_for_mode, run_setup_hooks, - setup_usage_text, RequiredHookAsset, RequiredHookInstallStatus, SetupCliOptions, SetupDispatch, - SetupMode, SetupTarget, + resolve_setup_request, run_setup_for_mode, run_setup_hooks, setup_usage_text, + RequiredHookAsset, RequiredHookInstallStatus, SetupCliOptions, SetupDispatch, SetupMode, + SetupRequest, SetupTarget, }; #[derive(Clone, Copy, Debug)] @@ -43,23 +43,38 @@ fn run_setup_rejects_unresolved_interactive_mode() { #[test] fn setup_options_default_to_interactive_mode() -> Result<()> { let options = parse_setup_cli_options(Vec::::new())?; - let mode = resolve_setup_mode(options)?; - assert_eq!(mode, SetupMode::Interactive); + let request = resolve_setup_request(options)?; + assert_eq!( + request, + SetupRequest { + config_mode: Some(SetupMode::Interactive), + install_hooks: true, + hooks_repo_path: None, + } + ); Ok(()) } #[test] fn setup_options_parse_opencode_flag() -> Result<()> { let options = parse_setup_cli_options(vec!["--opencode".to_string()])?; - let mode = resolve_setup_mode(options)?; - assert_eq!(mode, SetupMode::NonInteractive(SetupTarget::OpenCode)); + let request = resolve_setup_request(options)?; + assert_eq!( + request, + SetupRequest { + config_mode: Some(SetupMode::NonInteractive(SetupTarget::OpenCode)), + install_hooks: false, + hooks_repo_path: None, + } + ); Ok(()) } #[test] fn setup_options_reject_mutually_exclusive_flags() { - let error = 
resolve_setup_mode(SetupCliOptions { + let error = resolve_setup_request(SetupCliOptions { help: false, + non_interactive: false, opencode: true, claude: true, both: false, @@ -70,7 +85,7 @@ fn setup_options_reject_mutually_exclusive_flags() { assert_eq!( error.to_string(), - "Options '--opencode', '--claude', and '--both' are mutually exclusive. Choose exactly one target flag or none for interactive mode." + "Options '--opencode', '--claude', and '--both' are mutually exclusive. Try: choose exactly one target flag (for example 'sce setup --opencode --non-interactive') or omit all target flags for interactive mode." ); } @@ -78,14 +93,43 @@ fn setup_options_reject_mutually_exclusive_flags() { fn setup_usage_contract_mentions_target_flags() { let usage = setup_usage_text(); assert!(usage.contains("--opencode|--claude|--both")); - assert!(usage.contains("sce setup --hooks [--repo ]")); + assert!(usage.contains("--non-interactive")); + assert!(usage.contains("[--hooks] [--repo ]")); + assert!(usage.contains("sce setup --opencode --non-interactive --hooks")); + assert!(usage.contains("sce doctor --format json")); +} + +#[test] +fn setup_options_parse_non_interactive_flag() -> Result<()> { + let options = parse_setup_cli_options(vec!["--non-interactive".to_string()])?; + assert!(options.non_interactive); + Ok(()) +} + +#[test] +fn setup_options_reject_non_interactive_without_target() { + let options = parse_setup_cli_options(vec!["--non-interactive".to_string()]) + .expect("parsing should succeed before validation"); + let error = resolve_setup_request(options) + .expect_err("--non-interactive without a target should fail validation"); + assert_eq!( + error.to_string(), + "Option '--non-interactive' requires a target flag. Try: 'sce setup --opencode --non-interactive', 'sce setup --claude --non-interactive', or 'sce setup --both --non-interactive'." 
+ ); } #[test] fn setup_options_parse_hooks_without_repo() -> Result<()> { let options = parse_setup_cli_options(vec!["--hooks".to_string()])?; - let repo = resolve_setup_hooks_repository(&options)?; - assert_eq!(repo, None); + let request = resolve_setup_request(options)?; + assert_eq!( + request, + SetupRequest { + config_mode: None, + install_hooks: true, + hooks_repo_path: None, + } + ); Ok(()) } @@ -96,8 +140,15 @@ fn setup_options_parse_hooks_with_repo() -> Result<()> { "--repo".to_string(), "tmp/repo".to_string(), ])?; - let repo = resolve_setup_hooks_repository(&options)?; - assert_eq!(repo, Some(PathBuf::from("tmp/repo"))); + let request = resolve_setup_request(options)?; + assert_eq!( + request, + SetupRequest { + config_mode: None, + install_hooks: true, + hooks_repo_path: Some(PathBuf::from("tmp/repo")), + } + ); Ok(()) } @@ -105,24 +156,27 @@ fn setup_options_parse_hooks_with_repo() -> Result<()> { fn setup_options_reject_repo_without_hooks() { let options = parse_setup_cli_options(vec!["--repo".to_string(), "tmp/repo".to_string()]) .expect("parsing --repo should succeed before validation"); - let error = - resolve_setup_hooks_repository(&options).expect_err("--repo without --hooks should fail"); + let error = resolve_setup_request(options).expect_err("--repo without --hooks should fail"); assert_eq!( error.to_string(), - "Option '--repo' requires '--hooks'. Run 'sce setup --help' to see valid usage." + "Option '--repo' requires '--hooks'. Try: run 'sce setup --hooks --repo ' or remove '--repo'." 
); } #[test] -fn setup_options_reject_hooks_with_target_flags() { +fn setup_options_allow_hooks_with_target_flags() -> Result<()> { let options = parse_setup_cli_options(vec!["--hooks".to_string(), "--opencode".to_string()]) .expect("parsing should succeed before validation"); - let error = resolve_setup_hooks_repository(&options) - .expect_err("--hooks with target flags should fail"); + let request = resolve_setup_request(options)?; assert_eq!( - error.to_string(), - "Option '--hooks' cannot be combined with '--opencode', '--claude', or '--both'. Run 'sce setup --help' to see valid usage." + request, + SetupRequest { + config_mode: Some(SetupMode::NonInteractive(SetupTarget::OpenCode)), + install_hooks: true, + hooks_repo_path: None, + } ); + Ok(()) } #[test] @@ -142,6 +196,27 @@ fn run_setup_hooks_reports_per_hook_statuses() -> Result<()> { Ok(()) } +#[test] +fn run_setup_hooks_rejects_missing_repo_path() { + let missing_path = PathBuf::from("/definitely/missing/sce-test-repo"); + let error = run_setup_hooks(&missing_path).expect_err("missing repo path should fail"); + assert!(error + .to_string() + .contains("Failed to resolve repository path")); +} + +#[test] +fn run_setup_hooks_rejects_file_repo_path() -> Result<()> { + let temp = TestTempDir::new("sce-setup-hook-install-tests")?; + let file_path = temp.path().join("not-a-directory"); + fs::write(&file_path, b"not a repo")?; + + let error = run_setup_hooks(&file_path).expect_err("file path should fail"); + assert!(error.to_string().contains("is not a directory")); + + Ok(()) +} + #[test] fn setup_help_option_sets_help_flag() -> Result<()> { let options = parse_setup_cli_options(vec!["--help".to_string()])?; diff --git a/cli/src/services/sync.rs b/cli/src/services/sync.rs index e2a887a7..9a2ad127 100644 --- a/cli/src/services/sync.rs +++ b/cli/src/services/sync.rs @@ -1,9 +1,66 @@ use anyhow::{Context, Result}; +use lexopt::Arg; +use lexopt::ValueExt; +use serde_json::json; use std::sync::OnceLock; use 
crate::services::local_db::{run_smoke_check, LocalDatabaseTarget}; +use crate::services::output_format::OutputFormat; +use crate::services::resilience::{run_with_retry, RetryPolicy}; pub const NAME: &str = "sync"; + +pub type SyncFormat = OutputFormat; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct SyncRequest { + pub format: SyncFormat, +} + +pub fn sync_usage_text() -> &'static str { + "Usage:\n sce sync [--format ]\n\nExamples:\n sce sync\n sce sync --format json" +} + +pub fn parse_sync_request(args: Vec) -> Result { + let mut parser = lexopt::Parser::from_args(args); + let mut format = SyncFormat::Text; + + while let Some(arg) = parser.next()? { + match arg { + Arg::Long("format") => { + let value = parser + .value() + .context("Option '--format' requires a value")?; + let raw = value.string()?; + format = SyncFormat::parse(&raw, "sce sync --help")?; + } + Arg::Long("help") | Arg::Short('h') => { + anyhow::bail!("Use 'sce sync --help' for sync usage."); + } + Arg::Long(option) => { + anyhow::bail!( + "Unknown sync option '--{}'. Run 'sce sync --help' to see valid usage.", + option + ); + } + Arg::Short(option) => { + anyhow::bail!( + "Unknown sync option '-{}'. Run 'sce sync --help' to see valid usage.", + option + ); + } + Arg::Value(value) => { + anyhow::bail!( + "Unexpected sync argument '{}'. Run 'sce sync --help' to see valid usage.", + value.string()? 
+ ); + } + } + } + + Ok(SyncRequest { format }) +} + const SUPPORTED_PHASES: [CloudSyncPhase; 3] = [ CloudSyncPhase::PlanOnly, CloudSyncPhase::DryRun, @@ -11,6 +68,12 @@ const SUPPORTED_PHASES: [CloudSyncPhase; 3] = [ ]; static SYNC_RUNTIME: OnceLock = OnceLock::new(); +const SYNC_SMOKE_RETRY_POLICY: RetryPolicy = RetryPolicy { + max_attempts: 3, + timeout_ms: 2_000, + initial_backoff_ms: 100, + max_backoff_ms: 400, +}; #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum CloudSyncPhase { @@ -31,6 +94,15 @@ pub struct CloudSyncPlan { pub can_execute: bool, } +#[derive(Clone, Debug, Eq, PartialEq)] +struct SyncPlaceholderReport { + workspace: &'static str, + phase: CloudSyncPhase, + inserted_rows: u64, + checkpoints: Vec<&'static str>, + can_execute: bool, +} + pub trait CloudSyncGateway { fn plan(&self, request: &CloudSyncRequest) -> CloudSyncPlan; } @@ -78,21 +150,27 @@ where Self { gateway } } - pub fn run(&self, request: &CloudSyncRequest) -> Result { + fn run(&self, request: &CloudSyncRequest) -> Result { let runtime = shared_runtime()?; let outcome = runtime - .block_on(run_smoke_check(LocalDatabaseTarget::InMemory)) - .context("local Turso smoke check failed")?; + .block_on(run_with_retry( + SYNC_SMOKE_RETRY_POLICY, + "sync.local_db_smoke_check", + "rerun 'sce sync'; if the failure persists, verify local runtime health with 'sce doctor'.", + |_| run_smoke_check(LocalDatabaseTarget::InMemory), + )) + .context("local Turso smoke check failed after bounded retries")?; let plan = self.gateway.plan(request); - Ok(format!( - "TODO: '{NAME}' cloud workflows are planned and not implemented yet. 
Local Turso smoke check succeeded ({}) row inserted; cloud sync placeholder enumerates {} phase(s) and plan holds {} checkpoint(s).", - outcome.inserted_rows, - SUPPORTED_PHASES.len(), - plan.checkpoints.len() - )) + Ok(SyncPlaceholderReport { + workspace: request.workspace, + phase: request.phase, + inserted_rows: outcome.inserted_rows, + checkpoints: plan.checkpoints, + can_execute: plan.can_execute, + }) } } @@ -102,35 +180,111 @@ fn shared_runtime() -> Result<&'static tokio::runtime::Runtime> { } let runtime = tokio::runtime::Builder::new_current_thread() + .enable_time() .build() .context("failed to create shared tokio runtime for sync placeholder")?; Ok(SYNC_RUNTIME.get_or_init(|| runtime)) } -pub fn run_placeholder_sync() -> Result { +pub fn run_placeholder_sync(request: SyncRequest) -> Result { let service = PlaceholderSyncService::new(PlaceholderCloudSyncGateway); - let request = CloudSyncRequest { + let cloud_request = CloudSyncRequest { workspace: "local", phase: CloudSyncPhase::PlanOnly, }; - service.run(&request) + let report = service.run(&cloud_request)?; + + match request.format { + SyncFormat::Text => Ok(format!( + "TODO: '{NAME}' cloud workflows are planned and not implemented yet. Local Turso smoke check succeeded ({}) row inserted; cloud sync placeholder enumerates {} phase(s) and plan holds {} checkpoint(s). 
Next step: rerun with '--format json' for machine-readable placeholder checkpoints.", + report.inserted_rows, + SUPPORTED_PHASES.len(), + report.checkpoints.len() + )), + SyncFormat::Json => { + let payload = json!({ + "status": "ok", + "command": NAME, + "placeholder_state": "planned", + "workspace": report.workspace, + "phase": phase_name(report.phase), + "supported_phases": SUPPORTED_PHASES + .iter() + .map(|phase| phase_name(*phase)) + .collect::>(), + "local_smoke_check": { + "status": "ok", + "target": "in_memory", + "inserted_rows": report.inserted_rows, + "retry_policy": { + "max_attempts": SYNC_SMOKE_RETRY_POLICY.max_attempts, + "timeout_ms": SYNC_SMOKE_RETRY_POLICY.timeout_ms, + "initial_backoff_ms": SYNC_SMOKE_RETRY_POLICY.initial_backoff_ms, + "max_backoff_ms": SYNC_SMOKE_RETRY_POLICY.max_backoff_ms, + }, + }, + "cloud_plan": { + "can_execute": report.can_execute, + "checkpoints": report.checkpoints, + }, + "next_step": "Rerun with '--format json' for machine-readable placeholder checkpoints.", + }); + + serde_json::to_string_pretty(&payload) + .context("failed to serialize sync placeholder report to JSON") + } + } +} + +fn phase_name(phase: CloudSyncPhase) -> &'static str { + match phase { + CloudSyncPhase::PlanOnly => "plan_only", + CloudSyncPhase::DryRun => "dry_run", + CloudSyncPhase::Apply => "apply", + } } #[cfg(test)] mod tests { use anyhow::Result; + use serde_json::Value; use super::{ - run_placeholder_sync, CloudSyncGateway, CloudSyncPhase, CloudSyncRequest, - PlaceholderCloudSyncGateway, + parse_sync_request, run_placeholder_sync, CloudSyncGateway, CloudSyncPhase, + CloudSyncRequest, PlaceholderCloudSyncGateway, SyncFormat, SyncRequest, NAME, }; use super::shared_runtime; + #[test] + fn parse_defaults_to_text_format() { + let request = parse_sync_request(vec![]).expect("sync request should parse"); + assert_eq!(request.format, SyncFormat::Text); + } + + #[test] + fn parse_accepts_json_format() { + let request = 
parse_sync_request(vec!["--format".to_string(), "json".to_string()]) + .expect("sync request should parse"); + assert_eq!(request.format, SyncFormat::Json); + } + + #[test] + fn parse_rejects_invalid_format_with_help_guidance() { + let error = parse_sync_request(vec!["--format".to_string(), "yaml".to_string()]) + .expect_err("invalid sync format should fail"); + assert_eq!( + error.to_string(), + "Invalid --format value 'yaml'. Valid values: text, json. Run 'sce sync --help' to see valid usage." + ); + } + #[test] fn sync_placeholder_runs_local_smoke_check() -> Result<()> { - let message = run_placeholder_sync()?; + let message = run_placeholder_sync(SyncRequest { + format: SyncFormat::Text, + })?; assert!(message.contains("Local Turso smoke check succeeded")); assert!(message.contains("cloud sync placeholder enumerates")); Ok(()) @@ -155,4 +309,35 @@ mod tests { assert!(std::ptr::eq(first, second)); Ok(()) } + + #[test] + fn sync_json_output_includes_stable_fields() -> Result<()> { + let output = run_placeholder_sync(SyncRequest { + format: SyncFormat::Json, + })?; + let parsed: Value = serde_json::from_str(&output)?; + assert_eq!(parsed["status"], "ok"); + assert_eq!(parsed["command"], NAME); + assert_eq!(parsed["placeholder_state"], "planned"); + assert_eq!(parsed["workspace"], "local"); + assert_eq!(parsed["phase"], "plan_only"); + assert!(parsed["supported_phases"].is_array()); + assert!(parsed["local_smoke_check"].is_object()); + assert!(parsed["cloud_plan"].is_object()); + assert!(parsed["next_step"].as_str().is_some()); + Ok(()) + } + + #[test] + fn sync_json_output_is_deterministic_for_same_request() -> Result<()> { + let first = run_placeholder_sync(SyncRequest { + format: SyncFormat::Json, + })?; + let second = run_placeholder_sync(SyncRequest { + format: SyncFormat::Json, + })?; + + assert_eq!(first, second); + Ok(()) + } } diff --git a/cli/src/services/version.rs b/cli/src/services/version.rs new file mode 100644 index 00000000..509967df --- /dev/null 
+++ b/cli/src/services/version.rs @@ -0,0 +1,156 @@ +use anyhow::{bail, Context, Result}; +use lexopt::Arg; +use lexopt::ValueExt; +use serde_json::json; + +use crate::services::output_format::OutputFormat; + +pub const NAME: &str = "version"; + +const BINARY_NAME: &str = env!("CARGO_PKG_NAME"); +const PACKAGE_VERSION: &str = env!("CARGO_PKG_VERSION"); + +pub type VersionFormat = OutputFormat; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct VersionRequest { + pub format: VersionFormat, +} + +pub fn version_usage_text() -> &'static str { + "Usage:\n sce version [--format ]\n\nExamples:\n sce version\n sce version --format json" +} + +pub fn parse_version_request(args: Vec) -> Result { + let mut parser = lexopt::Parser::from_args(args); + let mut format = VersionFormat::Text; + + while let Some(arg) = parser.next()? { + match arg { + Arg::Long("format") => { + let value = parser + .value() + .context("Option '--format' requires a value")?; + let raw = value.string()?; + format = VersionFormat::parse(&raw, "sce version --help")?; + } + Arg::Long("help") | Arg::Short('h') => { + bail!("Use 'sce version --help' for version usage."); + } + Arg::Long(option) => { + bail!( + "Unknown version option '--{}'. Run 'sce version --help' to see valid usage.", + option + ); + } + Arg::Short(option) => { + bail!( + "Unknown version option '-{}'. Run 'sce version --help' to see valid usage.", + option + ); + } + Arg::Value(value) => { + bail!( + "Unexpected version argument '{}'. Run 'sce version --help' to see valid usage.", + value.string()? 
+ ); + } + } + } + + Ok(VersionRequest { format }) +} + +pub fn render_version(request: VersionRequest) -> Result { + let build_profile = if cfg!(debug_assertions) { + "debug" + } else { + "release" + }; + + let report = json!({ + "status": "ok", + "command": NAME, + "binary": BINARY_NAME, + "version": PACKAGE_VERSION, + "build_profile": build_profile, + }); + + match request.format { + VersionFormat::Text => Ok(format!( + "{} {} ({})", + BINARY_NAME, PACKAGE_VERSION, build_profile + )), + VersionFormat::Json => serde_json::to_string_pretty(&report) + .context("failed to serialize version report to JSON"), + } +} + +#[cfg(test)] +mod tests { + use serde_json::Value; + + use super::{parse_version_request, render_version, VersionFormat, VersionRequest, NAME}; + + #[test] + fn parse_defaults_to_text_format() { + let request = parse_version_request(vec![]).expect("request should parse"); + assert_eq!(request.format, VersionFormat::Text); + } + + #[test] + fn parse_accepts_json_format() { + let request = parse_version_request(vec!["--format".to_string(), "json".to_string()]) + .expect("request should parse"); + assert_eq!(request.format, VersionFormat::Json); + } + + #[test] + fn parse_rejects_invalid_format_with_help_guidance() { + let error = parse_version_request(vec!["--format".to_string(), "yaml".to_string()]) + .expect_err("invalid format should fail"); + assert_eq!( + error.to_string(), + "Invalid --format value 'yaml'. Valid values: text, json. Run 'sce version --help' to see valid usage." 
+ ); + } + + #[test] + fn render_json_includes_stable_fields() { + let output = render_version(VersionRequest { + format: VersionFormat::Json, + }) + .expect("json render should succeed"); + + let parsed: Value = serde_json::from_str(&output).expect("json output should parse"); + assert_eq!(parsed["status"], "ok"); + assert_eq!(parsed["command"], NAME); + assert!(parsed["binary"].as_str().is_some()); + assert!(parsed["version"].as_str().is_some()); + assert!(parsed["build_profile"].as_str().is_some()); + } + + #[test] + fn render_json_is_deterministic_for_same_request() { + let first = render_version(VersionRequest { + format: VersionFormat::Json, + }) + .expect("first json render should succeed"); + let second = render_version(VersionRequest { + format: VersionFormat::Json, + }) + .expect("second json render should succeed"); + + assert_eq!(first, second); + } + + #[test] + fn render_text_includes_binary_and_version() { + let output = render_version(VersionRequest { + format: VersionFormat::Text, + }) + .expect("text render should succeed"); + assert!(output.contains(env!("CARGO_PKG_NAME"))); + assert!(output.contains(env!("CARGO_PKG_VERSION"))); + } +} diff --git a/cli/tests/setup_integration.rs b/cli/tests/setup_integration.rs new file mode 100644 index 00000000..53a6047f --- /dev/null +++ b/cli/tests/setup_integration.rs @@ -0,0 +1,1549 @@ +use std::error::Error; +use std::ffi::OsStr; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::{Command, Output}; +use std::time::{SystemTime, UNIX_EPOCH}; + +type TestResult = Result>; + +#[derive(Debug)] +struct IntegrationTempDir { + path: PathBuf, +} + +impl IntegrationTempDir { + fn new(prefix: &str) -> TestResult { + let epoch_nanos = SystemTime::now().duration_since(UNIX_EPOCH)?.as_nanos(); + let path = + std::env::temp_dir().join(format!("{prefix}-{}-{epoch_nanos}", std::process::id())); + fs::create_dir_all(&path)?; + Ok(Self { path }) + } + + fn path(&self) -> &Path { + &self.path + } +} + +impl 
Drop for IntegrationTempDir { + fn drop(&mut self) { + let _ = fs::remove_dir_all(&self.path); + } +} + +#[derive(Debug)] +struct SetupIntegrationHarness { + temp: IntegrationTempDir, + repo_root: PathBuf, + state_home: PathBuf, + home_dir: PathBuf, +} + +#[derive(Debug)] +struct CommandResult { + status: std::process::ExitStatus, + stdout: String, + stderr: String, +} + +const REQUIRED_HOOK_NAMES: [&str; 3] = ["pre-commit", "commit-msg", "post-commit"]; + +impl CommandResult { + fn success(&self) -> bool { + self.status.success() + } +} + +impl SetupIntegrationHarness { + fn new(prefix: &str) -> TestResult { + let temp = IntegrationTempDir::new(prefix)?; + let repo_root = temp.path().join("repo"); + let state_home = temp.path().join("xdg-state"); + let home_dir = temp.path().join("home"); + + fs::create_dir_all(&repo_root)?; + fs::create_dir_all(&state_home)?; + fs::create_dir_all(&home_dir)?; + + Ok(Self { + temp, + repo_root, + state_home, + home_dir, + }) + } + + fn repo_root(&self) -> &Path { + &self.repo_root + } + + fn state_home(&self) -> &Path { + &self.state_home + } + + fn init_git_repo(&self) -> TestResult<()> { + let result = self.run_git(["init", "-q"])?; + if !result.success() { + return Err(format!( + "git init failed:\nstdout:\n{}\nstderr:\n{}", + result.stdout, result.stderr + ) + .into()); + } + Ok(()) + } + + fn configure_local_hooks_path(&self, relative_hooks_path: &str) -> TestResult<()> { + let result = self.run_git(["config", "core.hooksPath", relative_hooks_path])?; + if !result.success() { + return Err(format!( + "git config core.hooksPath failed:\nstdout:\n{}\nstderr:\n{}", + result.stdout, result.stderr + ) + .into()); + } + Ok(()) + } + + fn run_sce(&self, args: I) -> TestResult + where + I: IntoIterator, + S: AsRef, + { + let output = self.base_command(sce_binary_path()).args(args).output()?; + Ok(render_command_result(output)) + } + + fn run_git(&self, args: I) -> TestResult + where + I: IntoIterator, + S: AsRef, + { + let output = 
self.base_command("git").args(args).output()?; + Ok(render_command_result(output)) + } + + fn base_command>(&self, program: P) -> Command { + let mut command = Command::new(program); + command + .current_dir(&self.repo_root) + .env("XDG_STATE_HOME", &self.state_home) + .env("HOME", &self.home_dir) + .env("LOCALAPPDATA", &self.state_home) + .env("APPDATA", &self.state_home) + .env("GIT_CONFIG_GLOBAL", null_device_path()) + .env("GIT_CONFIG_NOSYSTEM", "1"); + command + } +} + +#[test] +fn setup_hooks_default_path_install_and_rerun_are_deterministic() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-integration")?; + harness.init_git_repo()?; + + let expected_hooks_dir = harness.repo_root().canonicalize()?.join(".git/hooks"); + assert_setup_hooks_install_and_rerun(&harness, &expected_hooks_dir)?; + + Ok(()) +} + +#[test] +fn setup_hooks_custom_path_install_and_rerun_are_deterministic() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-integration")?; + harness.init_git_repo()?; + harness.configure_local_hooks_path(".githooks")?; + + let expected_hooks_dir = harness.repo_root().canonicalize()?.join(".githooks"); + assert_setup_hooks_install_and_rerun(&harness, &expected_hooks_dir)?; + + let result = harness.run_git(["config", "--get", "core.hooksPath"])?; + assert!( + result.success(), + "git config --get core.hooksPath should succeed\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + assert_eq!(result.stdout.trim(), ".githooks"); + + Ok(()) +} + +#[test] +fn setup_hooks_update_path() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-hooks-update")?; + harness.init_git_repo()?; + + let hooks_dir = harness.repo_root().join(".git/hooks"); + + let first = harness.run_sce(["setup", "--hooks"])?; + assert!( + first.success(), + "initial hook install should succeed\nstdout:\n{}\nstderr:\n{}", + first.stdout, + first.stderr + ); + + 
assert_hooks_are_present_and_executable(&hooks_dir)?; + + let pre_commit_path = hooks_dir.join("pre-commit"); + let original_embedded_content = fs::read(&pre_commit_path)?; + + let mutated_content = format!( + "{}\n# MUTATED FOR TEST\n", + String::from_utf8_lossy(&original_embedded_content) + ); + let mutated_bytes = mutated_content.as_bytes().to_vec(); + fs::write(&pre_commit_path, &mutated_content)?; + + let second = harness.run_sce(["setup", "--hooks"])?; + assert!( + second.success(), + "hook update run should succeed\nstdout:\n{}\nstderr:\n{}", + second.stdout, + second.stderr + ); + + assert!( + second.stdout.contains("- pre-commit: updated at '"), + "output should report pre-commit as updated\nstdout:\n{}", + second.stdout + ); + + assert!( + second.stdout.contains("- commit-msg: skipped at '"), + "output should report commit-msg as skipped\nstdout:\n{}", + second.stdout + ); + + assert!( + second.stdout.contains("- post-commit: skipped at '"), + "output should report post-commit as skipped\nstdout:\n{}", + second.stdout + ); + + let backup_pattern = regex::Regex::new(r"backup: '([^']+pre-commit\.backup[^']*)'").unwrap(); + if let Some(caps) = backup_pattern.captures(&second.stdout) { + let backup_path = PathBuf::from(caps.get(1).unwrap().as_str()); + assert!( + backup_path.exists(), + "backup file should exist at '{}'", + backup_path.display() + ); + let backup_content = fs::read(&backup_path)?; + assert_eq!( + backup_content, mutated_bytes, + "backup content should match pre-update mutated hook content" + ); + } else { + panic!( + "could not extract backup path from output:\n{}", + second.stdout + ); + } + + let restored_hook = fs::read(&pre_commit_path)?; + assert_eq!( + restored_hook, original_embedded_content, + "updated hook should match original embedded content" + ); + + assert_hooks_are_present_and_executable(&hooks_dir)?; + + Ok(()) +} + +#[test] +fn setup_backup_suffix_collision() -> TestResult<()> { + let harness = 
SetupIntegrationHarness::new("sce-setup-backup-collision")?; + harness.init_git_repo()?; + let canonical_repo_root = harness.repo_root().canonicalize()?; + + let first = harness.run_sce(["setup", "--opencode"])?; + assert!( + first.success(), + "initial setup should succeed\nstdout:\n{}\nstderr:\n{}", + first.stdout, + first.stderr + ); + + let opencode_dir = harness.repo_root().join(".opencode"); + assert!( + opencode_dir.exists(), + ".opencode should exist after initial setup" + ); + + let original_content = fs::read_to_string(opencode_dir.join("command/next-task.md"))?; + + let backup_0 = harness.repo_root().join(".opencode.backup"); + let backup_1 = harness.repo_root().join(".opencode.backup.1"); + fs::write(&backup_0, "collision placeholder 0")?; + fs::write(&backup_1, "collision placeholder 1")?; + + let second = harness.run_sce(["setup", "--opencode"])?; + assert!( + second.success(), + "rerun setup should succeed\nstdout:\n{}\nstderr:\n{}", + second.stdout, + second.stderr + ); + + let expected_backup = canonical_repo_root.join(".opencode.backup.2"); + let reported_backup = extract_labeled_path(&second.stdout, "backup: existing target moved to") + .ok_or_else(|| { + format!( + "output should report backup path for collision case\nstdout:\n{}", + second.stdout + ) + })?; + assert!( + paths_match_for_test(&reported_backup, &expected_backup), + "output should report backup to .backup.2 due to collision\nstdout:\n{}", + second.stdout + ); + + assert!( + expected_backup.exists(), + "backup.2 directory should exist at '{}'", + expected_backup.display() + ); + + assert!( + expected_backup.join("command/next-task.md").exists(), + "backup should contain original files" + ); + + let backup_content = fs::read_to_string(expected_backup.join("command/next-task.md"))?; + assert_eq!( + backup_content, original_content, + "backup content should match original .opencode content" + ); + + assert!( + backup_0.exists() && fs::read_to_string(&backup_0)? 
== "collision placeholder 0", + "pre-existing .backup should be unchanged" + ); + assert!( + backup_1.exists() && fs::read_to_string(&backup_1)? == "collision placeholder 1", + "pre-existing .backup.1 should be unchanged" + ); + + Ok(()) +} + +#[test] +fn setup_hooks_repo_relative_path() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-integration")?; + harness.init_git_repo()?; + + let canonical_repo_root = harness.repo_root().canonicalize()?; + let expected_hooks_dir = canonical_repo_root.join(".git/hooks"); + + let parent_dir = harness.temp.path(); + let relative_repo_path = "repo"; + + let output = Command::new(sce_binary_path()) + .args(["setup", "--hooks", "--repo", relative_repo_path]) + .current_dir(parent_dir) + .env("XDG_STATE_HOME", harness.state_home()) + .env("HOME", &harness.home_dir) + .env("GIT_CONFIG_GLOBAL", null_device_path()) + .env("GIT_CONFIG_NOSYSTEM", "1") + .output()?; + + let result = render_command_result(output); + + assert!( + result.success(), + "setup --hooks --repo should succeed\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + assert!( + result.stdout.contains("Hook setup completed successfully."), + "output should contain success marker\nstdout:\n{}", + result.stdout + ); + + assert!( + labeled_path_matches(&result.stdout, "Repository root", &canonical_repo_root), + "output should contain canonical repository root '{}'\nstdout:\n{}", + canonical_repo_root.display(), + result.stdout + ); + + assert!( + labeled_path_matches(&result.stdout, "Hooks directory", &expected_hooks_dir), + "output should contain hooks directory '{}'\nstdout:\n{}", + expected_hooks_dir.display(), + result.stdout + ); + + assert_hooks_are_present_and_executable(&expected_hooks_dir)?; + + Ok(()) +} + +#[test] +fn setup_hooks_repo_absolute_path() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-integration")?; + harness.init_git_repo()?; + + let canonical_repo_root = 
harness.repo_root().canonicalize()?; + let expected_hooks_dir = canonical_repo_root.join(".git/hooks"); + + let result = harness.run_sce([ + "setup", + "--hooks", + "--repo", + canonical_repo_root.to_str().unwrap(), + ])?; + + assert!( + result.success(), + "setup --hooks --repo should succeed\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + assert!( + result.stdout.contains("Hook setup completed successfully."), + "output should contain success marker\nstdout:\n{}", + result.stdout + ); + + assert!( + labeled_path_matches(&result.stdout, "Repository root", &canonical_repo_root), + "output should contain canonical repository root '{}'\nstdout:\n{}", + canonical_repo_root.display(), + result.stdout + ); + + assert!( + labeled_path_matches(&result.stdout, "Hooks directory", &expected_hooks_dir), + "output should contain hooks directory '{}'\nstdout:\n{}", + expected_hooks_dir.display(), + result.stdout + ); + + assert_hooks_are_present_and_executable(&expected_hooks_dir)?; + + Ok(()) +} + +#[test] +fn harness_scopes_turso_state_home_to_test_temp_root() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-integration")?; + harness.init_git_repo()?; + + let post_commit = harness.run_sce(["hooks", "post-commit"])?; + assert!( + post_commit.success(), + "hooks post-commit should run successfully\nstdout:\n{}\nstderr:\n{}", + post_commit.stdout, + post_commit.stderr + ); + + let expected_local_db = expected_agent_trace_local_db_path(&harness); + assert!( + expected_local_db.exists(), + "expected Turso local DB path '{}' to exist", + expected_local_db.display() + ); + assert!( + expected_local_db.starts_with(harness.temp.path()), + "expected Turso local DB path '{}' to stay within test temp root '{}')", + expected_local_db.display(), + harness.temp.path().display() + ); + + Ok(()) +} + +#[test] +fn setup_fail_repo_missing() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-failure-contracts")?; + + let 
result = harness.run_sce(["setup", "--hooks", "--repo", "/missing"])?; + + assert_eq!( + result.status.code(), + Some(4), + "setup --hooks --repo /missing should exit with runtime_failure code 4\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + assert!( + result.stderr.contains("Failed to resolve repository path"), + "stderr should contain canonical path resolution failure message\nstderr:\n{}", + result.stderr + ); + + assert!( + result + .stderr + .contains("Try: pass a path to an existing git repository"), + "stderr should contain actionable guidance\nstderr:\n{}", + result.stderr + ); + + Ok(()) +} + +#[test] +fn setup_fail_repo_without_hooks() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-failure-contracts")?; + harness.init_git_repo()?; + + let result = harness.run_sce(["setup", "--repo", harness.repo_root().to_str().unwrap()])?; + + assert_eq!( + result.status.code(), + Some(3), + "setup --repo without --hooks should exit with validation_failure code 3\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + assert!( + result.stderr.contains("Option '--repo' requires '--hooks'"), + "stderr should contain --repo requires --hooks message\nstderr:\n{}", + result.stderr + ); + + assert!( + result + .stderr + .contains("Try: run 'sce setup --hooks --repo ' or remove '--repo'"), + "stderr should contain actionable guidance\nstderr:\n{}", + result.stderr + ); + + Ok(()) +} + +#[test] +fn setup_fail_noninteractive_without_target() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-failure-contracts")?; + harness.init_git_repo()?; + + let result = harness.run_sce(["setup", "--non-interactive"])?; + + assert_eq!( + result.status.code(), + Some(3), + "setup --non-interactive without target should exit with validation_failure code 3\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + assert!( + result + .stderr + .contains("Option '--non-interactive' requires a 
target flag"), + "stderr should contain --non-interactive requires target message\nstderr:\n{}", + result.stderr + ); + + assert!( + result.stderr.contains("Try: 'sce setup --opencode --non-interactive', 'sce setup --claude --non-interactive', or 'sce setup --both --non-interactive'"), + "stderr should contain actionable guidance\nstderr:\n{}", + result.stderr + ); + + Ok(()) +} + +#[test] +fn setup_targets_opencode_install_and_rerun_are_deterministic() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-targets")?; + harness.init_git_repo()?; + + assert_setup_target_install_and_rerun( + &harness, + &["setup", "--opencode"], + "Selected target(s): OpenCode", + &["OpenCode: installed"], + &[".opencode/command/next-task.md"], + &[".claude"], + )?; + + Ok(()) +} + +#[test] +fn setup_targets_claude_install_and_rerun_are_deterministic() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-targets")?; + harness.init_git_repo()?; + + assert_setup_target_install_and_rerun( + &harness, + &["setup", "--claude"], + "Selected target(s): Claude", + &["Claude: installed"], + &[".claude/commands/next-task.md"], + &[".opencode"], + )?; + + Ok(()) +} + +#[test] +fn setup_targets_both_install_and_rerun_are_deterministic() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-targets")?; + harness.init_git_repo()?; + + assert_setup_target_install_and_rerun( + &harness, + &["setup", "--both"], + "Selected target(s): OpenCode, Claude", + &["OpenCode: installed", "Claude: installed"], + &[ + ".opencode/command/next-task.md", + ".claude/commands/next-task.md", + ], + &[], + )?; + + Ok(()) +} + +#[cfg(unix)] +mod pty_interactive { + use super::*; + use portable_pty::{native_pty_system, CommandBuilder, PtyPair, PtySize}; + use std::io::{Read, Write}; + use std::sync::atomic::{AtomicBool, Ordering}; + use std::sync::{Arc, Mutex}; + use std::thread; + use std::time::Duration; + + const PTY_WAIT_FOR_OUTPUT_MS: u64 = 1000; + 
+ struct PtySession { + _pair: PtyPair, + writer: Box, + output: Arc>, + stop_flag: Arc, + _reader_thread: thread::JoinHandle<()>, + } + + impl PtySession { + fn spawn( + binary_path: &Path, + args: &[&str], + cwd: &Path, + env: &[(&str, &str)], + ) -> TestResult { + let pty_system = native_pty_system(); + let pair = pty_system.openpty(PtySize { + rows: 24, + cols: 80, + pixel_width: 0, + pixel_height: 0, + })?; + + let mut cmd = CommandBuilder::new(binary_path); + cmd.args(args); + cmd.cwd(cwd); + for (key, value) in env { + cmd.env(key, value); + } + + let mut child = pair.slave.spawn_command(cmd)?; + + let writer = pair.master.take_writer()?; + let mut reader = pair.master.try_clone_reader()?; + + let output = Arc::new(Mutex::new(String::new())); + let stop_flag = Arc::new(AtomicBool::new(false)); + + let output_clone = output.clone(); + let stop_clone = stop_flag.clone(); + let reader_thread = thread::spawn(move || { + let mut buf = [0u8; 4096]; + while !stop_clone.load(Ordering::Relaxed) { + match reader.read(&mut buf) { + Ok(0) => break, + Ok(n) => { + if let Ok(mut out) = output_clone.lock() { + out.push_str(&String::from_utf8_lossy(&buf[..n])); + } + } + Err(_) => break, + } + } + let _ = child.wait(); + }); + + Ok(PtySession { + _pair: pair, + writer, + output, + stop_flag, + _reader_thread: reader_thread, + }) + } + + fn wait_for_output(&self, expected: &str, timeout_ms: u64) -> TestResult { + let deadline = std::time::Instant::now() + Duration::from_millis(timeout_ms); + while std::time::Instant::now() < deadline { + if let Ok(out) = self.output.lock() { + if out.contains(expected) { + return Ok(true); + } + } + std::thread::sleep(Duration::from_millis(50)); + } + Ok(false) + } + + fn get_output(&self) -> String { + self.output.lock().map(|s| s.clone()).unwrap_or_default() + } + + fn write_line(&mut self, line: &str) -> TestResult<()> { + writeln!(self.writer, "{}", line)?; + self.writer.flush()?; + Ok(()) + } + + fn write_raw(&mut self, data: &[u8]) -> 
TestResult<()> { + self.writer.write_all(data)?; + self.writer.flush()?; + Ok(()) + } + } + + impl Drop for PtySession { + fn drop(&mut self) { + self.stop_flag.store(true, Ordering::Relaxed); + } + } + + #[test] + fn setup_interactive_pty_select_opencode() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-pty")?; + harness.init_git_repo()?; + + let mut session = PtySession::spawn( + &sce_binary_path(), + &["setup"], + harness.repo_root(), + &[ + ("XDG_STATE_HOME", harness.state_home().to_str().unwrap()), + ("HOME", harness.home_dir.to_str().unwrap()), + ("GIT_CONFIG_GLOBAL", null_device_path()), + ("GIT_CONFIG_NOSYSTEM", "1"), + ], + )?; + + let prompt_found = + session.wait_for_output("Select setup target", PTY_WAIT_FOR_OUTPUT_MS)?; + let output = session.get_output(); + assert!( + prompt_found, + "PTY should display prompt for target selection\noutput:\n{}", + output + ); + + session.write_line("")?; + + let completed = + session.wait_for_output("Setup completed successfully.", PTY_WAIT_FOR_OUTPUT_MS * 3)?; + let output = session.get_output(); + assert!( + completed, + "PTY flow should complete successfully after OpenCode selection\noutput:\n{}", + output + ); + + assert!( + output.contains("Selected target(s): OpenCode"), + "output should report OpenCode as selected target\noutput:\n{}", + output + ); + + assert!( + harness + .repo_root() + .join(".opencode/command/next-task.md") + .exists(), + "OpenCode assets should be installed after PTY selection" + ); + + Ok(()) + } + + #[test] + fn setup_interactive_pty_cancel() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-pty")?; + harness.init_git_repo()?; + + let mut session = PtySession::spawn( + &sce_binary_path(), + &["setup"], + harness.repo_root(), + &[ + ("XDG_STATE_HOME", harness.state_home().to_str().unwrap()), + ("HOME", harness.home_dir.to_str().unwrap()), + ("GIT_CONFIG_GLOBAL", null_device_path()), + ("GIT_CONFIG_NOSYSTEM", "1"), + ], + )?; + + let 
prompt_found = + session.wait_for_output("Select setup target", PTY_WAIT_FOR_OUTPUT_MS)?; + let output = session.get_output(); + assert!( + prompt_found, + "PTY should display prompt for target selection\noutput:\n{}", + output + ); + + session.write_raw(&[0x1b])?; + + std::thread::sleep(Duration::from_millis(PTY_WAIT_FOR_OUTPUT_MS)); + let output = session.get_output(); + + assert!( + output.contains("Setup cancelled") || output.contains("cancelled"), + "PTY cancel flow should report cancellation\noutput:\n{}", + output + ); + + assert!( + !harness.repo_root().join(".opencode").exists(), + "No files should be created after cancellation" + ); + assert!( + !harness.repo_root().join(".claude").exists(), + "No files should be created after cancellation" + ); + + Ok(()) + } + + #[test] + fn setup_interactive_nontty_fail() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-nontty")?; + harness.init_git_repo()?; + + let output = Command::new(sce_binary_path()) + .args(["setup"]) + .current_dir(harness.repo_root()) + .env("XDG_STATE_HOME", harness.state_home()) + .env("HOME", &harness.home_dir) + .env("GIT_CONFIG_GLOBAL", null_device_path()) + .env("GIT_CONFIG_NOSYSTEM", "1") + .stdin(std::process::Stdio::null()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .output()?; + + let result = render_command_result(output); + + assert_eq!( + result.status.code(), + Some(4), + "setup without TTY should exit with runtime_failure code 4\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + assert!( + result.stderr.contains("Interactive setup requires a TTY"), + "stderr should contain TTY requirement message\nstderr:\n{}", + result.stderr + ); + + assert!( + result.stderr.contains("--non-interactive"), + "stderr should contain actionable guidance mentioning --non-interactive\nstderr:\n{}", + result.stderr + ); + + assert!( + !harness.repo_root().join(".opencode").exists(), + "No files should be created 
after non-TTY failure" + ); + + Ok(()) + } +} + +#[cfg(not(unix))] +mod pty_interactive { + use super::*; + + #[test] + fn setup_interactive_pty_select_opencode() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-pty")?; + harness.init_git_repo()?; + + let result = harness.run_sce(["setup", "--opencode"])?; + + assert!( + result.success(), + "PTY test (non-unix fallback): setup --opencode should succeed\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + assert!( + harness + .repo_root() + .join(".opencode/command/next-task.md") + .exists(), + "PTY test (non-unix fallback): OpenCode assets should be installed" + ); + + Ok(()) + } + + #[test] + fn setup_interactive_pty_cancel() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-pty")?; + harness.init_git_repo()?; + + assert!( + !harness.repo_root().join(".opencode").exists(), + "No files should exist before any setup" + ); + + Ok(()) + } + + #[cfg(not(target_os = "windows"))] + #[test] + fn setup_interactive_nontty_fail() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-nontty")?; + harness.init_git_repo()?; + + let output = Command::new(sce_binary_path()) + .args(["setup"]) + .current_dir(harness.repo_root()) + .env("XDG_STATE_HOME", harness.state_home()) + .env("HOME", &harness.home_dir) + .env("GIT_CONFIG_GLOBAL", null_device_path()) + .env("GIT_CONFIG_NOSYSTEM", "1") + .stdin(std::process::Stdio::null()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .output()?; + + let result = render_command_result(output); + + assert_eq!( + result.status.code(), + Some(4), + "setup without TTY should exit with runtime_failure code 4\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + assert!( + result.stderr.contains("Interactive setup requires a TTY"), + "stderr should contain TTY requirement message\nstderr:\n{}", + result.stderr + ); + + Ok(()) + } +} + +#[cfg(unix)] +mod 
setup_permission_failures { + use super::*; + use std::os::unix::fs::PermissionsExt; + + struct PermissionRestorer { + path: PathBuf, + original_mode: u32, + } + + impl PermissionRestorer { + fn new(path: PathBuf) -> TestResult { + let metadata = fs::metadata(&path)?; + let original_mode = metadata.permissions().mode(); + Ok(Self { + path, + original_mode, + }) + } + + fn make_readonly(&self) -> TestResult<()> { + let mut perms = fs::metadata(&self.path)?.permissions(); + perms.set_mode(0o555); + fs::set_permissions(&self.path, perms)?; + Ok(()) + } + } + + impl Drop for PermissionRestorer { + fn drop(&mut self) { + if let Some(mut perms) = fs::metadata(&self.path).ok().map(|m| m.permissions()) { + perms.set_mode(self.original_mode); + let _ = fs::set_permissions(&self.path, perms); + } + } + } + + fn is_running_as_root() -> bool { + unsafe { libc::getuid() == 0 } + } + + #[test] + fn setup_permission_fail_repo_root_nonwritable() -> TestResult<()> { + if is_running_as_root() { + eprintln!("Skipping test: running as root (can write to read-only directories)"); + return Ok(()); + } + + let harness = SetupIntegrationHarness::new("sce-setup-perm-repo-root")?; + harness.init_git_repo()?; + + let restorer = PermissionRestorer::new(harness.repo_root().to_path_buf())?; + restorer.make_readonly()?; + + let result = harness.run_sce(["setup", "--opencode"])?; + + assert_eq!( + result.status.code(), + Some(4), + "setup with non-writable repo root should exit with runtime_failure code 4\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + assert!( + result.stderr.contains("Error [SCE-ERR-RUNTIME]:"), + "stderr should contain runtime error class\nstderr:\n{}", + result.stderr + ); + + assert!( + result + .stderr + .contains("Setup installation failed for OpenCode"), + "stderr should contain setup installation failure message\nstderr:\n{}", + result.stderr + ); + + assert!( + result.stderr.contains("Try:"), + "stderr should contain actionable 
guidance\nstderr:\n{}", + result.stderr + ); + + Ok(()) + } + + #[test] + fn setup_permission_fail_hooks_dir_nonwritable() -> TestResult<()> { + if is_running_as_root() { + eprintln!("Skipping test: running as root (can write to read-only directories)"); + return Ok(()); + } + + let harness = SetupIntegrationHarness::new("sce-setup-perm-hooks-dir")?; + harness.init_git_repo()?; + + let hooks_dir = harness.repo_root().join(".git/hooks"); + fs::create_dir_all(&hooks_dir)?; + + let restorer = PermissionRestorer::new(hooks_dir.clone())?; + restorer.make_readonly()?; + + let result = harness.run_sce(["setup", "--hooks"])?; + + assert_eq!( + result.status.code(), + Some(4), + "setup --hooks with non-writable hooks directory should exit with runtime_failure code 4\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + assert!( + result.stderr.contains("Error [SCE-ERR-RUNTIME]:"), + "stderr should contain runtime error class\nstderr:\n{}", + result.stderr + ); + + assert!( + result + .stderr + .contains("Hook setup failed while installing required git hooks"), + "stderr should contain hook setup failure message\nstderr:\n{}", + result.stderr + ); + + assert!( + result.stderr.contains("Try:"), + "stderr should contain actionable guidance\nstderr:\n{}", + result.stderr + ); + + Ok(()) + } + + #[test] + fn setup_permission_fail_unix_readonly_guard() -> TestResult<()> { + if is_running_as_root() { + eprintln!("Skipping test: running as root (can write to read-only directories)"); + return Ok(()); + } + + let harness = SetupIntegrationHarness::new("sce-setup-perm-readonly")?; + harness.init_git_repo()?; + + let restorer = PermissionRestorer::new(harness.repo_root().to_path_buf())?; + restorer.make_readonly()?; + + let result = harness.run_sce(["setup", "--claude"])?; + + assert_eq!( + result.status.code(), + Some(4), + "setup with read-only repo should fail deterministically\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + assert!( + 
!result.stderr.is_empty(), + "stderr should contain error message for read-only scenario\nstderr:\n{}", + result.stderr + ); + + Ok(()) + } +} + +#[cfg(not(unix))] +mod setup_permission_failures { + use super::*; + + #[test] + fn setup_permission_fail_repo_root_nonwritable() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-perm-repo-root")?; + harness.init_git_repo()?; + + let result = harness.run_sce(["setup", "--opencode"])?; + assert!( + result.success(), + "setup --opencode should succeed on non-unix platforms (permission tests not applicable)\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + Ok(()) + } + + #[test] + fn setup_permission_fail_hooks_dir_nonwritable() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-perm-hooks-dir")?; + harness.init_git_repo()?; + + let result = harness.run_sce(["setup", "--hooks"])?; + assert!( + result.success(), + "setup --hooks should succeed on non-unix platforms (permission tests not applicable)\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + Ok(()) + } + + #[test] + fn setup_permission_fail_unix_readonly_guard() -> TestResult<()> { + let harness = SetupIntegrationHarness::new("sce-setup-perm-readonly")?; + harness.init_git_repo()?; + + let result = harness.run_sce(["setup", "--claude"])?; + assert!( + result.success(), + "setup --claude should succeed on non-unix platforms (permission tests not applicable)\nstdout:\n{}\nstderr:\n{}", + result.stdout, + result.stderr + ); + + Ok(()) + } +} + +fn assert_setup_target_install_and_rerun( + harness: &SetupIntegrationHarness, + args: &[&str], + expected_selected_targets_line: &str, + expected_target_status_markers: &[&str], + required_paths: &[&str], + forbidden_paths: &[&str], +) -> TestResult<()> { + let first = harness.run_sce(args.iter().copied())?; + assert!( + first.success(), + "initial setup run should succeed\nstdout:\n{}\nstderr:\n{}", + first.stdout, + first.stderr + ); + 
assert!(first.stdout.contains("Setup completed successfully.")); + assert!( + first.stdout.contains(expected_selected_targets_line), + "initial setup output should include expected selected-target line '{}'\nstdout:\n{}", + expected_selected_targets_line, + first.stdout + ); + for marker in expected_target_status_markers { + assert!( + first.stdout.contains(marker), + "initial setup output should include marker '{}'\nstdout:\n{}", + marker, + first.stdout + ); + } + assert!( + first + .stdout + .contains("backup: not needed (no existing target)"), + "initial setup run should report no backup requirement\nstdout:\n{}", + first.stdout + ); + + for relative_path in required_paths { + assert!( + harness.repo_root().join(relative_path).exists(), + "expected installed path '{}' to exist after first run", + relative_path + ); + } + + for relative_path in forbidden_paths { + assert!( + !harness.repo_root().join(relative_path).exists(), + "expected path '{}' to remain absent for this setup target", + relative_path + ); + } + + let second = harness.run_sce(args.iter().copied())?; + assert!( + second.success(), + "second setup run should succeed\nstdout:\n{}\nstderr:\n{}", + second.stdout, + second.stderr + ); + assert!(second.stdout.contains("Setup completed successfully.")); + assert!( + second.stdout.contains(expected_selected_targets_line), + "second setup output should include expected selected-target line '{}'\nstdout:\n{}", + expected_selected_targets_line, + second.stdout + ); + for marker in expected_target_status_markers { + assert!( + second.stdout.contains(marker), + "second setup output should include marker '{}'\nstdout:\n{}", + marker, + second.stdout + ); + } + assert!( + second.stdout.contains("backup: existing target moved to"), + "second setup run should report backup-and-replace behavior\nstdout:\n{}", + second.stdout + ); + + for relative_path in required_paths { + assert!( + harness.repo_root().join(relative_path).exists(), + "expected installed path 
'{}' to exist after second run", + relative_path + ); + } + + for relative_path in forbidden_paths { + assert!( + !harness.repo_root().join(relative_path).exists(), + "expected path '{}' to remain absent after second run", + relative_path + ); + } + + Ok(()) +} + +fn assert_setup_hooks_install_and_rerun( + harness: &SetupIntegrationHarness, + expected_hooks_directory: &Path, +) -> TestResult<()> { + let first = harness.run_sce(["setup", "--hooks"])?; + assert!( + first.success(), + "first setup --hooks run should succeed\nstdout:\n{}\nstderr:\n{}", + first.stdout, + first.stderr + ); + assert!(first.stdout.contains("Hook setup completed successfully.")); + assert!( + labeled_path_matches(&first.stdout, "Hooks directory", expected_hooks_directory), + "first setup run should include hooks directory '{}':\n{}", + expected_hooks_directory.display(), + first.stdout + ); + + for hook in REQUIRED_HOOK_NAMES { + assert!( + first.stdout.contains(&format!("- {hook}: installed at '")), + "first setup run should report '{}' as installed\nstdout:\n{}", + hook, + first.stdout + ); + } + assert_eq!( + first.stdout.matches("backup: not needed").count(), + REQUIRED_HOOK_NAMES.len(), + "first setup run should report backup: not needed for each required hook\nstdout:\n{}", + first.stdout + ); + + assert_hooks_are_present_and_executable(expected_hooks_directory)?; + + let second = harness.run_sce(["setup", "--hooks"])?; + assert!( + second.success(), + "second setup --hooks run should succeed\nstdout:\n{}\nstderr:\n{}", + second.stdout, + second.stderr + ); + assert!(second.stdout.contains("Hook setup completed successfully.")); + assert!( + labeled_path_matches(&second.stdout, "Hooks directory", expected_hooks_directory), + "second setup run should include hooks directory '{}':\n{}", + expected_hooks_directory.display(), + second.stdout + ); + + for hook in REQUIRED_HOOK_NAMES { + assert!( + second.stdout.contains(&format!("- {hook}: skipped at '")), + "second setup run should report 
'{}' as skipped\nstdout:\n{}", + hook, + second.stdout + ); + } + assert_eq!( + second.stdout.matches("backup: not needed").count(), + REQUIRED_HOOK_NAMES.len(), + "second setup run should report backup: not needed for each required hook\nstdout:\n{}", + second.stdout + ); + + assert_hooks_are_present_and_executable(expected_hooks_directory)?; + + Ok(()) +} + +fn assert_hooks_are_present_and_executable(hooks_directory: &Path) -> TestResult<()> { + for hook in REQUIRED_HOOK_NAMES { + let hook_path = hooks_directory.join(hook); + assert!( + hook_path.exists(), + "expected required hook '{}' to exist at '{}'", + hook, + hook_path.display() + ); + assert!( + hook_path.is_file(), + "expected required hook '{}' at '{}' to be a file", + hook, + hook_path.display() + ); + assert_executable_file(&hook_path)?; + } + + Ok(()) +} + +#[cfg(unix)] +fn assert_executable_file(path: &Path) -> TestResult<()> { + use std::os::unix::fs::PermissionsExt; + + let metadata = fs::metadata(path)?; + let mode = metadata.permissions().mode(); + assert!( + mode & 0o111 != 0, + "expected hook '{}' to have at least one executable bit set (mode {:o})", + path.display(), + mode + ); + Ok(()) +} + +#[cfg(not(unix))] +fn assert_executable_file(path: &Path) -> TestResult<()> { + let metadata = fs::metadata(path)?; + assert!( + metadata.is_file(), + "expected hook '{}' to resolve to a regular file", + path.display() + ); + Ok(()) +} + +fn sce_binary_path() -> PathBuf { + if let Some(path) = std::env::var_os("CARGO_BIN_EXE_sce") { + return PathBuf::from(path); + } + + let test_executable = std::env::current_exe() + .expect("integration test should resolve current executable path for binary fallback"); + let debug_root = test_executable + .parent() + .and_then(Path::parent) + .expect("integration test executable should run from target/{profile}/deps"); + + let candidate = debug_root.join(binary_filename("sce")); + assert!( + candidate.exists(), + "integration test could not resolve compiled sce binary 
at '{}'", + candidate.display() + ); + + candidate +} + +fn binary_filename(base: &str) -> String { + #[cfg(windows)] + { + format!("{base}.exe") + } + + #[cfg(not(windows))] + { + base.to_string() + } +} + +fn render_command_result(output: Output) -> CommandResult { + CommandResult { + status: output.status, + stdout: String::from_utf8_lossy(&output.stdout).into_owned(), + stderr: String::from_utf8_lossy(&output.stderr).into_owned(), + } +} + +fn expected_agent_trace_local_db_path(harness: &SetupIntegrationHarness) -> PathBuf { + #[cfg(target_os = "windows")] + { + harness + .state_home + .join("sce") + .join("agent-trace") + .join("local.db") + } + + #[cfg(target_os = "macos")] + { + harness + .home_dir + .join("Library") + .join("Application Support") + .join("sce") + .join("agent-trace") + .join("local.db") + } + + #[cfg(not(any(target_os = "windows", target_os = "macos")))] + { + harness + .state_home + .join("sce") + .join("agent-trace") + .join("local.db") + } +} + +fn labeled_path_matches(stdout: &str, label: &str, expected: &Path) -> bool { + extract_labeled_path(stdout, label) + .is_some_and(|reported| paths_match_for_test(&reported, expected)) +} + +fn extract_labeled_path(stdout: &str, label: &str) -> Option { + let prefixes = [format!("{label}: '"), format!("{label} '")]; + stdout.lines().find_map(|line| { + let trimmed = line.trim_start(); + prefixes.iter().find_map(|prefix| { + trimmed + .strip_prefix(prefix) + .and_then(|value| value.strip_suffix('\'')) + .map(PathBuf::from) + }) + }) +} + +fn paths_match_for_test(actual: &Path, expected: &Path) -> bool { + let actual_components = normalized_components(actual); + let expected_components = normalized_components(expected); + + if actual_components == expected_components { + return true; + } + + if let (Some(actual_tail), Some(expected_tail)) = ( + tail_from_anchor(&actual_components, "repo"), + tail_from_anchor(&expected_components, "repo"), + ) { + if actual_tail == expected_tail { + return true; + } 
+ } + + let canonical_actual = actual.canonicalize().ok(); + let canonical_expected = expected.canonicalize().ok(); + if let (Some(canonical_actual), Some(canonical_expected)) = + (canonical_actual, canonical_expected) + { + return normalized_components(&canonical_actual) + == normalized_components(&canonical_expected); + } + + false +} + +fn normalized_components(path: &Path) -> Vec { + let raw = trim_windows_verbatim_prefix(&path.display().to_string()).replace('\\', "/"); + raw.split('/') + .filter(|component| !component.is_empty()) + .map(|component| component.to_ascii_lowercase()) + .collect() +} + +fn tail_from_anchor<'a>(components: &'a [String], anchor: &str) -> Option<&'a [String]> { + let anchor = anchor.to_ascii_lowercase(); + components + .iter() + .rposition(|component| component == &anchor) + .map(|index| &components[index..]) +} + +fn trim_windows_verbatim_prefix(value: &str) -> &str { + value + .strip_prefix(r"\\?\") + .or_else(|| value.strip_prefix("//?/")) + .unwrap_or(value) +} + +fn null_device_path() -> &'static str { + #[cfg(windows)] + { + "NUL" + } + + #[cfg(not(windows))] + { + "/dev/null" + } +} diff --git a/context/architecture.md b/context/architecture.md index c9a32563..0cbcf31d 100644 --- a/context/architecture.md +++ b/context/architecture.md @@ -25,10 +25,12 @@ Current target renderer helper modules: - `config/pkl/check-generated.sh` (dev-shell integration stale-output detection against committed generated files) - `nix run .#sync-opencode-config` (flake app entrypoint for config regeneration and sync workflow) - `nix run .#token-count-workflows` (flake app entrypoint for static workflow token-count execution via `evals/token-count-workflows.ts`) -- `nix flake check` / `checks..cli-setup-command-surface` (flake check derivation that runs targeted CLI setup command-surface verification from `cli/`) +- `nix run .#cli-integration-tests` (flake app entrypoint for binary-driven Rust setup integration tests) +- `nix flake check` / 
`checks.<system>.cli-setup-command-surface` / `checks.<system>.cli-setup-integration` (flake check derivations that run targeted CLI setup command-surface and setup integration verification from `cli/`) - `.github/workflows/pkl-generated-parity.yml` (CI wrapper that runs the parity check for pushes to `main` and pull requests targeting `main`) - `.github/workflows/agnix-config-validate-report.yml` (CI wrapper that runs `agnix validate` from `config/`, writes `context/tmp/ci-reports/agnix-validate-report.txt`, uploads it when non-info findings are present, and fails on any non-info finding) - `.github/workflows/workflow-token-count.yml` (CI wrapper that runs `nix run .#token-count-workflows` for pushes/pull requests targeting `main` and uploads token-footprint artifacts from `context/tmp/token-footprint/`) +- `.github/workflows/cli-integration-tests.yml` (CI wrapper that runs `nix run .#cli-integration-tests` on every push; no branch filter or pull-request trigger is configured) The scaffold provides stable canonical content-unit identifiers and reusable target-agnostic text primitives for all planned authored generated classes (agents, commands, skills, shared library file). @@ -71,25 +73,32 @@ See `context/decisions/2026-02-28-pkl-generation-architecture.md` for the full m The repository includes a new placeholder Rust binary crate at `cli/`. - `cli/src/main.rs` is the executable entrypoint (`sce`) and delegates to `app::run`. -- `cli/src/app.rs` provides a `lexopt`-based argument parser and dispatch loop with deterministic help, setup installation execution, and consistent `anyhow`-driven error exits. -- `cli/src/command_surface.rs` is the source of truth for top-level command contract metadata (`help`, `setup`, `doctor`, `mcp`, `hooks`, `sync`) and explicit implemented-vs-placeholder status.
+- `cli/src/app.rs` provides a `lexopt`-based argument parser and dispatch loop with deterministic help/setup execution, centralized stream routing (`stdout` success payloads, `stderr` redacted diagnostics), stable class-based exit-code mapping (`2` parse, `3` validation, `4` runtime, `5` dependency), and stable class-based stderr diagnostic codes (`SCE-ERR-PARSE`, `SCE-ERR-VALIDATION`, `SCE-ERR-RUNTIME`, `SCE-ERR-DEPENDENCY`) with default `Try:` remediation injection when missing. +- `cli/src/services/observability.rs` provides deterministic runtime observability controls and rendering for app lifecycle logs, including env-configured threshold/format (`SCE_LOG_LEVEL`, `SCE_LOG_FORMAT`), optional file sink controls (`SCE_LOG_FILE`, `SCE_LOG_FILE_MODE` with deterministic truncate-or-append policy), optional OTEL export bootstrap (`SCE_OTEL_ENABLED`, `OTEL_EXPORTER_OTLP_ENDPOINT`, `OTEL_EXPORTER_OTLP_PROTOCOL`), stable event identifiers, severity filtering, stderr-only primary emission with optional mirrored file writes, and redaction-safe emission through the shared security helper. +- `cli/src/command_surface.rs` is the source of truth for top-level command contract metadata (`help`, `config`, `setup`, `doctor`, `mcp`, `hooks`, `sync`, `version`, `completion`) and explicit implemented-vs-placeholder status. +- `cli/src/services/config.rs` defines `sce config` parser/runtime contracts (`show`, `validate`, `--help`), deterministic config-file selection, explicit value precedence (`flags > env > config file > defaults`), strict config-file validation (`log_level`, `timeout_ms`), and deterministic text/JSON output rendering. +- `cli/src/services/output_format.rs` defines the canonical shared CLI output-format contract (`OutputFormat`) for supporting commands, with deterministic `text|json` parsing and command-scoped actionable invalid-value guidance. 
- `cli/src/services/local_db.rs` provides the local Turso data adapter, including `Builder::new_local(...)` initialization, deterministic persistent runtime DB target resolution/bootstrap (`ensure_agent_trace_local_db_ready_blocking`), async execute/query smoke checks for in-memory and file-backed targets, and idempotent migration application for Agent Trace persistence foundations (`repositories`, `commits`, `trace_records`, `trace_ranges`), reconciliation ingestion entities (`reconciliation_runs`, `rewrite_mappings`, `conversations`), and T14 retry/observability storage (`trace_retry_queue`, `reconciliation_metrics`) with replay/query indexes. - `cli/src/test_support.rs` provides a shared test-only temp-directory helper (`TestTempDir`) used by service tests that need filesystem fixtures. -- `cli/src/services/setup.rs` defines the setup command contract (`SetupMode`, `SetupTarget`, CLI flag parser/validator), an `inquire`-backed interactive target prompter (`InquireSetupTargetPrompter`), setup dispatch outcomes (proceed/cancelled), compile-time embedded asset access (`EmbeddedAsset`, target-scoped iterators, required-hook asset iterators/lookups) generated by `cli/build.rs` from `config/.opencode/**`, `config/.claude/**`, and `cli/assets/hooks/**`, a target-scoped install engine/orchestrator that stages embedded files, performs backup-and-replace with rollback restoration on swap failure, and formats deterministic completion messaging, plus required-hook install orchestration (`install_required_git_hooks`) and command-surface hook mode helpers (`run_setup_hooks`, `resolve_setup_hooks_repository`) used by `sce setup --hooks [--repo ]` with deterministic option compatibility validation and per-hook outcome messaging. 
+- `cli/src/services/setup.rs` defines the setup command contract (`SetupMode`, `SetupTarget`, `SetupRequest`, CLI flag parser/validator), an `inquire`-backed interactive target prompter (`InquireSetupTargetPrompter`), setup dispatch outcomes (proceed/cancelled), compile-time embedded asset access (`EmbeddedAsset`, target-scoped iterators, required-hook asset iterators/lookups) generated by `cli/build.rs` from `config/.opencode/**`, `config/.claude/**`, and `cli/assets/hooks/**`, a target-scoped install engine/orchestrator that stages embedded files, performs backup-and-replace with rollback restoration on swap failure, and formats deterministic completion messaging, plus required-hook install orchestration (`install_required_git_hooks`) and command-surface setup request resolution helpers (`run_setup_hooks`, `resolve_setup_request`) used by hooks-only and composable target+hooks setup invocations with deterministic option compatibility validation, canonicalized/validated repo targeting, write-permission probes, and stable section-ordered setup/hook outcome messaging. +- `cli/src/services/security.rs` provides shared security utilities for deterministic secret redaction (`redact_sensitive_text`) and directory write-permission probes (`ensure_directory_is_writable`) used by app/setup/observability surfaces. - `cli/src/services/doctor.rs` defines hook rollout health validation (`run_doctor`) that resolves effective git hook-path source (default, local `core.hooksPath`, global `core.hooksPath`) and validates required hook files (`pre-commit`, `commit-msg`, `post-commit`) for presence and executable permissions. - `cli/src/services/agent_trace.rs` defines the Agent Trace schema adapter and builder contracts (`adapt_trace_payload`, `build_trace_payload`), including fixed git VCS identity, reserved reverse-domain metadata keys, and deterministic AI `model_id` normalization before schema-compliance validation. 
- `cli/src/services/mcp.rs` defines MCP file-cache capability contracts (`McpService`, transport/capability snapshots, cache policy) with non-runnable placeholder tool declarations. +- `cli/src/services/version.rs` defines the version command parser/rendering contract (`parse_version_request`, `render_version`) with deterministic text output and stable JSON runtime-identification fields. +- `cli/src/services/completion.rs` defines completion parser/rendering contract (`parse_completion_request`, `render_completion`) with deterministic Bash/Zsh/Fish script output aligned to current parser-valid command/flag surfaces. - `cli/src/services/hooks.rs` defines production local hook runtime parsing/dispatch (`HookSubcommand`, `parse_hooks_subcommand`, `run_hooks_subcommand`) plus a pre-commit staged-checkpoint finalization seam (`finalize_pre_commit_checkpoint`) that enforces staged-only attribution and carries index/tree anchors with explicit no-op guard states, a commit-msg co-author policy seam (`apply_commit_msg_coauthor_policy`) that injects one canonical SCE trailer only for allowed attributed commits, a post-commit trace finalization seam (`finalize_post_commit_trace`) that performs notes+DB dual writes with idempotency ledger guards and retry-queue fallback capture, a retry replay seam (`process_trace_retry_queue`) that re-attempts only failed persistence targets and emits per-attempt runtime/error-class metrics, bounded operational retry replay invocation from post-commit/post-rewrite flows (`process_runtime_retry_queue`), a post-rewrite remap-ingestion seam (`finalize_post_rewrite_remap`) that parses old->new SHA pairs and derives deterministic replay keys for remap dispatch, and a rewrite trace transformation seam (`finalize_rewrite_trace`) that emits rewritten-SHA Agent Trace records with rewrite metadata plus confidence-based quality status. 
- `cli/src/services/hosted_reconciliation.rs` defines hosted intake/orchestration seams (`ingest_hosted_rewrite_event`, `ReconciliationRunStore`) that verify provider signatures (GitHub HMAC-SHA256 and GitLab token equality), parse provider payload old/new heads, normalize deterministic idempotency-backed reconciliation run requests, resolve deterministic old->new rewrite mappings (`map_rewritten_commit`) with patch-id exact precedence, range-diff/fuzzy fallback scoring, and explicit unresolved classifications, and summarize mapped/unmapped confidence/runtime/error-class telemetry (`summarize_reconciliation_metrics`). -- `cli/src/services/sync.rs` runs the local adapter through a lazily initialized shared tokio current-thread runtime and composes a placeholder cloud-sync abstraction (`CloudSyncGateway`) so local Turso validation and deferred cloud planning remain separated. -- `cli/src/services/` contains module boundaries for setup, doctor, MCP, hooks, sync, and local DB adapters with explicit trait seams for future implementations. +- `cli/src/services/resilience.rs` defines bounded retry/timeout/backoff execution policy (`RetryPolicy`, `run_with_retry`) for transient operation hardening with deterministic failure messaging and retry observability. +- `cli/src/services/sync.rs` runs the local adapter through a lazily initialized shared tokio current-thread runtime, applies bounded resilience policy to the local smoke operation, and composes a placeholder cloud-sync abstraction (`CloudSyncGateway`) so local Turso validation and deferred cloud planning remain separated. +- `cli/src/services/` contains module boundaries for config, setup, doctor, MCP, hooks, sync, version, completion, and local DB adapters with explicit trait seams for future implementations. - `cli/README.md` is the crate-local onboarding and usage source of truth for placeholder behavior, safety limitations, and roadmap mapping back to service contracts. 
- `cli/flake.nix` applies `rust-overlay` (`oxalica/rust-overlay`) to nixpkgs, selects `rust-bin.stable.latest.default` with `rustfmt`, and routes CLI check/build derivations through `makeRustPlatform` so toolchain selection is explicit and deterministic. - `cli/flake.nix` exposes release install/run surfaces as `packages.sce` (`packages.default = packages.sce`) and `apps.sce` targeting `${packages.sce}/bin/sce`, enabling packaged CLI build/run via `nix build ./cli#default` and `nix run ./cli#sce -- ...`. - `flake.nix` (root) keeps nested CLI input wiring aligned by forwarding `nixpkgs`, `flake-utils`, and `rust-overlay` into the `cli` path input so repository-level `nix flake check` can evaluate nested CLI checks deterministically. -- `cli/Cargo.toml` keeps crates.io-ready package metadata populated while `publish = false` remains the current policy; local Cargo release/install verification targets `cargo build --manifest-path cli/Cargo.toml --release` and `cargo install --path cli --locked`. Tokio is intentionally constrained to `default-features = false` with `features = ["rt"]` to match current runtime API usage. +- `cli/Cargo.toml` keeps crates.io-ready package metadata populated while `publish = false` remains the current policy; local Cargo release/install verification targets `cargo build --manifest-path cli/Cargo.toml --release` and `cargo install --path cli --locked`. Tokio remains intentionally constrained (`default-features = false`) with current-thread runtime usage plus timer-backed bounded resilience wrappers for retry/timeout behavior. -This phase establishes compile-safe extension seams with a minimal dependency baseline (`anyhow`, `hmac`, `inquire`, `lexopt`, `serde_json`, `sha2`, `tokio`, `turso`); local Turso connectivity smoke checks now exist, while broader runtime integrations remain deferred. 
+This phase establishes compile-safe extension seams with a minimal dependency baseline (`anyhow`, `hmac`, `inquire`, `lexopt`, `opentelemetry`, `opentelemetry-otlp`, `opentelemetry_sdk`, `serde_json`, `sha2`, `tokio`, `tracing`, `tracing-opentelemetry`, `tracing-subscriber`, `turso`); local Turso connectivity smoke checks now exist, while broader runtime integrations remain deferred. ## Shared Context Drift parity mapping diff --git a/context/cli/config-precedence-contract.md b/context/cli/config-precedence-contract.md new file mode 100644 index 00000000..aa89550e --- /dev/null +++ b/context/cli/config-precedence-contract.md @@ -0,0 +1,52 @@ +# CLI Config Precedence Contract + +## Scope + +This contract documents the implemented `sce config` command behavior in `cli/src/services/config.rs` and parser/dispatch wiring in `cli/src/app.rs`. + +## Command surface + +- `sce config show [--config <path>] [--log-level <level>] [--timeout-ms <ms>] [--format <text|json>]` +- `sce config validate [--config <path>] [--log-level <level>] [--timeout-ms <ms>] [--format <text|json>]` +- `sce config --help` + +## Resolution precedence + +Resolved runtime values follow this deterministic order: + +1. flag values (`--log-level`, `--timeout-ms`) +2. environment values (`SCE_LOG_LEVEL`, `SCE_TIMEOUT_MS`) +3. config file values (`log_level`, `timeout_ms`) +4. defaults (`log_level=info`, `timeout_ms=30000`) + +Config file selection follows this deterministic order: + +1. `--config <path>` +2. `SCE_CONFIG_FILE` +3. discovered defaults when no explicit path/env override is provided: + - global: `${state_root}/sce/config.json` where `state_root` follows the same platform policy as Agent Trace local DB path derivation + - local: `.sce/config.json` under current working directory + +When both discovered defaults exist, they are merged in memory in deterministic order `global -> local`, and local values override global values per key. + +## Validation contract + +- Config file content must be valid JSON with a top-level object.
+- Allowed keys: `log_level`, `timeout_ms`. +- Unknown keys fail validation. +- `log_level` must be one of `error|warn|info|debug`. +- `timeout_ms` must be an unsigned integer. + +## Output contract + +- `show` and `validate` support deterministic `text` and `json` outputs. +- JSON responses include a top-level `status` and nested `result` object. +- Text output includes the canonical precedence string: `flags > env > config file > defaults`. +- Output reports discovered config files as `config_paths` (JSON) / `Config files:` (text). +- Resolved values continue to report `source`; when source is `config_file`, output also reports a deterministic `config_source` value (`flag`, `env`, `default_discovered_global`, `default_discovered_local`). + +## Related files + +- `cli/src/app.rs` +- `cli/src/command_surface.rs` +- `cli/src/services/config.rs` diff --git a/context/cli/placeholder-foundation.md b/context/cli/placeholder-foundation.md index 240fe2df..662f0b2b 100644 --- a/context/cli/placeholder-foundation.md +++ b/context/cli/placeholder-foundation.md @@ -11,12 +11,12 @@ The repository now includes a Rust CLI crate at `cli/` for SCE automation work. - Command contract catalog: `cli/src/command_surface.rs` - Dependency contract snapshot: `cli/src/dependency_contract.rs` - Local Turso adapter: `cli/src/services/local_db.rs` -- Service domains: `cli/src/services/{agent_trace,setup,doctor,mcp,hooks,sync}.rs` +- Service domains: `cli/src/services/{agent_trace,completion,config,setup,doctor,mcp,hooks,resilience,sync,version}.rs` - Shared test temp-path helper: `cli/src/test_support.rs` (`TestTempDir`, test-only module) ## Onboarding documentation -- `cli/README.md` includes quick-start commands for `help`, `setup`, `doctor`, `mcp`, `hooks`, and `sync`. +- `cli/README.md` includes quick-start commands for `help`, `config`, `setup`, `doctor`, `mcp`, `hooks`, `sync`, and `completion`. 
- The README explicitly distinguishes implemented behavior from placeholders and maps future work to module contracts. - Verification guidance in the README uses crate-local `cargo check`, `cargo test`, and `cargo build` commands, plus release/install commands for current installability (`cargo build --manifest-path cli/Cargo.toml --release`, `cargo install --path cli --locked`). @@ -41,45 +41,59 @@ The repository now includes a Rust CLI crate at `cli/` for SCE automation work. `sce --help` lists command names with explicit implementation status: - `help`: implemented +- `config`: implemented - `setup`: implemented - `doctor`: implemented - `mcp`: placeholder - `hooks`: implemented - `sync`: placeholder +- `version`: implemented +- `completion`: implemented + +Top-level help also includes copy-ready agent-oriented examples for interactive setup, non-interactive setup+hooks, repository-targeted hooks installs, and JSON output (`doctor --format json`, `version --format json`). Placeholder commands currently acknowledge planned behavior and do not claim production implementation. `mcp` and `sync` route through explicit service-contract placeholders. `hooks` routes through implemented subcommand parsing/dispatch for `pre-commit`, `commit-msg`, `post-commit`, and `post-rewrite`. +`config` exposes deterministic inspect/validate entrypoints (`sce config show`, `sce config validate`) with explicit precedence (`flags > env > config file > defaults`) and deterministic text/JSON output modes. +`version` exposes deterministic runtime identification output in text mode by default and JSON mode via `--format json`. +`completion` exposes deterministic shell completion generation via `sce completion --shell <shell>`. `setup` defaults to an `inquire` interactive target selection (OpenCode, Claude, Both) and accepts mutually-exclusive non-interactive target flags (`--opencode`, `--claude`, `--both`).
+`setup`, `doctor`, `mcp`, `hooks`, `sync`, `version`, and `completion` all support command-local `--help`/`-h` usage output via top-level parser routing in `cli/src/app.rs`. `setup` now also exposes compile-time embedded config assets for OpenCode/Claude targets, sourced from `config/.opencode/**` and `config/.claude/**` via `cli/build.rs` with normalized forward-slash relative paths and target-scoped iteration APIs. `setup` additionally includes a repository-root install engine (`install_embedded_setup_assets`) that stages embedded files and applies backup-and-replace safety for `.opencode/`/`.claude/` with rollback restoration if staged swap fails. `setup` now executes end-to-end and prints deterministic completion details including selected target(s), per-target install count, and backup actions. `doctor` now executes end-to-end and reports hook rollout readiness by validating effective hook-path source plus required hook presence/executable permissions. -`sync` includes a local Turso smoke gate backed by a lazily initialized shared tokio current-thread runtime and a placeholder cloud-sync gateway plan. +`sync` includes a local Turso smoke gate backed by a lazily initialized shared tokio current-thread runtime, bounded retry/timeout/backoff policy for the smoke operation, and a placeholder cloud-sync gateway plan; it now supports deterministic `text` output (default) and `--format json` output with stable placeholder fields. ## Command loop and error model - Argument parsing is handled by `lexopt` in `cli/src/app.rs`. - Runtime errors are normalized through `anyhow` and rendered as `Error: ...` with exit code `2`. - Unknown commands/options and extra positional arguments return deterministic, actionable guidance to run `sce --help`. -- `sce setup --help` returns setup-specific usage output with target-flag contract details. 
+- `sce setup --help` returns setup-specific usage output with target-flag contract details and deterministic examples, including one-run non-interactive setup+hooks and a composable follow-up validation flow (`sce doctor --format json`). +- `sce doctor --help`, `sce mcp --help`, `sce hooks --help`, and `sce sync --help` return command-local usage output and deterministic copy-ready examples. - Interactive `sce setup` prompt cancellation/interrupt exits cleanly with: `Setup cancelled. No files were changed.` - Command handlers return deterministic status messaging: - `setup`: `Setup completed successfully.` plus selected targets, per-target install destinations/counts, and backup status lines. - `doctor`: `SCE doctor: ready|not ready` plus hook-path source, required hook checks, and actionable diagnostics. - - `TODO: 'mcp' is planned and not implemented yet. MCP file-cache surface defines 2 placeholder tool contract(s) with max 1024 entries.` + - `TODO: 'mcp' is planned and not implemented yet. MCP file-cache surface defines 2 placeholder tool contract(s) with max 1024 entries. Next step: run 'sce mcp --help' for current placeholder usage while runtime execution remains disabled.` - `hooks`: deterministic hook subcommand status messaging for runtime entrypoint invocation and argument/STDIN contract validation. - - `TODO: 'sync' cloud workflows are planned and not implemented yet. Local Turso smoke check succeeded (1) row inserted; cloud sync placeholder enumerates 3 phase(s) and plan holds 3 checkpoint(s).` + - `TODO: 'sync' cloud workflows are planned and not implemented yet. Local Turso smoke check succeeded (1) row inserted; cloud sync placeholder enumerates 3 phase(s) and plan holds 3 checkpoint(s). 
Next step: rerun with '--format json' for machine-readable placeholder checkpoints.` ## Service contracts - `cli/src/services/setup.rs` defines setup parsing/selection contracts plus runtime install orchestration (`run_setup_for_mode`) over the embedded asset install engine. -- `cli/src/services/doctor.rs` defines hook rollout health validation (`run_doctor`) with path-source detection (default/local/global) and required-hook presence/executable checks. +- `cli/src/services/config.rs` defines config parser/runtime contracts (`show`, `validate`, `--help`), strict config-file key/type validation, and deterministic text/JSON rendering. +- `cli/src/services/doctor.rs` defines hook rollout health validation (`run_doctor`) with path-source detection (default/local/global), required-hook presence/executable checks, and command-local usage text (`doctor_usage_text`). - `cli/src/services/agent_trace.rs` defines the task-scoped schema adapter contract (`adapt_trace_payload`) from internal attribution input structs to Agent Trace-shaped record structs, including fixed git `vcs` mapping, contributor type mapping, and reserved `dev.crocoder.sce.*` metadata placement. -- `cli/src/services/mcp.rs` defines `McpService`, a `McpCapabilitySnapshot` model (primary + supported transports), and `CachePolicy` defaults for future file-cache workflows (`cache-put`/`cache-get`) with `runnable: false` placeholders. +- `cli/src/services/mcp.rs` defines `McpService`, a `McpCapabilitySnapshot` model (primary + supported transports), `CachePolicy` defaults for future file-cache workflows (`cache-put`/`cache-get`) with `runnable: false` placeholders, command-local usage text (`mcp_usage_text`), and `McpRequest` parsing/rendering for deterministic text or `--format json` placeholder output. +- `cli/src/services/version.rs` defines the version parser/output contract (`parse_version_request`, `render_version`) with deterministic text/JSON output modes. 
+- `cli/src/services/completion.rs` defines the completion parser/output contract (`parse_completion_request`, `render_completion`) with deterministic shell scripts for Bash, Zsh, and Fish. - `cli/src/services/hooks.rs` defines production local hook runtime parsing/dispatch (`HookSubcommand`, `parse_hooks_subcommand`, `run_hooks_subcommand`) for `pre-commit`, `commit-msg`, `post-commit`, and `post-rewrite`, plus checkpoint/persistence/retry finalization seams used by hook entrypoints. -- `cli/src/services/sync.rs` defines cloud-sync abstraction points (`CloudSyncGateway`, `CloudSyncRequest`, `CloudSyncPlan`) layered after the local Turso smoke gate. -- `cli/src/app.rs` dispatches `setup`, `doctor`, `mcp`, and `hooks` through service-level modules so runtime messages are sourced from domain modules instead of inline strings. +- `cli/src/services/resilience.rs` defines shared bounded retry/timeout/backoff execution policy (`RetryPolicy`, `run_with_retry`) with deterministic failure messaging and retry observability hooks. +- `cli/src/services/sync.rs` defines cloud-sync abstraction points (`CloudSyncGateway`, `CloudSyncRequest`, `CloudSyncPlan`) layered after the local Turso smoke gate, plus `SyncRequest` parsing/rendering for deterministic text or `--format json` placeholder output and command-local usage text (`sync_usage_text`). +- `cli/src/app.rs` dispatches `config`, `setup`, `doctor`, `mcp`, `hooks`, `sync`, `version`, and `completion` through service-level modules so runtime messages are sourced from domain modules instead of inline strings. ## Local Turso adapter behavior @@ -87,15 +101,17 @@ Placeholder commands currently acknowledge planned behavior and do not claim pro - in-memory (`:memory:`) - file-backed path (`Builder::new_local()`) - The smoke path creates `sce_smoke`, inserts one row, and runs a query round-trip to confirm readable results. 
-- `cli/src/services/sync.rs` wraps this in a lazily initialized shared tokio current-thread runtime and returns placeholder-safe messaging when local checks pass. +- `cli/src/services/sync.rs` wraps this in a lazily initialized shared tokio current-thread runtime and applies bounded retries (3 attempts), operation timeout (2000ms), and capped backoff (100-400ms) before returning placeholder-safe messaging. - The same sync path now derives deferred cloud checkpoint messaging from `PlaceholderCloudSyncGateway`. +- `cli/src/services/local_db.rs` applies the same resilience wrapper when bootstrapping persistent Agent Trace schema migrations (`ensure_agent_trace_local_db_ready_blocking`) with deterministic retries/timeouts/backoff and actionable terminal failure hints. ## Parser-focused tests -- `cli/src/app.rs` unit tests cover default-help behavior, known command routing, and failure paths for unknown commands/options and extra arguments. +- `cli/src/app.rs` unit tests cover default-help behavior, known command routing, command-local `--help` routing for `doctor`/`mcp`/`hooks`/`sync`, and failure paths for unknown commands/options and extra arguments. - `cli/src/app.rs` additionally validates setup contract routing for interactive default, explicit target flags, and mutually-exclusive setup flag failures. - `cli/src/services/local_db.rs` tests cover in-memory and file-backed local Turso initialization plus execute/query smoke checks. -- `cli/src/services/sync.rs` test confirms `sync` runs the local smoke gate and returns deterministic placeholder messaging. +- `cli/src/services/resilience.rs` tests lock deterministic retry behavior for transient failures, timeout exhaustion, and actionable terminal error messaging. +- `cli/src/services/sync.rs` tests confirm `sync` runs the local smoke gate, preserves deterministic text placeholder messaging, and emits stable JSON placeholder fields. 
- `cli/src/services/{setup,mcp,hooks,sync}.rs` include contract-focused tests for setup flag parsing/validation, interactive selection/cancellation dispatch, setup run messaging, and hook runtime argument/IO/finalization behavior. - `cli/src/services/agent_trace.rs` includes adapter mapping tests for required field projection, contributor enum/model_id handling, and extension metadata placement under reserved reverse-domain keys. - `cli/src/services/setup.rs` tests also verify embedded-manifest completeness against runtime `config/` trees, deterministic sorted path normalization, target-scoped iterator behavior (`OpenCode`, `Claude`, `Both`), install backup creation/replacement, and rollback restoration after injected swap failures. @@ -103,8 +119,8 @@ Placeholder commands currently acknowledge planned behavior and do not claim pro ## Dependency baseline -- `cli/Cargo.toml` declares only: `anyhow`, `hmac`, `inquire`, `lexopt`, `sha2`, `tokio`, and `turso`. -- `tokio` is pinned with `default-features = false` and `features = ["rt"]` to match current runtime usage (current-thread runtime builder and `Runtime::block_on` without broader async feature surface). +- `cli/Cargo.toml` declares only: `anyhow`, `hmac`, `inquire`, `lexopt`, `serde_json`, `sha2`, `tokio`, and `turso`. +- `tokio` is pinned with `default-features = false` and keeps a constrained runtime footprint for current-thread `Runtime::block_on` usage, plus timer-backed bounded retry/timeout behavior in resilience-wrapped operations. - `cli/src/dependency_contract.rs` keeps compile-time crate references centralized for this placeholder slice. 
## Scope boundary for this phase diff --git a/context/context-map.md b/context/context-map.md index ac9539b5..0c6ce0af 100644 --- a/context/context-map.md +++ b/context/context-map.md @@ -7,7 +7,8 @@ Primary context files: - `context/glossary.md` Feature/domain context: -- `context/cli/placeholder-foundation.md` (CLI command surface, setup install flow, shared-runtime sync smoke gate, nested flake release package/app installability, and Cargo local install + crates.io readiness policy) +- `context/cli/placeholder-foundation.md` (CLI command surface, setup install flow, bounded resilience-wrapped sync/local-DB smoke and bootstrap behavior, nested flake release package/app installability, and Cargo local install + crates.io readiness policy) +- `context/cli/config-precedence-contract.md` (implemented `sce config` show/validate command contract, deterministic `flags > env > config file > defaults` resolution order, config-file selection order, and text/JSON output schema) - `context/sce/shared-context-code-workflow.md` - `context/sce/shared-context-plan-workflow.md` (canonical `/change-to-plan` workflow, clarification/readiness gate contract, and one-task/one-atomic-commit task-slicing policy) - `context/sce/plan-code-overlap-map.md` (T01 overlap matrix for Shared Context Plan/Code, related commands, and core skill ownership/dedup targets) @@ -26,7 +27,9 @@ Feature/domain context: - `context/sce/setup-githooks-install-contract.md` (T01 canonical `sce setup --hooks` install contract for target-path resolution, idempotent outcomes, backup/rollback, and doctor-readiness alignment) - `context/sce/setup-githooks-hook-asset-packaging.md` (T02 compile-time `sce setup --hooks` required-hook template packaging contract and setup-service accessor surface) - `context/sce/setup-githooks-install-flow.md` (T03 setup-service required-hook install orchestration with git-truth hooks-path resolution, per-hook installed/updated/skipped outcomes, and backup/rollback semantics) -- 
`context/sce/setup-githooks-cli-ux.md` (T04 `sce setup --hooks` / `--repo` command-surface contract, option compatibility validation, and deterministic per-hook output semantics) +- `context/sce/setup-githooks-cli-ux.md` (T04 composable `sce setup` target+`--hooks` / `--repo` command-surface contract, option compatibility validation, and deterministic setup/hook output semantics) +- `context/sce/setup-nix-integration-test-contract.md` (T01 canonical setup integration-test scenario matrix and deterministic assertion policy for Nix-run binary-driven tests) +- `context/sce/cli-security-hardening-contract.md` (T06 CLI redaction contract, setup `--repo` canonicalization/validation, and setup write-permission probe behavior) - `context/sce/agent-trace-post-rewrite-local-remap-ingestion.md` (T08 `post-rewrite` local remap ingestion contract with strict pair parsing, rewrite-method normalization, and deterministic replay-key derivation) - `context/sce/agent-trace-rewrite-trace-transformation.md` (T09 rewritten-SHA trace transformation contract with rewrite metadata, confidence-to-quality mapping, and notes+DB persistence parity) - `context/sce/agent-trace-core-schema-migrations.md` (T10 core local schema migration contract for `repositories`, `commits`, `trace_records`, and `trace_ranges` with upgrade-safe idempotent create semantics) @@ -36,6 +39,13 @@ Feature/domain context: - `context/sce/agent-trace-retry-queue-observability.md` (T14 retry queue recovery contract plus reconciliation/runtime observability metrics and DB-first queue schema additions) - `context/sce/agent-trace-local-hooks-mvp-contract-gap-matrix.md` (T01 Local Hooks MVP production contract freeze and deterministic gap matrix for `agent-trace-local-hooks-production-mvp`) - `context/sce/agent-trace-hooks-command-routing.md` (implemented `sce hooks` command routing plus current runtime entrypoint behavior, including commit-msg policy gating/file mutation and post-rewrite remap+rewrite finalization wiring) 
+- `context/sce/cli-exit-code-contract.md` (implemented stable `sce` exit-code class contract and `parse`/`validation`/`runtime`/`dependency` failure mapping) +- `context/sce/cli-error-code-taxonomy.md` (implemented stable user-facing stderr error-code classes, `Error [<code>]` rendering contract, and class-default `Try:` remediation injection behavior) +- `context/sce/cli-observability-contract.md` (implemented structured observability baseline for `sce` app runtime: deterministic level/format env controls, optional file sink controls/policy, OTEL bootstrap wiring, stable event IDs, and stderr-only primary log emission) +- `context/sce/cli-stdout-stderr-contract.md` (implemented app-level stream routing contract: success payload emission on stdout, redacted diagnostics on stderr, and centralized stream ownership in `cli/src/app.rs`) +- `context/sce/cli-version-command-contract.md` (implemented `sce version` command contract with deterministic text output, `--format <text|json>` parsing/validation, and stable JSON runtime-identification fields) +- `context/sce/cli-shell-completion-contract.md` (implemented `sce completion` command contract with deterministic Bash/Zsh/Fish script generation, strict `--shell` validation, and parser/docs alignment coverage) +- `context/sce/cli-shared-output-format-contract.md` (implemented T13 canonical shared `--format <text|json>` parser contract and wiring across supported commands) Working areas: - `context/plans/` (active plan execution artifacts, not durable history) diff --git a/context/glossary.md b/context/glossary.md index e7113322..a735dc81 100644 --- a/context/glossary.md +++ b/context/glossary.md @@ -3,6 +3,7 @@ - `sync-opencode-config`: Flake app command exposed as `nix run .#sync-opencode-config`; canonical operator entrypoint for staged regeneration/replacement of `config/` and replacement of repository-root `.opencode/` from regenerated `config/.opencode/`.
- `token-count-workflows`: Flake app command exposed as `nix run .#token-count-workflows`; canonical repository-root entrypoint that runs `evals/token-count-workflows.ts` through `nix develop` and writes token-count artifacts to `context/tmp/token-footprint/`. - `pkl-check-generated`: Flake app command exposed as `nix run .#pkl-check-generated`; canonical lightweight parity test entrypoint that runs the generated-output drift check inside the Nix dev shell. +- `cli-integration-tests`: Flake app command exposed as `nix run .#cli-integration-tests`; canonical repository-root entrypoint that runs the Rust setup integration suite (`cli/tests/setup_integration.rs`) via `cargo test --test setup_integration` inside the Nix dev shell. - lightweight post-task verification baseline: Required quick checks after each completed task in this repo: `nix run .#pkl-check-generated` and `nix flake check`. - disposable plan lifecycle: Policy where `context/plans/` holds active execution artifacts only; completed plans are disposable and durable outcomes must be reflected in current-state context files and/or `context/decisions/`. - important change (context sync): A completed task change that affects cross-cutting behavior, repository-wide policy/contracts, architecture boundaries, or canonical terminology; these changes require root context edits in `context/overview.md`, `context/architecture.md`, and/or `context/glossary.md` instead of verify-only handling. @@ -11,7 +12,9 @@ - `agnix-config-validate-report`: GitHub Actions workflow at `.github/workflows/agnix-config-validate-report.yml` that runs `nix develop -c agnix validate .` from `config/` on push/PR to `main`. - `agnix validation report artifact`: Failure-investigation artifact named `agnix-validate-report`, uploaded from deterministic path `context/tmp/ci-reports/agnix-validate-report.txt` when non-info (`warning:`/`error:`/`fatal:`) findings are detected. 
- `workflow-token-count` CI workflow: GitHub Actions workflow at `.github/workflows/workflow-token-count.yml` that runs `nix run .#token-count-workflows` on push/PR to `main` and uploads token-count outputs from `context/tmp/token-footprint/` as the `workflow-token-footprint` artifact. +- `cli-integration-tests` CI workflow: GitHub Actions workflow at `.github/workflows/cli-integration-tests.yml` that runs `nix run .#cli-integration-tests` on every push (no branch filter). - `cli-setup-command-surface` flake check: `checks.<system>.cli-setup-command-surface` in `flake.nix`; runs `cargo fmt --check` and focused setup command-surface tests from `cli/` during `nix flake check`. +- `cli-setup-integration` flake check: `checks.<system>.cli-setup-integration` in both `cli/flake.nix` and root `flake.nix`; runs `cargo test --test setup_integration` from `cli/` during `nix flake check`. - `cli rust overlay toolchain`: Toolchain contract in `cli/flake.nix` that applies `rust-overlay.overlays.default`, selects `rust-bin.stable.latest.default` with `rustfmt`, and builds the CLI Rust platform via `makeRustPlatform`. - `cli flake release package`: Nested flake package output in `cli/flake.nix` exposed as `packages.sce` with `packages.default = packages.sce`, producing the release-build `sce` binary via `nix build ./cli#default`. - `cli flake runnable app`: Nested flake app output in `cli/flake.nix` exposed as `apps.sce`, executing `${packages.sce}/bin/sce` with passthrough args via `nix run ./cli#sce -- ...`.
- `command surface contract`: The static command catalog in `cli/src/command_surface.rs` that marks each top-level command as `implemented` or `placeholder`. -- `command loop`: The `lexopt` parser + dispatcher in `cli/src/app.rs` that routes `help`, `setup`, `doctor`, `mcp`, `hooks`, and `sync`, executes implemented setup/doctor/hooks flows, emits TODO placeholders for non-implemented commands, and returns deterministic actionable errors for invalid invocation. +- `command loop`: The `lexopt` parser + dispatcher in `cli/src/app.rs` that routes `help`, `config`, `setup`, `doctor`, `mcp`, `hooks`, `sync`, `version`, and `completion`, executes implemented command flows, emits TODO placeholders for deferred commands, and returns deterministic actionable errors for invalid invocation. - `sce dependency contract`: Minimal crate dependency baseline declared in `cli/Cargo.toml` and referenced via `cli/src/dependency_contract.rs` (`anyhow`, `hmac`, `inquire`, `lexopt`, `serde_json`, `sha2`, `tokio`, `turso`). - `local Turso adapter`: Async data-layer module in `cli/src/services/local_db.rs` that initializes local DB targets with `turso::Builder::new_local(...)` and runs execute/query smoke checks. - `sync Turso smoke gate`: Behavior in `cli/src/services/sync.rs` where the `sync` placeholder command runs an in-memory local Turso smoke check under a lazily initialized shared tokio current-thread runtime before returning placeholder cloud-sync messaging. +- `CLI bounded resilience wrapper`: Shared policy in `cli/src/services/resilience.rs` (`RetryPolicy`, `run_with_retry`) that applies deterministic retries/timeouts/capped backoff to transient operations, emits retry observability events, and returns actionable terminal failure guidance. - `setup service orchestration`: Setup execution logic in `cli/src/services/setup.rs` that resolves target selection, installs embedded assets, and emits deterministic success messaging per target. 
- `setup target flags`: Mutually-exclusive `sce setup` target selectors (`--opencode`, `--claude`, `--both`) that force non-interactive mode for automation. - `setup mode contract`: `cli/src/services/setup.rs` model where `SetupMode::Interactive` is the default and `SetupMode::NonInteractive(SetupTarget)` is selected only when exactly one target flag is provided. @@ -32,7 +36,30 @@ - `setup embedded asset manifest`: Compile-time generated file index emitted by `cli/build.rs` into `OUT_DIR/setup_embedded_assets.rs`, embedding bytes from `config/.opencode/**`, `config/.claude/**`, and `cli/assets/hooks/**` as deterministic normalized relative-path entries consumed by `cli/src/services/setup.rs`. - `setup required-hook embedded assets`: Setup-service accessors in `cli/src/services/setup.rs` (`iter_required_hook_assets`, `get_required_hook_asset`) that expose canonical embedded templates for `pre-commit`, `commit-msg`, and `post-commit` without runtime config reads. - `setup required-hook install orchestration`: Setup-service flow in `cli/src/services/setup.rs` (`install_required_git_hooks`) that resolves repository root + effective hooks directory via git truth, installs canonical required hooks with deterministic per-hook outcomes (`Installed`, `Updated`, `Skipped`), enforces executable permissions, and performs backup-and-restore rollback when hook swap fails. -- `setup hooks CLI mode`: `sce setup` mode activated by `--hooks` with optional `--repo `; implemented in `cli/src/app.rs` + `cli/src/services/setup.rs`, enforces deterministic compatibility validation (`--repo` requires `--hooks`; `--hooks` cannot be combined with target flags), and emits per-hook `installed`/`updated`/`skipped` + backup-status output. 
+- `setup hooks CLI mode`: `sce setup` behavior activated by `--hooks` (with optional `--repo `), supporting both hooks-only runs and composable target+hooks runs in one invocation; implemented in `cli/src/app.rs` + `cli/src/services/setup.rs`, enforces deterministic compatibility validation (`--repo` requires `--hooks`; target flags remain mutually exclusive), and emits stable setup/hook status output. +- `CLI redaction-safe diagnostics contract`: baseline security behavior implemented via `cli/src/services/security.rs` (`redact_sensitive_text`) and applied to app-level errors, setup git-diagnostic surfacing, and observability output sinks so common secret-bearing token forms are masked before emission. +- `setup directory write-permission probe`: deterministic pre-write guard implemented in `cli/src/services/security.rs` (`ensure_directory_is_writable`) and used by setup install/hook flows to fail fast with actionable remediation when target directories are not writable. +- `setup --repo canonical path guard`: setup-hook runtime behavior in `cli/src/services/setup.rs` that canonicalizes and validates user-supplied `--repo` paths as existing directories before git-root/hooks-path resolution. +- `sce exit-code class contract`: Stable top-level process exit-code mapping owned by `cli/src/app.rs` (`0` success, `2` parse failure, `3` validation failure, `4` runtime failure, `5` dependency failure) so automation can branch on failure class without parsing text errors. +- `sce stderr error-code taxonomy`: Stable user-facing diagnostic code classes emitted by `cli/src/app.rs` (`SCE-ERR-PARSE`, `SCE-ERR-VALIDATION`, `SCE-ERR-RUNTIME`, `SCE-ERR-DEPENDENCY`) via `Error []: ...` stderr formatting. +- `class-default Try guidance injection`: `cli/src/app.rs` diagnostic behavior that appends `Try:` remediation text by failure class only when an error message does not already include `Try:` guidance. 
+- `sce observability baseline`: App-runtime logging contract in `cli/src/services/observability.rs` and `cli/src/app.rs` with deterministic env-controlled level/format selection (`SCE_LOG_LEVEL`, `SCE_LOG_FORMAT`), optional file sink controls (`SCE_LOG_FILE`, `SCE_LOG_FILE_MODE`), stable lifecycle `event_id` values, and stderr-only primary emission. +- `sce stdout/stderr contract`: App-level stream routing contract in `cli/src/app.rs` where command success payloads are emitted on stdout only, while redacted user-facing diagnostics are emitted on stderr. +- `SCE_LOG_LEVEL`: Optional runtime env key for `sce` observability threshold; allowed values are `error`, `warn`, `info`, and `debug`, defaulting to `info` when unset. +- `SCE_LOG_FORMAT`: Optional runtime env key for `sce` observability record format; allowed values are `text` and `json`, defaulting to `text` when unset. +- `SCE_LOG_FILE`: Optional runtime env key for `sce` observability file sink path; when set, rendered observability lines are mirrored to this file path with parent-directory auto-create behavior. +- `SCE_LOG_FILE_MODE`: Optional runtime env key controlling `SCE_LOG_FILE` write policy; allowed values are `truncate` and `append`, defaults to `truncate`, and requires `SCE_LOG_FILE`. +- `SCE_OTEL_ENABLED`: Optional runtime env gate for `sce` OpenTelemetry bootstrap in `cli/src/services/observability.rs`; allowed values are `true`/`false`/`1`/`0`, defaulting to disabled when unset. +- `OTEL_EXPORTER_OTLP_ENDPOINT`: Optional runtime OTLP exporter endpoint env key used when `SCE_OTEL_ENABLED` is enabled; defaults to `http://127.0.0.1:4317` and must be an absolute `http(s)` URL. +- `OTEL_EXPORTER_OTLP_PROTOCOL`: Optional runtime OTLP protocol selector used when `SCE_OTEL_ENABLED` is enabled; allowed values are `grpc` and `http/protobuf`, defaulting to `grpc`. 
+- `observability subscriber context`: Runtime wrapper in `cli/src/app.rs` that executes parse/dispatch under `TelemetryRuntime::with_default_subscriber`, attaching the OpenTelemetry layer only when OTEL export is enabled. +- `sce config command surface`: Implemented top-level CLI command routed by `cli/src/app.rs` to `cli/src/services/config.rs`, exposing `show`, `validate`, and `--help` for deterministic runtime config inspection and validation. +- `sce version command surface`: Implemented top-level CLI command routed by `cli/src/app.rs` to `cli/src/services/version.rs`, exposing deterministic runtime identification output in text form by default and JSON form via `--format json`. +- `sce version output contract`: `cli/src/services/version.rs` rendering contract where text output is a deterministic single-line ` ()` payload and JSON output includes stable fields `status`, `command`, `binary`, `version`, and `build_profile`. +- `sce completion command surface`: Implemented top-level CLI command routed by `cli/src/app.rs` to `cli/src/services/completion.rs`, requiring `--shell ` and returning a deterministic shell completion script on stdout. +- `sce shared output-format contract`: Canonical parser contract in `cli/src/services/output_format.rs` (`OutputFormat`) that centralizes supported `--format` values (`text`, `json`) and emits command-specific actionable invalid-value guidance (`Run ' --help' ...`) for commands wired to dual-output rendering. +- `sce shell completion contract`: Deterministic CLI completion contract where `sce completion --shell ` emits parser-aligned Bash/Zsh/Fish completion scripts for current top-level commands and supported options/subcommands. 
+- `cli config precedence contract`: Deterministic runtime value resolution in `cli/src/services/config.rs` with precedence `flags > env > config file > defaults` for `log_level` and `timeout_ms`; config discovery order is `--config`, `SCE_CONFIG_FILE`, then default discovered global+local paths (`${state_root}/sce/config.json` merged before `.sce/config.json`, with local overriding per key). - `setup install engine`: Installer in `cli/src/services/setup.rs` (`install_embedded_setup_assets`) that writes embedded setup assets into per-target staging directories and swaps them into repository-root `.opencode/`/`.claude/` destinations. - `setup backup-and-replace`: Replacement choreography in `cli/src/services/setup.rs` where existing install targets are renamed to unique `.backup` paths before staged content is promoted; on swap failure, the engine restores the original target from backup and cleans temporary staging paths. - `MCP capability snapshot`: Placeholder capability model in `cli/src/services/mcp.rs` that captures planned file-cache transport/tool contracts (`cache-put`, `cache-get`) and cache policy defaults without enabling runtime MCP execution. diff --git a/context/overview.md b/context/overview.md index 059bedcb..b4e023c3 100644 --- a/context/overview.md +++ b/context/overview.md @@ -5,18 +5,25 @@ This repository maintains shared assistant configuration for OpenCode and Claude It also includes an early Rust CLI foundation at `cli/` for Shared Context Engineering workflows. The crate ships onboarding and usage documentation at `cli/README.md` that reflects current implemented vs placeholder behavior. -The CLI crate currently enforces a minimal dependency contract: `anyhow`, `hmac`, `inquire`, `lexopt`, `serde_json`, `sha2`, `tokio`, and `turso`. 
-Its command loop is implemented with `lexopt` argument parsing and `anyhow` error handling, with real setup orchestration, implemented `doctor` rollout validation, implemented `hooks` subcommand routing/validation entrypoints, and placeholder dispatch for deferred commands (`mcp`, `sync`) through explicit service contracts. -The `setup` command includes an `inquire`-backed target-selection flow: default interactive selection for OpenCode/Claude/both, explicit non-interactive target flags (`--opencode`, `--claude`, `--both`), deterministic mutually-exclusive validation, and non-destructive cancellation exits. +The CLI crate currently enforces a minimal dependency contract: `anyhow`, `hmac`, `inquire`, `lexopt`, `opentelemetry`, `opentelemetry-otlp`, `opentelemetry_sdk`, `serde_json`, `sha2`, `tokio`, `tracing`, `tracing-opentelemetry`, `tracing-subscriber`, and `turso`. +Its command loop is implemented with `lexopt` argument parsing and `anyhow` error handling, with implemented config inspection/validation (`config show`/`config validate`), real setup orchestration, implemented `doctor` rollout validation, implemented `hooks` subcommand routing/validation entrypoints, implemented machine-readable runtime identification (`version`), implemented shell completion script generation (`completion --shell `), and placeholder dispatch for deferred commands (`mcp`, `sync`) through explicit service contracts. +The command loop now enforces a stable exit-code contract in `cli/src/app.rs`: `2` parse failures, `3` invocation validation failures, `4` runtime failures, and `5` dependency startup failures. +The same runtime also emits stable user-facing stderr error classes (`SCE-ERR-PARSE`, `SCE-ERR-VALIDATION`, `SCE-ERR-RUNTIME`, `SCE-ERR-DEPENDENCY`) using deterministic `Error []: ...` diagnostics with class-default `Try:` remediation appended when missing. 
+The app runtime now also includes a structured observability baseline in `cli/src/services/observability.rs`: deterministic env-controlled log threshold/format (`SCE_LOG_LEVEL`, `SCE_LOG_FORMAT`), optional file sink controls (`SCE_LOG_FILE`, `SCE_LOG_FILE_MODE` with deterministic `truncate` default), optional OpenTelemetry export bootstrap (`SCE_OTEL_ENABLED`, `OTEL_EXPORTER_OTLP_ENDPOINT`, `OTEL_EXPORTER_OTLP_PROTOCOL`), stable lifecycle event IDs, and stderr-only primary emission so stdout command payloads remain pipe-safe. +The app command dispatcher now enforces a centralized stdout/stderr stream contract in `cli/src/app.rs`: command success payloads are emitted on stdout only, while redacted user-facing diagnostics are emitted on stderr. +The CLI now also enforces a shared output-format parser contract in `cli/src/services/output_format.rs`, with canonical `--format ` parsing and command-specific actionable invalid-value guidance reused by `config` and `version` services. +The `setup` command includes an `inquire`-backed target-selection flow: default interactive selection for OpenCode/Claude/both with required-hook installation in the same run, explicit non-interactive target flags (`--opencode`, `--claude`, `--both`), deterministic mutually-exclusive validation, and non-destructive cancellation exits. The CLI now compiles an embedded setup asset manifest from `config/.opencode/**`, `config/.claude/**`, and `cli/assets/hooks/**` via `cli/build.rs`; `cli/src/services/setup.rs` exposes deterministic normalized relative paths plus file bytes and target-scoped iteration without runtime reads from `config/`. The setup service also provides repository-root install orchestration: it resolves interactive or flag-based target selection, installs embedded assets, and reports deterministic completion details (selected target(s), installed file counts, and backup actions). 
+The CLI now also applies baseline security hardening for reliability-driven automation: diagnostics/logging paths use deterministic secret redaction, `sce setup --hooks --repo ` canonicalizes and validates repository paths before execution, and setup write flows run explicit directory write-permission probes before staging/swap operations. +The config service now provides deterministic runtime config resolution with explicit precedence (`flags > env > config file > defaults`), strict config-file validation (`log_level`, `timeout_ms`), deterministic default discovery/merge of global+local config files (`${state_root}/sce/config.json` then `.sce/config.json` with local override), and deterministic text/JSON output contracts for `sce config show` and `sce config validate`. The `doctor` command now validates Agent Trace local rollout readiness by resolving effective git hook-path source (default, per-repo `core.hooksPath`, or global `core.hooksPath`) and checking required hook presence/executable permissions with actionable diagnostics. The `mcp` placeholder contract is now scoped to future file-cache workflows (`cache-put`/`cache-get`) and remains intentionally non-runnable. -The `sync` placeholder performs a local Turso smoke check through a lazily initialized shared tokio current-thread runtime and then reports a deferred cloud-sync plan from a placeholder gateway contract. +The `sync` placeholder performs a local Turso smoke check through a lazily initialized shared tokio current-thread runtime with bounded retry/timeout/backoff controls, then reports a deferred cloud-sync plan from a placeholder gateway contract; persistent local DB schema bootstrap now uses the same bounded resilience wrapper. The nested CLI flake (`cli/flake.nix`) now applies a Rust overlay-backed stable toolchain (with `rustfmt`) and uses that toolchain contract for CLI check/build derivations. 
The nested CLI flake now also exposes release install/run outputs: `packages.sce` (with `packages.default = packages.sce`) and `apps.sce`, so `nix build ./cli#default` and `nix run ./cli#sce -- --help` execute against the packaged `sce` binary. The CLI Cargo package metadata now includes crates.io-facing fields while keeping `publish = false`; local install/release flows are documented as `cargo install --path cli --locked` and `cargo build --manifest-path cli/Cargo.toml --release`. -The repository-root flake now keeps nested CLI flake input wiring coherent by passing through `nixpkgs`, `flake-utils`, and `rust-overlay`, so root-level `nix flake check` can evaluate CLI checks without missing-input failures. +The repository-root flake now keeps nested CLI flake input wiring coherent by passing through `nixpkgs`, `flake-utils`, and `rust-overlay`, so root-level `nix flake check` can evaluate CLI checks (including setup command-surface and setup integration slices) without missing-input failures. Shared Context Plan and Shared Context Code remain separate agent roles by design; planning (`/change-to-plan`) and implementation (`/next-task`) stay split while shared baseline guidance is deduplicated via canonical skill-owned contracts. Their shared baseline doctrine (core principles, `context/` authority, and quality posture) is defined once as canonical snippets in `config/pkl/base/shared-content.pkl` and composed into both agent bodies during generation. The `/next-task` command body is intentionally thin orchestration: readiness gating + phase sequencing are command-owned, while detailed implementation/context-sync contracts are skill-owned (`sce-plan-review`, `sce-task-execution`, `sce-context-sync`). 
@@ -40,7 +47,7 @@ The hooks service now also includes operational retry-queue replay processing (` The hooks command surface now also supports concrete runtime subcommand routing (`pre-commit`, `commit-msg`, `post-commit`, `post-rewrite`) with deterministic argument/STDIN validation and production post-rewrite runtime wiring (local remap ingestion plus rewritten-trace finalization through notes+DB adapters) owned by `cli/src/services/hooks.rs`; this behavior is documented in `context/sce/agent-trace-hooks-command-routing.md`. The setup service now also exposes deterministic required-hook embedded asset accessors (`iter_required_hook_assets`, `get_required_hook_asset`) backed by canonical templates in `cli/assets/hooks/` for `pre-commit`, `commit-msg`, and `post-commit`; this behavior is documented in `context/sce/setup-githooks-hook-asset-packaging.md`. The setup service now also includes required-hook install orchestration (`install_required_git_hooks`) that resolves repository root and effective hooks path from git truth, enforces deterministic per-hook outcomes (`Installed`/`Updated`/`Skipped`), and performs backup-and-restore rollback on swap failures; this behavior is documented in `context/sce/setup-githooks-install-flow.md`. -The setup command parser/dispatch now also supports `sce setup --hooks` with optional `--repo `, enforces deterministic compatibility validation (`--repo` requires `--hooks`; `--hooks` incompatible with setup target flags), and emits deterministic per-hook setup outcome messaging (`installed`/`updated`/`skipped` with backup status); this behavior is documented in `context/sce/setup-githooks-cli-ux.md`. 
+The setup command parser/dispatch now also supports composable setup+hooks runs (`sce setup --opencode|--claude|--both --hooks`) plus hooks-only mode (`sce setup --hooks` with optional `--repo `), enforces deterministic compatibility validation (`--repo` requires `--hooks`; target flags remain mutually exclusive), and emits deterministic setup/hook outcome messaging (`installed`/`updated`/`skipped` with backup status); this behavior is documented in `context/sce/setup-githooks-cli-ux.md`. ## Repository model @@ -61,7 +68,8 @@ The setup command parser/dispatch now also supports `sce setup --hooks` with opt - Verify generated outputs are current: `nix run .#pkl-check-generated` - Run staged destructive sync for `config/` and root `.opencode/`: `nix run .#sync-opencode-config` - Run workflow token counting from repo root: `nix run .#token-count-workflows` -- Run repository flake checks (includes CLI setup command-surface checks): `nix flake check` +- Run setup integration tests through the deterministic flake app entrypoint: `nix run .#cli-integration-tests` +- Run repository flake checks (includes CLI setup command-surface and setup integration checks): `nix flake check` Lightweight post-task verification baseline (required after each completed task): run `nix run .#pkl-check-generated` and `nix flake check`. @@ -70,6 +78,7 @@ Lightweight post-task verification baseline (required after each completed task) - `.github/workflows/pkl-generated-parity.yml` runs parity checks on every push (the workflow no longer filters by branch or defines a pull-request trigger). - `.github/workflows/agnix-config-validate-report.yml` runs `agnix validate` from `config/`, fails on non-info findings, and uploads a deterministic report artifact when findings are present. - `.github/workflows/workflow-token-count.yml` runs `nix run .#token-count-workflows` on every push, then uploads token-footprint artifacts from `context/tmp/token-footprint/`.
+- `.github/workflows/cli-integration-tests.yml` runs `nix run .#cli-integration-tests` on every push (no branch or pull-request filter); Windows runners run `cargo test --test setup_integration` directly because Nix is not installed there. ## Cross-target parity @@ -109,4 +118,10 @@ - Use `context/sce/agent-trace-hooks-command-routing.md` for the implemented T02 `sce hooks` command routing contract (subcommand parsing, deterministic invocation errors, and initial runtime entrypoint behavior). - Use `context/sce/setup-githooks-hook-asset-packaging.md` for the implemented `sce-setup-githooks-any-repo` T02 compile-time hook-template packaging contract and setup-service required-hook embedded accessor surface. - Use `context/sce/setup-githooks-install-flow.md` for the implemented `sce-setup-githooks-any-repo` T03 required-hook install orchestration contract (git-truth hooks-path resolution, per-hook installed/updated/skipped outcomes, and backup/rollback behavior). -- Use `context/sce/setup-githooks-cli-ux.md` for the implemented `sce-setup-githooks-any-repo` T04 setup command-surface contract (`--hooks`, optional `--repo`), compatibility validation rules, and deterministic hook setup messaging. +- Use `context/sce/setup-githooks-cli-ux.md` for the implemented `sce-setup-githooks-any-repo` T04 setup command-surface contract (composable target+`--hooks`, optional `--repo`), compatibility validation rules, and deterministic setup/hook messaging. +- Use `context/sce/cli-observability-contract.md` for the implemented structured observability baseline (env-controlled level/format, OTEL bootstrap wiring, stable event IDs, and stderr-only log emission contract). +- Use `context/sce/cli-stdout-stderr-contract.md` for the implemented app-level stream routing contract (`stdout` for success payloads, `stderr` for redacted diagnostics, and centralized stream ownership in `cli/src/app.rs`).
+- Use `context/sce/cli-version-command-contract.md` for the implemented `sce version` command contract (`--format `, deterministic text output, and stable JSON runtime-identification fields). +- Use `context/sce/cli-shell-completion-contract.md` for the implemented `sce completion` command contract (`--shell `, deterministic script output, and parser/docs alignment requirements). +- Use `context/sce/cli-shared-output-format-contract.md` for the implemented T13 shared `--format ` parser contract and current command wiring (`config`, `version`). +- Use `context/sce/cli-error-code-taxonomy.md` for the implemented stderr error-code taxonomy (`SCE-ERR-*` classes), deterministic `Error []` diagnostic rendering, and class-default `Try:` guidance injection. diff --git a/context/patterns.md b/context/patterns.md index b1b7e5e2..2137701a 100644 --- a/context/patterns.md +++ b/context/patterns.md @@ -11,6 +11,7 @@ - Current repo command contracts: - `nix run .#sync-opencode-config` is the canonical entrypoint for staged regeneration/replacement of `config/` and replacement of repository-root `.opencode/` from regenerated `config/.opencode/`. - `nix run .#token-count-workflows` is the canonical root entrypoint for static workflow token counting (wrapping `bun run token-count-workflows` from `evals/` through `nix develop`). + - `nix run .#cli-integration-tests` is the canonical root entrypoint for the Rust setup integration suite (`cargo test --manifest-path cli/Cargo.toml --test setup_integration -- --nocapture`) through `nix develop`. - For flake app outputs, include `meta.description` so `nix flake check` app validation stays warning-free. - For destructive config replacement flows, regenerate into a temporary staged `config/` first, validate required generated directories exist, and only then swap live `config/`. 
- For destructive root `.opencode/` replacement flows, keep exclusions explicit (for example `node_modules`), use backup-and-restore around swap, and run a source/target tree parity check with the same exclusions. @@ -52,6 +53,7 @@ - Run stale-output detection through the flake app entrypoint `nix run .#pkl-check-generated`; it wraps `nix develop -c ./config/pkl/check-generated.sh`, regenerates into a temporary directory, and fails if generated-owned paths differ from committed outputs. - Keep CI parity enforcement aligned with local workflow by running the same command in `.github/workflows/pkl-generated-parity.yml` on every push. - Keep token-count CI aligned with the flake app contract by running `nix run .#token-count-workflows` in `.github/workflows/workflow-token-count.yml` on every push, and upload artifacts from `context/tmp/token-footprint/`. +- Keep setup integration CI aligned with the flake app contract by running `nix run .#cli-integration-tests` in `.github/workflows/cli-integration-tests.yml` on every push, with Windows runners executing `cargo test --test setup_integration` directly. - Treat `nix run .#pkl-check-generated` and `nix flake check` as the lightweight post-task verification baseline and run both after each completed task. - Keep agnix config validation on the same trigger contract (unfiltered `push`) in `.github/workflows/agnix-config-validate-report.yml` with job defaults pinned to `working-directory: config`. - In the agnix CI workflow, capture command output to `context/tmp/ci-reports/agnix-validate-report.txt`, treat `warning:`/`error:`/`fatal:` findings as non-info gate failures, and upload the captured report as a GitHub artifact (`agnix-validate-report`) only when non-info findings are present. @@ -71,19 +73,30 @@ - For early CLI foundation tasks, keep implemented behavior and planned behavior explicitly separated in a single command contract table.
- Mark placeholder commands in help output and runtime responses so scaffolding cannot be confused with production capability. -- Parse CLI args with `lexopt` and normalize user-facing failures through `anyhow` so invalid invocation paths stay deterministic and actionable. +- Parse CLI args with `lexopt`, classify top-level failures into stable exit-code classes (`parse`, `validation`, `runtime`, `dependency`), and keep user-facing failures deterministic/actionable. +- Emit user-facing CLI diagnostics with stable class-based error IDs (`SCE-ERR-PARSE`, `SCE-ERR-VALIDATION`, `SCE-ERR-RUNTIME`, `SCE-ERR-DEPENDENCY`) using deterministic `Error []: ...` stderr formatting, and auto-append class-default `Try:` remediation only when the message does not already provide one. +- Keep CLI observability separate from command payloads: emit deterministic lifecycle logs to `stderr` only with stable `event_id` values, and preserve `stdout` for command result payloads. +- For baseline runtime observability controls, use deterministic env switches (`SCE_LOG_LEVEL`, `SCE_LOG_FORMAT`) with strict allowed values and fail-fast validation on invalid inputs. +- For optional observability file sinks, gate enablement behind explicit `SCE_LOG_FILE`, require `SCE_LOG_FILE_MODE` only when file sink is set, default write policy to deterministic `truncate`, and enforce owner-only file permissions (`0600`) on Unix. +- For OTEL baseline wiring, keep exporter bootstrap opt-in (`SCE_OTEL_ENABLED`), keep exporter mode env-addressable (`OTEL_EXPORTER_OTLP_ENDPOINT`, `OTEL_EXPORTER_OTLP_PROTOCOL`), and validate invalid endpoint/protocol values as invocation validation failures before command dispatch. +- Mirror lifecycle logger events into tracing events and attach OTEL subscriber context only around command execution so stdout payload contracts remain unchanged. 
+- For runtime CLI configuration, keep precedence deterministic and explicit (`flags > env > config file > defaults`) and expose inspect/validate command entrypoints with stable text/JSON outputs. +- For commands that support text/JSON dual output, centralize `--format ` parsing in one shared contract and pass command-specific `--help` guidance into invalid-value errors instead of duplicating parser logic per command. - For setup-style command contracts, keep interactive mode as the zero-flag default and enforce mutually-exclusive explicit target flags for non-interactive automation. +- For security-sensitive CLI UX, redact common secret-bearing token/value forms before emitting diagnostics/log lines, including app-level errors, setup git stderr diagnostics, and observability sink output. +- For user-supplied setup repository paths (`sce setup --hooks --repo `), canonicalize/validate the path as an existing directory before git command execution, and run deterministic write-permission probes on setup write targets before staging/swap operations. - For interactive setup flows, isolate prompt handling behind a service-layer prompter seam so selection mapping and cancellation behavior can be tested without a live TTY. - Treat setup prompt cancellation/interrupt as a non-destructive exit path with explicit user messaging (no file mutations and no partial side effects). - For setup install prep, generate compile-time embedded asset manifests from `config/.opencode/**`, `config/.claude/**`, and `cli/assets/hooks/**` in `cli/build.rs`, keep relative paths normalized to forward-slash form, and expose target-scoped iterators/lookups from the setup service layer for installer wiring. 
- For setup install execution, write selected embedded assets into a per-target staging directory first, then swap into repository-root `.opencode/`/`.claude/` with backup-and-replace semantics; when swap fails after backup creation, restore the original target path from backup and clean staging directories. - For required-hook setup execution, resolve repository root and effective hooks directory from git (`rev-parse --show-toplevel`, `rev-parse --git-path hooks`), then apply deterministic per-hook outcomes (`Installed`, `Updated`, `Skipped`) with staged writes, executable-bit enforcement, and backup-and-restore rollback on swap failures. -- For hook setup CLI UX, treat `--hooks` as a dedicated setup mode with optional `--repo `, enforce deterministic option compatibility (`--repo` requires `--hooks`; no `--hooks` + target-flag mixes), and emit stable per-hook status + backup lines for automation-friendly logs. +- For hook setup CLI UX, allow `--hooks` as both hooks-only and composable target+hooks execution (optional `--repo `), enforce deterministic option compatibility (`--repo` requires `--hooks`; target flags stay mutually exclusive), and emit stable section-ordered setup/hook status lines for automation-friendly logs. - For setup command messaging, emit deterministic completion output that includes selected target(s), per-target install counts, and whether backup was created. - Keep module seams for future domains present and compile-safe even when behavior is deferred. - Keep dependency additions explicit and minimal in `cli/Cargo.toml`, and anchor dependency intent in lightweight compile-time code references (`cli/src/dependency_contract.rs`). - Route local Turso access through a dedicated adapter module (`cli/src/services/local_db.rs`) so command handlers do not expose low-level `turso` API details. 
- For placeholder commands that need real infrastructure checks, use a lazily initialized shared tokio current-thread runtime wrapper in the service layer (`cli/src/services/sync.rs`) and keep user-facing output explicit about remaining placeholder scope. +- For transient local IO/database hotspots, apply bounded resilience wrappers with explicit retry count, timeout, and capped backoff (`cli/src/services/resilience.rs`) and surface terminal failures with deterministic `Try:` remediation guidance. - For rollout health commands, prefer deterministic local diagnostics over implicit pass/fail behavior: report hook-path source, effective directories, required-hook checks, and actionable remediation text (`cli/src/services/doctor.rs`). - For future CLI domains, define trait-first service contracts with request/plan models in `cli/src/services/*` and keep placeholder implementations explicitly non-runnable until production behavior is approved. - Model deferred integration boundaries with concrete event/capability data structures (for example MCP file-cache snapshots/policies and cloud-sync checkpoints) so later tasks can implement behavior without reshaping public seams. @@ -101,6 +114,7 @@ - For hosted reconciliation observability, publish run-level mapped/unmapped counts, confidence histogram buckets, runtime timing, and normalized error-class labels so retry/quality drift can be monitored without requiring a full dashboard surface. - Keep crate-local onboarding docs in `cli/README.md` and sanity-check command examples against actual `sce` output whenever command messaging changes. - Keep targeted CLI command-surface verification in flake checks: `checks.<system>.cli-setup-command-surface` runs from `cli/` and executes `cargo fmt --check` plus focused setup-related tests (`help_text_mentions_setup_target_flags`, `parser_routes_setup`, `run_setup_reports`). 
+- Keep targeted setup integration verification in flake checks with `checks.<system>.cli-setup-integration`, which runs `cargo test --test setup_integration` from `cli/`. - In `cli/flake.nix`, select the Rust toolchain via an explicit Rust overlay (`rust-overlay`) and thread that toolchain through `makeRustPlatform` so CLI check/build derivations do not rely on implicit nixpkgs Rust defaults. - When the root flake imports a nested path flake that requires additional inputs (for example `rust-overlay` in `cli/flake.nix`), mirror those inputs in the root `inputs` block and wire `cli.inputs.<input>.follows` so root-level checks do not fail from missing nested flake arguments. - For installable CLI release surfaces in nested flakes, expose an explicit named package plus default alias (`packages.sce` and `packages.default = packages.sce`) and pair it with a runnable app output (`apps.sce`) that points to the packaged binary path. diff --git a/context/plans/sce-cli-agent-friendly-reliability-baseline.md b/context/plans/sce-cli-agent-friendly-reliability-baseline.md new file mode 100644 index 00000000..b1b6afac --- /dev/null +++ b/context/plans/sce-cli-agent-friendly-reliability-baseline.md @@ -0,0 +1,240 @@ +# Plan: sce-cli-agent-friendly-reliability-baseline + +## 1) Change summary + +Harden the `sce` CLI for agent-driven usage by making command output more machine-readable, error paths more actionable, and help/usage text example-rich while preserving current command intent. This plan targets a reliability baseline (not a full command-surface redesign) so agents can parse, retry, and compose command results with fewer ambiguities. It also addresses human/operator ergonomics by allowing setup config installation and required hook installation in a single `sce setup` invocation. + +Assumptions locked from clarification: +- Scope preset: `Reliability baseline`. 
+- Keep existing top-level commands (`help`, `setup`, `doctor`, `mcp`, `hooks`, `sync`) and avoid breaking behavior contracts unless required for determinism/actionability. +- Prefer additive AI-friendly UX improvements (structured output, better errors, stronger help examples) over broad architectural changes. +- Remove the current `setup` mode split that forces separate runs for config target setup vs `--hooks` installation. + +## 2) Success criteria + +- Key read/report commands expose deterministic structured output via `--format json` with stable field names and explicit status/result fields. +- Human-readable output remains deterministic and consistent across runs for the same inputs. +- Common failure modes return actionable messages with explicit remediation (`Try:` guidance and/or valid alternative command/flags). +- Interactive friction is reduced for automation paths by ensuring all required inputs are flag-addressable and clearly documented in help text. +- `sce setup` supports combined config + hook installation in one run for both interactive and non-interactive target selection flows. +- `setup` provides explicit non-interactive control switches so CI/automation can fail fast instead of prompting. +- Command outputs follow a stable stdout/stderr contract so stdout remains pipe-safe and stderr carries diagnostics/errors. +- Each command has command-local `--help` usage/examples and the CLI exposes a machine-readable `version` command. +- Error messages include stable error codes alongside actionable remediation guidance. +- CLI configuration precedence is explicit and deterministic (`flags > env > config file > defaults`) with inspect/validate surfaces. +- Exit code behavior is stable and documented by failure class for automation-safe handling. +- Runtime observability supports structured logs and deterministic log levels without polluting command payload output. 
+- Optional file logging is supported with safe defaults (bounded file behavior/rotation policy or explicit truncation policy) and redaction-safe content. +- Resilience controls (timeouts/retry/backoff where relevant) are explicit, bounded, and surfaced via actionable failure messages. +- Security hardening covers secret redaction, strict path validation, and safe file-permission handling on write/install flows. +- Shell completion artifacts are generated/documented and aligned with current command/flag docs. +- `--help` output includes concrete examples that an agent can copy/modify directly, including at least one JSON-output example. +- New parser/output/error behavior is covered by focused unit tests that lock output contracts. + +## 3) Constraints and non-goals + +Constraints: +- Do not introduce a full command taxonomy rewrite; keep current command ownership and routing model in `cli/src/app.rs` and `cli/src/services/*`. +- Maintain deterministic output ordering and avoid nondeterministic text fragments. +- Preserve placeholder safety boundaries for `mcp` and `sync` (no accidental production behavior expansion). +- Preserve backward-compatible `setup` behavior for existing single-purpose invocations (`sce setup --hooks` and `sce setup --opencode|--claude|--both`). +- Keep task slicing one-task/one-atomic-commit. + +Non-goals: +- Implementing real MCP execution or cloud sync execution. +- Replacing the full setup interaction model with a new wizard or TUI. +- Large refactors unrelated to output/error/help reliability. + +## 4) Task stack (`T01..T21`) + +- [x] T01: Define deterministic config model and precedence (status:done) + - Task ID: T01 + - Goal: Add a config contract that resolves values in deterministic order (`flags > env > config file > defaults`) and expose inspect/validate entrypoints. 
+ - Boundaries (in/out of scope): In: config model/types, parser integration, env mapping, config-file load/validation, and command help/docs; Out: remote config services. + - Done when: precedence is codified and tested, `sce config show` and `sce config validate` (or equivalent) return deterministic text/JSON output, and existing commands can consume resolved config without behavior drift. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T02: Establish stable exit-code contract (status:done) + - Task ID: T02 + - Goal: Define and enforce fixed exit-code classes for parse/validation/runtime/dependency failures. + - Boundaries (in/out of scope): In: top-level run/dispatch failure mapping and docs/tests for code meanings; Out: shell-specific wrapper behavior. + - Done when: representative failure paths return documented stable exit codes and tests assert mappings. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T03: Add structured observability contract (status:done) + - Task ID: T03 + - Goal: Introduce deterministic logging modes/levels (for example plain and JSON logs) that are separate from command result payloads. + - Boundaries (in/out of scope): In: logging facade/options and service integration points; Out: external telemetry backends. + - Done when: operators can set log level/format predictably, logs include stable event identifiers, and stdout payload contracts remain unchanged. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml`; `cargo check --manifest-path cli/Cargo.toml`. 
+ +- [x] T21: Add OpenTelemetry setup baseline (status:done) + - Task ID: T21 + - Goal: Add an OpenTelemetry-based observability setup path for the CLI so structured events can be exported through standard OTEL tooling while preserving command payload contracts. + - Boundaries (in/out of scope): In: OTEL bootstrap wiring, deterministic env/flag configuration for exporter mode, and tests/docs for setup behavior; Out: hosted telemetry backend provisioning and production collector deployment. + - Done when: `sce` can initialize OTEL instrumentation deterministically, export path configuration is explicit/actionable, and stdout/stderr payload boundaries remain contract-safe. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml`; `cargo check --manifest-path cli/Cargo.toml`. + - User-provided implementation context (locked): + - Instrumentation stack should use Rust `tracing` + `tracing-subscriber` with `tracing-opentelemetry` bridging into OpenTelemetry OTLP export (`opentelemetry`, `opentelemetry-sdk`, `opentelemetry-otlp`). + - Prefer app-embedded instrumentation and exporter wiring in the CLI runtime (not a separate "OTel CLI" binary). + - Baseline setup should follow standard flow: initialize tracer provider, attach OpenTelemetry layer to subscriber registry, run command with spans/events, and flush/shutdown provider before process exit. + - Endpoint/config should be env-addressable (for example `OTEL_EXPORTER_OTLP_ENDPOINT`), with deterministic defaults and actionable validation errors for invalid configuration. + - Keep command payload contract safe: observability output/export path must not pollute stdout command result payloads. + +- [x] T22: Add global config discovery aligned with Agent Trace state root (status:done) + - Task ID: T22 + - Goal: Add deterministic user-global config discovery for `sce config` and merge global+local config in memory, with local values overriding global values per key. 
+ - Boundaries (in/out of scope): In: platform-aware global path derivation, in-memory global+local merge behavior, explicit precedence integration with `--config` and `SCE_CONFIG_FILE`, clear source/merge reporting in `config show/validate`, and focused tests/docs/context updates; Out: config schema expansion beyond existing keys and migration tooling. + - Done when: when no explicit config path/env override is provided, `sce config` discovers both global and local files (when present), merges them in memory with local stronger than global per key, then applies env and flags on top; output contracts/tests document merged-source behavior deterministically. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml services::config::tests`; `cargo test --manifest-path cli/Cargo.toml app::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T04: Add file logging mode with safe defaults (status:done) + - Task ID: T04 + - Goal: Support optional log sink to file path with deterministic behavior and safe permission handling. + - Boundaries (in/out of scope): In: CLI flags/config for file logging, file-open/write/rotation-or-truncation policy, and tests; Out: remote log shipping. + - Done when: file logging can be enabled explicitly, output location/policy is documented, writes are deterministic, and failure cases are actionable. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T05: Add resilience policy for retries/timeouts/backoff (status:done) + - Task ID: T05 + - Goal: Define bounded retry/timeout behavior for eligible operations and surface retry outcomes clearly. + - Boundaries (in/out of scope): In: operation-level resilience wrappers for IO/process/database hotspots and user-facing retry diagnostics; Out: unbounded retry loops or hidden automatic mutation. 
+ - Done when: targeted operations use deterministic timeout/retry settings, retries are observable in logs, and terminal failures provide actionable next steps. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T06: Apply security hardening for CLI interfaces (status:done) + - Task ID: T06 + - Goal: Harden user input/output and filesystem interaction surfaces with secret redaction, path safety checks, and permission validation. + - Boundaries (in/out of scope): In: error/log redaction rules, path canonicalization/allow checks (including `--repo`), and install/write permission checks; Out: network auth redesign. + - Done when: sensitive values are redacted from diagnostics/logs, unsafe paths are rejected deterministically, and security-focused tests cover core threat paths. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T07: Add explicit non-interactive setup controls (status:done) + - Task ID: T07 + - Goal: Add `setup` flags that let operators opt out of prompts deterministically (for example fail-fast non-interactive mode) while preserving existing target-flag behavior. + - Boundaries (in/out of scope): In: setup parser/dispatch and setup usage text in `cli/src/app.rs` + `cli/src/services/setup.rs`; Out: replacing the interactive prompt engine or adding unrelated setup features. + - Done when: automation can run `setup` without ever entering prompts, and prompt-required paths return actionable non-interactive guidance. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests services::setup::tests`; `cargo check --manifest-path cli/Cargo.toml`. 
+ +- [x] T08: Enforce stdout/stderr output contract (status:done) + - Task ID: T08 + - Goal: Establish and apply a deterministic stream contract where primary result payloads go to stdout and diagnostics/errors go to stderr. + - Boundaries (in/out of scope): In: top-level run/error handling and affected command output paths; Out: changing core command semantics. + - Done when: success payloads are pipe-safe from stdout, and non-success diagnostics are emitted consistently via stderr. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T09: Add command-local help surfaces with examples (status:done) + - Task ID: T09 + - Goal: Ensure each command supports `--help` with concise usage and examples (including one JSON example where format applies). + - Boundaries (in/out of scope): In: parser/help surfaces for `setup`, `doctor`, `mcp`, `hooks`, and `sync`; Out: broad doc-site generation. + - Done when: `sce --help` works consistently and examples are deterministic/copy-ready. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T10: Add machine-readable version command (status:done) + - Task ID: T10 + - Goal: Introduce `sce version` with stable text and JSON output fields for runtime identification. + - Boundaries (in/out of scope): In: command surface, parser/dispatch, and version payload wiring (for example version/build metadata fields); Out: release pipeline redesign. + - Done when: `sce version` and `sce version --format json` return deterministic version metadata with tests locking field names. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests command_surface::tests`; `cargo check --manifest-path cli/Cargo.toml`. 
+ +- [x] T11: Introduce stable error-code taxonomy (status:done) + - Task ID: T11 + - Goal: Add stable error identifiers to actionable user-facing errors so automation can branch on codes and operators can search remediation docs quickly. + - Boundaries (in/out of scope): In: top-level parse/invocation error strings and selected service-validation failures; Out: internationalization or deep logging system changes. + - Done when: core user-facing errors include stable codes plus `Try:` guidance and tests lock representative code/message pairs. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests services::setup::tests services::hooks::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T12: Add shell completion generation and docs alignment (status:done) + - Task ID: T12 + - Goal: Provide shell completion artifacts (Bash/Zsh/Fish) and align command docs/help so completion, usage, and README examples match current flags/subcommands. + - Boundaries (in/out of scope): In: CLI completion command/surface and documentation alignment in `cli/README.md`; Out: external package-manager integration. + - Done when: completion outputs are generated deterministically, install/use instructions are documented, and docs/examples align with actual parser behavior. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T13: Add shared output-format contract and parser wiring (status:done) + - Task ID: T13 + - Goal: Introduce a single CLI-level output format contract (text/json) and route supported commands through it. + - Boundaries (in/out of scope): In: parsing/wiring in `cli/src/app.rs`, command-surface/help exposure, and any small shared output-type helpers; Out: changing command business logic beyond format selection. 
+ - Done when: supported commands accept `--format <text|json>` deterministically, invalid format values fail with actionable guidance, and default format remains stable. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T14: Enable single-run `setup` flow for config targets plus hooks (status:done) + - Task ID: T14 + - Goal: Refactor setup option parsing/dispatch so operators can install target config assets and required hooks in one invocation, including interactive default path and non-interactive target flags. + - Boundaries (in/out of scope): In: setup option model and dispatch in `cli/src/app.rs` and `cli/src/services/setup.rs`, setup usage text, and setup tests; Out: replacing `inquire` prompt technology or changing hook install semantics. + - Done when: commands like `sce setup --opencode --hooks` and interactive `sce setup` can complete both config install and hook install deterministically in one run, while legacy one-purpose invocations continue to work. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests services::setup::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T15: Implement deterministic JSON/text dual output for `doctor` (status:done) + - Task ID: T15 + - Goal: Extend doctor reporting to return a stable machine-readable JSON form while preserving readable text output. + - Boundaries (in/out of scope): In: `cli/src/services/doctor.rs` report rendering, JSON schema shaping, and tests; Out: changing doctor readiness semantics or required-hook policy. + - Done when: `doctor --format json` emits stable object structure (readiness, hook path source, repository/hook paths, hook states, diagnostics), and text output remains deterministic. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml services::doctor::tests`; `cargo check --manifest-path cli/Cargo.toml`. 
+ +- [x] T16: Standardize placeholder command output contracts for agent parsing (status:done) + - Task ID: T16 + - Goal: Make `mcp` and `sync` placeholder responses emit structured status payloads in JSON format and deterministic text summaries. + - Boundaries (in/out of scope): In: `cli/src/services/mcp.rs`, `cli/src/services/sync.rs`, and related tests; Out: enabling non-placeholder runtime behavior. + - Done when: both commands support `--format json` with stable fields indicating placeholder state, capabilities/checkpoints, and actionable next-step messaging. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml services::mcp::tests services::sync::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T17: Make parser and invocation errors consistently actionable (status:done) + - Task ID: T17 + - Goal: Normalize high-frequency parse/invocation errors to include explicit remediation examples (required flag guidance, valid alternatives, and targeted help pointers). + - Boundaries (in/out of scope): In: top-level parser and command-specific validation errors in `cli/src/app.rs` and relevant service parsers; Out: changing exit-code policy or introducing interactive recovery prompts. + - Done when: unknown command/option, missing required args, and incompatible-flag failures all provide deterministic actionable guidance suitable for automated retry. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml app::tests services::setup::tests services::hooks::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T18: Strengthen help/usage content with agent-oriented examples (status:done) + - Task ID: T18 + - Goal: Upgrade help text and setup usage docs with concise examples showing non-interactive usage, JSON output, and composable command flows. 
+ - Boundaries (in/out of scope): In: `cli/src/command_surface.rs`, setup usage text in `cli/src/services/setup.rs`, and `cli/README.md`; Out: large documentation restructuring outside CLI scope. + - Done when: `sce --help` and `sce setup --help` include clear usage blocks + concrete examples, including one-run setup+hooks examples, and README mirrors canonical examples without contradiction. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml command_surface::tests services::setup::tests`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T19: Add output-contract regression tests for determinism (status:done) + - Task ID: T19 + - Goal: Lock the new output and error contracts with targeted tests to prevent accidental format drift. + - Boundaries (in/out of scope): In: parser/service tests for JSON shape, required keys, deterministic field ordering expectations where applicable, and error text assertions; Out: introducing snapshot frameworks or broad new test infrastructure. + - Done when: deterministic contract tests exist for representative success/failure paths across updated commands and pass reliably. + - Verification notes (commands or checks): `cargo test --manifest-path cli/Cargo.toml`; `cargo check --manifest-path cli/Cargo.toml`. + +- [x] T20: Validation and cleanup (status:done) + - Task ID: T20 + - Goal: Run full verification, ensure no temporary scaffolding remains, and sync context artifacts to final current-state behavior. + - Boundaries (in/out of scope): In: final CLI verification pass, plan status updates, and context sync checks/updates for changed command contracts; Out: new feature work. + - Done when: all verification checks pass, plan task statuses are current, and context documentation reflects final command/output/error contracts. 
+ - Verification notes (commands or checks): `cargo fmt --manifest-path cli/Cargo.toml --all -- --check`; `cargo test --manifest-path cli/Cargo.toml`; `cargo build --manifest-path cli/Cargo.toml`; `nix run .#pkl-check-generated`; `nix flake check`. + +### T20 validation report + +- Commands run and exit codes: + - `cargo fmt --manifest-path cli/Cargo.toml --all -- --check` (exit 0) + - `cargo test --manifest-path cli/Cargo.toml` (exit 0) + - `cargo build --manifest-path cli/Cargo.toml` (exit 0) + - `nix run .#pkl-check-generated` (exit 0) + - `nix flake check` (exit 0) +- Key outputs: + - Rust test suite passed: `221 passed; 0 failed`. + - Generated output parity check reported: `Generated outputs are up to date.` + - Flake checks completed for this system; Nix reported other-system checks omitted unless `--all-systems` is requested. + - `cargo build` completed with existing `dead_code` warnings in `cli/src/services/hosted_reconciliation.rs` and no build failure. +- Failed checks and follow-ups: + - No failed checks. + - No in-scope follow-up fixes required for this task. +- Temporary scaffolding cleanup: + - No task-scoped temporary scaffolding found that required removal. +- Context sync verdict: + - Sync mode: verify-only root context pass (no cross-cutting behavior/policy/architecture/terminology changes introduced by T20). + - Verified against current code truth: `context/overview.md`, `context/architecture.md`, `context/glossary.md`, `context/patterns.md`, `context/context-map.md`. + - No additional context edits required. +- Success criteria verification summary: + - Reliability baseline criteria remain satisfied by passing CLI checks/tests and current context coverage for command/output/error contracts. + - Plan task stack is fully complete (`T01..T22` all done; no open unchecked tasks). +- Residual risks: + - Non-failing compile warnings remain in hosted reconciliation code paths; not in scope for this validation/cleanup task. 
+ +## 5) Open questions (if any) + +- None at plan time; scope is constrained to the reliability baseline selected during clarification. diff --git a/context/plans/sce-cli-setup-integration-test-improvements.md b/context/plans/sce-cli-setup-integration-test-improvements.md new file mode 100644 index 00000000..8263eb67 --- /dev/null +++ b/context/plans/sce-cli-setup-integration-test-improvements.md @@ -0,0 +1,174 @@ +# Plan: sce-cli-setup-integration-test-improvements + +## 1) Change summary +Expand the Rust binary-driven setup integration suite to cover `--repo` path canonicalization behavior, setup failure contracts, true interactive setup behavior through a PTY harness, hook update/backup edge cases, deterministic permission failures, and cross-platform validation improvements, while explicitly excluding CI trigger changes. + +## 2) Success criteria +- Integration coverage validates `sce setup --hooks --repo <path>` for both relative and absolute repository paths, including canonical path handling and reported repository/hooks directory output. +- Binary-level integration tests verify exit code and stderr contracts for `sce setup --hooks --repo /missing`, `sce setup --repo <path>` (without `--hooks`), and `sce setup --non-interactive` (without target). +- Interactive setup integration tests run through a PTY harness and cover OpenCode selection, cancellation flow, and non-TTY failure messaging. +- Integration tests verify hook update behavior by mutating an installed hook, rerunning setup, asserting `updated` status, and asserting backup creation. +- Integration tests verify backup suffix collision behavior by pre-creating backup targets and asserting deterministic next-suffix selection. +- Integration tests verify deterministic writability/permission failures for repo-root and hooks-directory write probes, including unix-only guarded read-only directory scenarios. 
+- Cross-platform validation runs on more than one OS and keeps assertions platform-appropriate for executable-bit and path handling differences. +- CI workflow trigger definitions remain unchanged. + +## 3) Constraints and non-goals +- In scope: `cli/tests/setup_integration.rs` scenario/harness expansion, setup-integration contract updates under `context/sce/`, and CI matrix/job updates needed for multi-OS validation without changing workflow trigger events. +- In scope: preserving existing user-facing setup/error wording and asserting against current contract text (without rewording runtime messages as part of this change). +- In scope: unix-only guards for permission scenarios that rely on POSIX read-only semantics. +- Out of scope: setup runtime behavior changes unrelated to making existing contracts testable. +- Out of scope: changing workflow trigger conditions (`on:` push/pull_request filters, branch filters). +- Out of scope: unrelated command domains outside setup/hook integration coverage. + +## 4) Task stack (T01..T09) +- [x] T01: Update setup integration-test contract for new scenario matrix (status:done) + - Task ID: T01 + - Goal: Refresh `context/sce/setup-nix-integration-test-contract.md` with canonical scenario IDs and assertion policy for repo-path, failure-contract, PTY-interactive, hook-update, backup-collision, permission, and cross-platform coverage. + - Boundaries (in/out of scope): + - In: scenario IDs, expected canonical signals, OS-guard guidance, and explicit note that CI triggers are unchanged. + - Out: test code implementation and CI YAML edits. + - Done when: + - Contract doc enumerates all new scenario classes with deterministic assertion anchors. + - Contract parity references include current setup UX/security behavior docs. + - Verification notes (commands or checks): + - Manual parity review against `context/sce/setup-githooks-cli-ux.md` and `context/sce/cli-security-hardening-contract.md`. 
+- [x] T02: Add `--repo` relative/absolute path integration scenarios (status:done) + - Task ID: T02 + - Goal: Add binary-driven integration tests that execute `sce setup --hooks --repo <path>` using relative and absolute paths, then assert canonical repo/hooks output and correct install location behavior. + - Boundaries (in/out of scope): + - In: test fixtures for relative/absolute path invocation, canonicalized output assertions, and filesystem truth checks. + - Out: runtime canonicalization logic changes. + - Done when: + - Both relative and absolute `--repo` scenarios pass with deterministic repository/hooks path assertions. + - Assertions prove hooks are installed in git-resolved target path. + - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration setup_hooks_repo -- --nocapture`. + +- [x] T03: Add binary-level setup failure-contract integration tests (status:done) + - Task ID: T03 + - Goal: Add integration tests for invalid setup invocations that assert process exit class and stderr contract text for the three requested failure modes. + - Boundaries (in/out of scope): + - In: tests for `/missing` repo path, `--repo` without `--hooks`, and `--non-interactive` without target. + - Out: changing error strings or failure-class mapping behavior. + - Done when: + - Each failure scenario asserts expected non-zero exit code and deterministic stderr content aligned with current user-facing text. + - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration setup_failure_contracts -- --nocapture`. + +- [x] T04: Add true interactive setup PTY integration coverage (status:done) + - Task ID: T04 + - Goal: Introduce PTY-backed integration tests that validate real prompt behavior for `sce setup`, including OpenCode selection and cancel flow, and validate non-TTY failure messaging for interactive mode without a TTY. 
+ - Boundaries (in/out of scope): + - In: PTY harness utilities (test-only), deterministic prompt interaction assertions, and explicit non-TTY invocation assertion. + - Out: changing interactive UX semantics or prompt copy. + - Done when: + - PTY flow can select OpenCode and complete setup with expected outcomes. + - PTY flow can cancel and assert non-destructive cancellation result. + - Non-TTY scenario asserts actionable failure guidance. + - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration setup_interactive_pty -- --nocapture`. + +- [x] T05: Add hook-update integration scenario with backup assertion (status:done) + - Task ID: T05 + - Goal: Extend hook scenarios to mutate one previously installed required hook, rerun setup, and assert `updated` outcome plus backup file creation. + - Boundaries (in/out of scope): + - In: one-hook mutation fixture, rerun output assertions, and backup artifact presence/content sanity checks. + - Out: altering update/backup runtime implementation. + - Done when: + - Test deterministically demonstrates `updated` status for mutated hook. + - Backup artifact path exists and is associated with replaced pre-rerun hook content. + - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration setup_hooks_update_path -- --nocapture`. + +- [x] T06: Add backup suffix collision integration scenario (status:done) + - Task ID: T06 + - Goal: Add integration coverage for pre-existing backup path collisions (for example `.opencode.backup`, `.opencode.backup.1`) and assert next available suffix selection. + - Boundaries (in/out of scope): + - In: deterministic pre-created backup fixtures and rerun assertions for chosen backup target. + - Out: changing backup naming algorithm. + - Done when: + - Collision scenario confirms setup selects the correct next backup suffix. 
+ - Assertions remain deterministic regardless of temp-root path randomness. + - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration setup_backup_suffix_collision -- --nocapture`. + +- [x] T07: Add writability/permission failure integration scenarios (status:done) + - Task ID: T07 + - Goal: Add deterministic integration tests for setup failures when repo root or hooks directory is not writable, including unix-guarded read-only directory cases. + - Boundaries (in/out of scope): + - In: permission fixture prep, expected failure-class/stderr assertions, and cfg-guarded unix-only read-only checks. + - Out: Windows ACL-specific simulation beyond portable deterministic test scope. + - Done when: + - Non-writable repo-root and hooks-dir failure paths are asserted with deterministic diagnostics. + - Unix-only read-only tests are guarded and stable. + - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration setup_permission_failures -- --nocapture`. + +- [x] T08: Expand multi-OS validation and platform-aware assertions (status:done) + - Task ID: T08 + - Goal: Ensure setup integration validation runs on more than one OS by updating existing CI job matrix (without trigger changes) and tightening tests for platform-appropriate executable-bit/path assertions. + - Boundaries (in/out of scope): + - In: CI job matrix adjustments for `cli-integration-tests` workflow and OS-conditional assertion handling in setup integration tests. + - Out: modifications to workflow trigger conditions or unrelated CI pipelines. + - Done when: + - Existing integration workflow validates setup suite on at least two OS targets. + - Test assertions are explicit about platform differences and remain deterministic per OS. + - Verification notes (commands or checks): + - Local: `nix run .#cli-integration-tests`. + - CI evidence: successful multi-OS `cli-integration-tests` workflow runs. 
+ +- [x] T09: Validation and cleanup (status:done) + - Task ID: T09 + - Goal: Run final verification set, ensure no flaky/temporary scaffolding remains, and sync context to final current-state contracts. + - Boundaries (in/out of scope): + - In: full test/flake validation, cleanup, and context sync updates for durable behavior changes. + - Out: new feature expansion beyond approved setup integration improvements. + - Done when: + - Setup integration suite passes with the added scenarios. + - Required repo verification passes. + - Context files reflect final behavior and verification entrypoints. + - Verification notes (commands or checks): + - `nix run .#cli-integration-tests`. + - `nix run .#pkl-check-generated`. + - `nix flake check`. + +## 5) Open questions +- None. + +## 6) Validation report (T09) + +### Commands run +| Command | Exit code | Result | +|---------|-----------|--------| +| `nix run .#cli-integration-tests` | 0 | 19 tests passed | +| `nix run .#pkl-check-generated` | 0 | "Generated outputs are up to date." 
| +| `nix flake check` | 0 | All checks passed | + +### Temporary scaffolding cleanup +- Reviewed `cli/tests/setup_integration.rs` — no temporary scaffolding found +- All test utilities are production-grade and reusable + +### Context verification +- `context/sce/setup-nix-integration-test-contract.md` — reflects final scenario matrix +- `context/context-map.md` — includes integration test contract link (line 31) +- `context/overview.md` — documents `cli-integration-tests` entrypoint and CI workflow + +### Success-criteria verification summary +| Criterion | Status | Evidence | +|-----------|--------|----------| +| `--repo` relative/absolute path coverage | ✅ | `setup_hooks_repo_relative_path`, `setup_hooks_repo_absolute_path` tests pass | +| Failure-contract tests (3 modes) | ✅ | `setup_fail_repo_missing`, `setup_fail_repo_without_hooks`, `setup_fail_noninteractive_without_target` tests pass | +| PTY interactive coverage | ✅ | `setup_interactive_pty_select_opencode`, `setup_interactive_pty_cancel`, `setup_interactive_nontty_fail` tests pass | +| Hook update + backup assertion | ✅ | `setup_hooks_update_path` test passes | +| Backup suffix collision | ✅ | `setup_backup_suffix_collision` test passes | +| Permission failure scenarios | ✅ | `setup_permission_fail_repo_root_nonwritable`, `setup_permission_fail_hooks_dir_nonwritable`, `setup_permission_fail_unix_readonly_guard` tests pass | +| Cross-platform validation | ✅ | Platform-aware assertions with `cfg(unix)`/`cfg(not(unix))` guards | +| CI trigger unchanged | ✅ | No modifications to workflow `on:` triggers | + +### Residual risks +- None. All success criteria satisfied. + +### Plan status +**COMPLETE** — All tasks (T01–T09) executed and verified. 
diff --git a/context/plans/sce-nix-setup-hooks-integration-tests.md b/context/plans/sce-nix-setup-hooks-integration-tests.md new file mode 100644 index 00000000..9d9f4f24 --- /dev/null +++ b/context/plans/sce-nix-setup-hooks-integration-tests.md @@ -0,0 +1,131 @@ +# Plan: sce-nix-setup-hooks-integration-tests + +## 1) Change summary +Add a Nix-driven Rust integration-test slice that builds the `sce` CLI and validates setup target installation plus required hook installation by invoking the built binary directly in ephemeral repositories, including rerun idempotency and supported hook-path modes, with Turso local state isolated to each test directory. + +## 2) Success criteria +- A deterministic Nix test entrypoint runs setup integration tests without ad-hoc local scripting. +- Integration tests are implemented in Rust and execute the compiled `sce` binary (not `cargo run`) for setup and hook scenarios. +- Integration coverage verifies `sce setup --opencode`, `--claude`, and `--both` install outcomes in temporary repositories. +- Integration coverage verifies `sce setup --hooks` for default `.git/hooks` and custom `core.hooksPath` modes. +- Integration coverage verifies rerun idempotency semantics (`skipped` outcomes where applicable) for both target-asset and hook installation flows. +- Assertions prefer structured signals: filesystem/git state as canonical truth for setup outcomes, and JSON output only where command contracts already support it. +- Test runtime ensures Turso local instance/state used by invoked CLI paths is created under the per-test temporary directory (no shared global state). +- The new integration slice is wired into repository verification flow and remains discoverable in context docs. + +## 3) Constraints and non-goals +- In scope: integration-test harness and fixtures for setup/hook scenarios, Nix wiring to execute the suite, and context discoverability updates for the new test entrypoint. 
+- In scope: filesystem and git-backed temporary repository scenarios that exercise real setup command paths. +- In scope: Rust integration tests that build once and invoke the resulting `sce` binary path for all scenario assertions. +- In scope: JSON output assertions only for existing JSON-capable commands used as setup-adjacent checks (for example `doctor --format json`), without changing setup output contracts. +- In scope: deterministic per-test Turso local-state placement inside test temp directories via explicit environment/runtime setup. +- Out of scope: changes to setup runtime semantics, hook template content, or command UX beyond what tests require. +- Out of scope: expanding coverage to non-setup command domains (`doctor`, `sync`, `mcp`) except where setup verification depends on them. +- Non-goal: introducing network-dependent or flaky integration behavior. + +## 4) Task stack (T01..T06) +- [x] T01: Define Nix setup integration-test contract and scenario matrix (status:done) + - Task ID: T01 + - Goal: Specify canonical integration scenarios, expected outcomes, and test boundaries for target install + hooks install coverage using Rust tests that execute the compiled binary. + - Boundaries (in/out of scope): + - In: scenario inventory for `--opencode|--claude|--both`, hooks default/custom path modes, rerun idempotency expectations, and assertion-source policy (filesystem/git truth vs JSON output where available). + - Out: implementation of tests or Nix wiring. + - Done when: + - A focused context contract doc records scenario matrix, expected result signals, and deterministic fixture assumptions. + - Scenario IDs map 1:1 to planned integration tests for implementation continuity. + - Verification notes (commands or checks): + - Contract parity review against existing setup behavior docs: `context/sce/setup-githooks-cli-ux.md` and `context/sce/setup-githooks-install-flow.md`. 
+ +- [x] T02: Add integration-test harness for ephemeral git repositories (status:done) + - Task ID: T02 + - Goal: Implement reusable Rust integration-test support that provisions isolated repos, compiles `sce`, runs the built binary for setup invocations, and captures deterministic assertions. + - Boundaries (in/out of scope): + - In: temp repo lifecycle helpers, binary-path resolution helpers, invocation wrappers around compiled `sce`, Turso-local-state directory setup under each test temp root, and common assertion utilities for install outcomes and filesystem state. + - Out: test scenario coverage details and Nix entrypoint wiring. + - Done when: + - Integration tests can create/teardown isolated repositories and execute the compiled `sce` binary with deterministic stderr/stdout capture. + - Harness guarantees Turso local state for each test run is rooted under that test's temporary directory. + - Harness supports both default and custom hooks-path repository preparation. + - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration -- --nocapture`. + - Inspect fixture assertions/logging to confirm Turso paths resolve under test temp roots. + +- [x] T03: Implement OpenCode/Claude/Both setup integration scenarios (status:done) + - Task ID: T03 + - Goal: Add Rust integration tests that validate target asset installation for `--opencode`, `--claude`, and `--both`, including rerun idempotency outcomes via compiled-binary invocations. + - Boundaries (in/out of scope): + - In: per-target install assertions (expected directories/files), deterministic status lines, and second-run idempotency checks from compiled binary execution. + - Out: hook installation assertions (covered in T04). + - Done when: + - Integration tests cover all three target-selection modes with stable assertions. + - Rerun checks confirm deterministic no-op-or-skipped style outcomes per current setup contract. 
+ - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration setup_targets -- --nocapture`. + +- [x] T04: Implement hook setup integration scenarios for default and custom hooks paths (status:done) + - Task ID: T04 + - Goal: Add Rust integration tests for `sce setup --hooks` across default `.git/hooks` and per-repo `core.hooksPath`, including rerun idempotency and executable-state checks via compiled-binary execution. + - Boundaries (in/out of scope): + - In: required-hook presence checks (`pre-commit`, `commit-msg`, `post-commit`), status-line assertions, executable-bit assertions, and rerun verification. + - Out: doctor command behavioral testing beyond optional post-setup JSON sanity checks needed by setup outcomes. + - Done when: + - Both hook-path modes are covered by integration tests with deterministic assertions. + - Rerun behavior confirms stable `installed/updated/skipped` outcome semantics per mode. + - Optional post-setup `doctor --format json` checks remain deterministic where included. + - Verification notes (commands or checks): + - `cargo test --manifest-path cli/Cargo.toml --test setup_integration setup_hooks -- --nocapture`. + +- [x] T05: Wire Nix entrypoint and check integration for setup test suite (status:done) + - Task ID: T05 + - Goal: Expose and integrate a Nix-runner path for the Rust setup integration suite so contributors and CI-style flows can deterministically build `sce` and execute binary-driven integration tests. + - Boundaries (in/out of scope): + - In: flake app/check wiring, invocation command contract, and verification-flow documentation updates for compiled-binary integration execution. + - Out: unrelated Nix refactors or broader CI workflow additions unless directly required by entrypoint wiring. + - Done when: + - A documented Nix command runs the new setup integration suite from repo root. 
+ - `nix flake check` includes or validates the new setup integration slice according to repo check conventions. + - Verification notes (commands or checks): + - `nix run .#cli-integration-tests`. + - `nix flake check`. + +- [x] T06: Validation and cleanup (status:done) + - Task ID: T06 + - Goal: Run final verification, clean temporary artifacts, and sync context to current-state behavior for the new Nix integration-test contract. + - Boundaries (in/out of scope): + - In: final command validation, artifact cleanup, and context sync confirmation. + - Out: net-new feature expansion beyond approved tasks. + - Done when: + - Verification evidence confirms all success criteria. + - Temporary test artifacts are removed or explicitly documented. + - Verification confirms no Turso local-state artifacts leak outside test temp directories. + - Context discoverability reflects the new test entrypoint and no setup-test drift remains. + - Verification notes (commands or checks): + - `nix run .#pkl-check-generated`. + - `nix flake check`. + - `nix run .#cli-integration-tests`. + +### T06 validation report (2026-03-05) +- Commands run: + - `nix run .#pkl-check-generated` (exit `0`) -> `Generated outputs are up to date.` + - `nix run .#cli-integration-tests` (exit `0`) -> `setup_integration` passed (`6 passed; 0 failed`), including `harness_scopes_turso_state_home_to_test_temp_root`. + - `nix flake check` (exit `0`) -> evaluated `cli-setup-command-surface` and `cli-setup-integration` checks successfully. +- Lint/format/full-project verification: + - Best available repository-wide validation is `nix flake check`; it passed and includes configured CLI check derivations. +- Temporary scaffolding cleanup: + - No temporary scaffolding produced by this task; repository `context/tmp/` contains only `.gitignore`. +- Context sync verification: + - Added CI workflow `.github/workflows/cli-integration-tests.yml`. 
+ - Updated root context contracts in `context/overview.md`, `context/architecture.md`, `context/patterns.md`, and `context/glossary.md`. +- Failed checks and follow-ups: + - No command failures. + - `nix run .#cli-integration-tests` emits existing Rust dead-code warnings from `cli/src/services/hosted_reconciliation.rs`; non-blocking for this task. +- Success-criteria evidence: + - Deterministic Nix setup integration entrypoint exists and runs in CI + local verification (`nix run .#cli-integration-tests`). + - Binary-driven setup/hook scenarios remain covered by passing integration tests. + - Turso state isolation is validated by passing harness test and no leaked local DB artifacts in repo workspace. + - Discoverability/context alignment is updated with explicit CI workflow and entrypoint references. +- Residual risks: + - CI runtime can vary with Nix cache availability; no functional drift observed in current verification pass. + +## 5) Open questions +- None. diff --git a/context/sce/agent-trace-hook-doctor.md b/context/sce/agent-trace-hook-doctor.md index a9dc9513..465fb5c2 100644 --- a/context/sce/agent-trace-hook-doctor.md +++ b/context/sce/agent-trace-hook-doctor.md @@ -8,10 +8,12 @@ Task `agent-trace-attribution-no-git-wrapper` `T07` adds local rollout validatio - Entrypoint: `sce doctor` - Service implementation: `cli/src/services/doctor.rs` -- Command dispatch: `cli/src/app.rs` (`Command::Doctor`) +- Command dispatch: `cli/src/app.rs` (`Command::Doctor(DoctorRequest)`) - Command surface status: implemented in `cli/src/command_surface.rs` -`sce doctor` always returns a deterministic text report with: +`sce doctor` supports deterministic dual output via `--format `. 
+ +Text output includes: - readiness verdict (`ready` or `not ready`) - hook-path source (`default (.git/hooks)`, per-repo `core.hooksPath`, or global `core.hooksPath`) @@ -19,6 +21,15 @@ Task `agent-trace-attribution-no-git-wrapper` `T07` adds local rollout validatio - required hook checks for `pre-commit`, `commit-msg`, `post-commit` - actionable diagnostics for missing or misconfigured hooks +JSON output includes stable top-level fields: + +- `status`, `command` +- `readiness` (`ready` or `not_ready`) +- `hook_path_source` (`default`, `local_config`, `global_config`) +- `repository_root`, `hooks_directory` +- `hooks[]` with `name`, `path`, `exists`, `executable`, `state` +- `diagnostics[]` + ## Health validation rules `sce doctor` resolves git state using CLI git commands: @@ -48,5 +59,7 @@ If no diagnostics are present, readiness is `ready`. - misconfigured state (required hook present but non-executable) - post-setup ready state after required hooks are installed - post-setup ready state for per-repo custom `core.hooksPath` +- request parsing defaults and `--format json` support +- JSON report shape contract (`status`, `command`, `readiness`, `hook_path_source`, `hooks`, `diagnostics`) -`cli/src/app.rs` includes command-level routing/exit success coverage for `sce doctor`. +`cli/src/app.rs` includes command-level routing/exit success coverage for `sce doctor`, including `--format json` routing. diff --git a/context/sce/cli-error-code-taxonomy.md b/context/sce/cli-error-code-taxonomy.md new file mode 100644 index 00000000..c5fa79b1 --- /dev/null +++ b/context/sce/cli-error-code-taxonomy.md @@ -0,0 +1,39 @@ +# CLI Error-Code Taxonomy + +## Scope + +This document defines the stable user-facing error-code contract rendered by `sce` runtime diagnostics in `cli/src/app.rs`. +It complements the numeric process exit-code classes documented in `context/sce/cli-exit-code-contract.md`. 
+
+## Stable diagnostic code classes
+
+- `SCE-ERR-PARSE`: top-level parse failures before command invocation.
+- `SCE-ERR-VALIDATION`: invocation/argument validation failures after parsing.
+- `SCE-ERR-RUNTIME`: runtime execution failures after successful parse + validation.
+- `SCE-ERR-DEPENDENCY`: startup dependency failures before parsing/dispatch.
+
+## Rendering contract
+
+- User-facing diagnostics are emitted on `stderr` as: `Error [<code>]: <message>`.
+- If a diagnostic message does not already include `Try:`, runtime appends class-default remediation guidance.
+- If the message already contains `Try:`, runtime preserves the original remediation text and does not append a second one.
+- Diagnostic text is still redaction-filtered through `services::security::redact_sensitive_text` before emission.
+
+## Actionable parser/invocation guidance contract
+
+- High-frequency parse/invocation failures use explicit `Try:` remediations instead of generic usage-only hints.
+- Top-level unknown command/option messages include targeted retry guidance (`sce --help` and command-local `sce <command> --help`).
+- Setup invocation validation failures (`--repo` without `--hooks`, mutually exclusive target flags, unexpected args) include concrete valid alternatives.
+- Hooks invocation validation failures (missing hook subcommand, missing `commit-msg` message file, unknown subcommand) include command-form examples that are copyable for retry automation.
+- This actionable-message normalization is owned by parser/validation paths in `cli/src/app.rs`, `cli/src/services/setup.rs`, and `cli/src/services/hooks.rs`.
+
+## Ownership
+
+- `FailureClass` in `cli/src/app.rs` owns class selection.
+- `ClassifiedError` in `cli/src/app.rs` owns stable code assignment.
+- `write_error_diagnostic` in `cli/src/app.rs` owns final code-bearing stderr rendering.
+
+## Determinism and testing
+
+- Error code value is derived from failure class and is stable for a given class.
+- Code-bearing stderr output and remediation presence are locked by `app::tests`. diff --git a/context/sce/cli-exit-code-contract.md b/context/sce/cli-exit-code-contract.md new file mode 100644 index 00000000..55ae16aa --- /dev/null +++ b/context/sce/cli-exit-code-contract.md @@ -0,0 +1,28 @@ +# CLI Exit-Code Contract + +## Scope + +This document defines the stable `sce` process exit-code classes used by `cli/src/app.rs`. +The contract is intentionally class-based so automation can branch on failure category without parsing free-form error text. + +## Exit-code classes + +- `0` (`success`): command completed successfully. +- `2` (`parse_failure`): top-level CLI parsing failed (for example unknown top-level command/option or malformed command token). +- `3` (`validation_failure`): command/subcommand arguments parsed but failed invocation validation (for example incompatible or missing command-local arguments). +- `4` (`runtime_failure`): command invocation was valid but runtime execution failed (filesystem/process/environment/runtime operation errors). +- `5` (`dependency_failure`): startup dependency contract check failed before command parsing/dispatch. + +## Classification ownership + +- `cli/src/app.rs` owns classification via `FailureClass` and maps it to numeric codes through `FailureClass::exit_code`. +- Top-level parse failures are classified in `parse_command`/`parse_subcommand`. +- Command-local argument validation failures are classified in `parse_config_subcommand`, `parse_setup_subcommand`, `parse_hooks_subcommand`, and `parse_non_setup_subcommand`. +- Runtime failures are classified in `dispatch` when service execution fails after valid parsing/validation. +- Dependency failures are classified in startup via the dependency-check closure passed to `run_with_dependency_check`. + +## Determinism requirements + +- Exit code is derived only from failure class and is stable for a given failure category. 
+- Error text remains on `stderr`; exit-code class is independent from message wording. +- Representative class mapping is locked by `app::tests`. diff --git a/context/sce/cli-observability-contract.md b/context/sce/cli-observability-contract.md new file mode 100644 index 00000000..af041af1 --- /dev/null +++ b/context/sce/cli-observability-contract.md @@ -0,0 +1,58 @@ +# CLI Observability Contract + +## Scope + +This document defines the implemented structured observability baseline for `sce` runtime execution. +It covers deterministic stderr logger controls, optional OpenTelemetry export bootstrap, and event emission boundaries in `cli/src/services/observability.rs` and `cli/src/app.rs`. + +## Runtime controls + +- `SCE_LOG_LEVEL` selects log threshold with allowed values `error`, `warn`, `info`, `debug`. +- `SCE_LOG_FORMAT` selects log format with allowed values `text`, `json`. +- `SCE_LOG_FILE` optionally enables a file log sink at the provided file path. +- `SCE_LOG_FILE_MODE` controls file-write policy with allowed values `truncate` and `append`. +- `SCE_LOG_FILE_MODE` requires `SCE_LOG_FILE`. +- Defaults are deterministic: `SCE_LOG_LEVEL=info` and `SCE_LOG_FORMAT=text` when env keys are unset. +- When file logging is enabled and `SCE_LOG_FILE_MODE` is unset, default policy is `truncate`. +- Invalid observability env values fail invocation validation with actionable error text. +- OpenTelemetry bootstrap is opt-in via `SCE_OTEL_ENABLED` (`true|false|1|0`, default `false`). +- When OpenTelemetry is enabled, exporter config is env-addressable: + - `OTEL_EXPORTER_OTLP_ENDPOINT` (default `http://127.0.0.1:4317`, must be absolute `http(s)` URL) + - `OTEL_EXPORTER_OTLP_PROTOCOL` (`grpc` or `http/protobuf`, default `grpc`) +- Invalid OTEL env values fail invocation validation with explicit remediation guidance. + +## Emission contract + +- Log output is emitted to `stderr` only; command result payloads remain on `stdout`. 
+- When `SCE_LOG_FILE` is set, the same rendered log lines are also mirrored to the configured file sink. +- Each emitted record includes a stable `event_id`. +- Current app-level event identifiers: + - `sce.app.start` + - `sce.command.parsed` + - `sce.command.completed` + - `sce.command.parse_failed` + - `sce.command.failed` +- Event records include deterministic metadata keys used by automation (`command`, `failure_class`, `component` when applicable). +- Logger events are mirrored into tracing events so OTEL export can observe the same lifecycle signal set when enabled. +- App runtime initializes tracing subscriber context before parse/dispatch and shuts down tracer provider on process exit. + +## Format contract + +- `text` format emits single-line key/value records with fixed key ordering: `log_format`, `level`, `event_id`, `message`, then optional fields. +- `json` format emits a single-line object with fixed top-level keys: `log_format`, `level`, `event_id`, `message`, `fields`. +- Logger threshold behavior is deterministic and severity-based (`error < warn < info < debug`). +- File sink writes are deterministic line-based writes with immediate flush after each record. + +## File sink safety contract + +- On file-sink initialization, parent directories are created when missing. +- On Unix, log file permissions are tightened to owner-only (`0600`) when group/other bits are present. +- File open failures include actionable remediation guidance (verify writable path or unset `SCE_LOG_FILE`). +- File write failures are reported to `stderr` as diagnostics and do not alter command `stdout` payload contracts. + +## Ownership and verification + +- `cli/src/services/observability.rs` owns env parsing, level filtering, record rendering, and optional file sink lifecycle/permission enforcement. +- `cli/src/services/observability.rs` also owns OTEL runtime setup (`TelemetryRuntime`) and deterministic endpoint/protocol validation. 
+- `cli/src/app.rs` owns lifecycle event emission around parse/dispatch success and failure paths and wraps dispatch inside the observability subscriber context.
+- Contract behavior is covered by `services::observability::tests` and exercised in end-to-end app command tests.
diff --git a/context/sce/cli-security-hardening-contract.md b/context/sce/cli-security-hardening-contract.md
new file mode 100644
index 00000000..66359786
--- /dev/null
+++ b/context/sce/cli-security-hardening-contract.md
@@ -0,0 +1,44 @@
+# SCE CLI security hardening contract
+
+## Scope
+
+Task `sce-cli-agent-friendly-reliability-baseline` `T06` adds baseline security hardening for CLI diagnostics/logging and setup filesystem interfaces.
+
+## Implemented behavior
+
+- Sensitive values in user-facing diagnostics/log lines are redacted before emission.
+- `sce setup --hooks --repo <path>` now canonicalizes and validates the supplied repository path before hook installation.
+- Setup install flows now run explicit write-permission probes on target directories before staging/swap writes.
+ +## Redaction contract + +`cli/src/services/security.rs` provides `redact_sensitive_text(...)` and is applied to: + +- top-level CLI error emission in `cli/src/app.rs` +- observability stderr/file sink output in `cli/src/services/observability.rs` +- git-command stderr diagnostics surfaced by setup hook flows in `cli/src/services/setup.rs` + +Current redaction coverage includes: + +- assignment-style secrets (`password=...`, `token=...`, `api_key=...`) +- JSON key/value secrets (for the same key set) +- `Authorization`/`Bearer` token forms + +## Path and permission safety contract + +`cli/src/services/setup.rs` enforces: + +- `--repo` path must resolve to an existing directory +- repository path is canonicalized before hook setup operations +- setup install roots and hooks directories must pass a deterministic write probe before writes + +Write probe behavior is owned by `ensure_directory_is_writable(...)` in `cli/src/services/security.rs`. + +## Verification anchors + +- `cargo test --manifest-path cli/Cargo.toml services::security::tests` +- `cargo test --manifest-path cli/Cargo.toml services::setup::tests` +- `cargo test --manifest-path cli/Cargo.toml services::observability::tests` +- `cargo test --manifest-path cli/Cargo.toml app::tests` +- `cargo check --manifest-path cli/Cargo.toml` +- `cargo build --manifest-path cli/Cargo.toml` diff --git a/context/sce/cli-shared-output-format-contract.md b/context/sce/cli-shared-output-format-contract.md new file mode 100644 index 00000000..4815d6e8 --- /dev/null +++ b/context/sce/cli-shared-output-format-contract.md @@ -0,0 +1,27 @@ +# CLI Shared Output-Format Contract + +## Scope + +`T13` introduces one canonical output-format contract for CLI commands that support dual text/JSON rendering. + +- Canonical type lives at `cli/src/services/output_format.rs` as `OutputFormat`. +- Allowed values are `text` and `json`. 
+- Parsing is command-context aware via `OutputFormat::parse(raw, help_command)` so invalid values include command-specific help guidance.
+
+## Current command integration
+
+- `cli/src/services/config.rs` uses the shared type through `ReportFormat = OutputFormat`.
+- `cli/src/services/version.rs` uses the shared type through `VersionFormat = OutputFormat`.
+- Both commands keep deterministic default `text` when `--format` is omitted.
+- Both commands keep stable `--format <text|json>` usage in help text and parser behavior.
+
+## Validation/error contract
+
+- Invalid values fail deterministically as validation errors with this canonical structure:
+  - `Invalid --format value '<value>'. Valid values: text, json. Run '<command> --help' to see valid usage.`
+- Missing `--format` value behavior remains command parser-owned (`Option '--format' requires a value`).
+
+## Determinism notes
+
+- The shared parser only accepts lowercase canonical values (`text`, `json`).
+- Existing command business logic and payload shape remain unchanged; only format parsing contract ownership is centralized.
diff --git a/context/sce/cli-shell-completion-contract.md b/context/sce/cli-shell-completion-contract.md
new file mode 100644
index 00000000..f3bef057
--- /dev/null
+++ b/context/sce/cli-shell-completion-contract.md
@@ -0,0 +1,45 @@
+# CLI Shell Completion Contract
+
+## Scope
+
+Defines the implemented `sce completion` contract for deterministic shell completion script generation.
+
+## Command surface
+
+- Command: `sce completion --shell <shell>`
+- Help: `sce completion --help`
+- Required option: `--shell`
+- Supported shells: `bash`, `zsh`, `fish`
+
+## Parsing and validation
+
+- Requires exactly one shell via `--shell <shell>`.
+- Rejects missing shell with deterministic guidance:
+  - `Missing required option '--shell <shell>'. Run 'sce completion --help' to see valid usage.`
+- Rejects unknown flags with deterministic guidance:
+  - `Unknown completion option '--<flag>'. 
Run 'sce completion --help' to see valid usage.` +- Rejects unexpected positional args with deterministic guidance: + - `Unexpected completion argument '<arg>'. Run 'sce completion --help' to see valid usage.` +- Rejects unsupported shell values with deterministic guidance: + - `Unsupported shell '<shell>'. Valid values: bash, zsh, fish.` + +## Output contract + +- Output is a shell script payload emitted on stdout for redirection/eval. +- Scripts are deterministic for identical binary + input shell. +- Generated scripts encode current parser-valid command/flag/subcommand surfaces for: + - top-level commands: `help`, `config`, `setup`, `doctor`, `mcp`, `hooks`, `sync`, `version`, `completion` + - completion-specific flags and values: `--shell` with `bash|zsh|fish` + +## Implementation ownership + +- Command parse/dispatch wiring: `cli/src/app.rs` +- Top-level command catalog/help row: `cli/src/command_surface.rs` +- Completion parser/rendering and shell scripts: `cli/src/services/completion.rs` +- Operator docs/install examples: `cli/README.md` + +## Verification coverage + +- `app::tests` lock completion command routing and help behavior. +- `services::completion::tests` lock parse validation and deterministic script rendering. +- `command_surface::tests` lock top-level help discoverability for `completion`. diff --git a/context/sce/cli-stdout-stderr-contract.md b/context/sce/cli-stdout-stderr-contract.md new file mode 100644 index 00000000..04d59ebe --- /dev/null +++ b/context/sce/cli-stdout-stderr-contract.md @@ -0,0 +1,28 @@ +# CLI stdout/stderr contract + +## Scope + +This document defines the implemented stream contract for CLI command payload and diagnostics in `cli/src/app.rs`. + +## Contract + +- Command success payloads are emitted to `stdout` only through app-level stream handling. +- User-facing diagnostics and failures are emitted to `stderr` only. 
+- Failure diagnostics are emitted as `Error [<code>]: ...` on `stderr`, where `<code>` is the stable class-based `SCE-ERR-*` identifier from `ClassifiedError` in `cli/src/app.rs`; diagnostics are passed through shared redaction (`services::security::redact_sensitive_text`) before emission. +- Command handlers now return payload strings to the app dispatcher; the app owns stream selection and final emission. + +## Implementation surface + +- `run_with_dependency_check_and_streams(...)` is the app-level stream boundary for production and tests. +- `try_run_with_dependency_check(...)` performs parse + dispatch and returns payload text or classified errors. +- `dispatch(...)` returns payload text for each command path rather than writing directly to process streams. +- `write_stdout_payload(...)` handles success payload writes. +- `write_error_diagnostic(...)` handles redacted error writes. + +See also: `context/sce/cli-error-code-taxonomy.md` for the canonical error-code classes and `Try:` remediation injection rules. + +## Determinism notes + +- Stream routing is centralized in one app-level path to avoid per-command stream drift. +- Exit code class mapping remains unchanged (`parse`, `validation`, `runtime`, `dependency`). +- Observability lifecycle logs remain on `stderr` by contract and are independent from command payload output. diff --git a/context/sce/cli-version-command-contract.md b/context/sce/cli-version-command-contract.md new file mode 100644 index 00000000..29f28613 --- /dev/null +++ b/context/sce/cli-version-command-contract.md @@ -0,0 +1,50 @@ +# CLI Version Command Contract + +## Scope + +Defines the implemented `sce version` runtime contract for deterministic human and machine-readable runtime identification. + +## Command surface + +- Command: `sce version` +- Help: `sce version --help` +- Format option: `--format <format>` +- Default format: `text` + +## Parsing and validation + +- Accepts only `--format <format>` (plus `--help`/`-h` when used alone). 
+- Rejects unknown flags with deterministic guidance: + - `Unknown version option '--<flag>'. Run 'sce version --help' to see valid usage.` +- Rejects unexpected positional args with deterministic guidance: + - `Unexpected version argument '<arg>'. Run 'sce version --help' to see valid usage.` +- Rejects unsupported formats with deterministic guidance: + - `Unsupported --format value '<value>'. Valid values: text, json.` + +## Output contract + +Text output (`sce version`): + +- Single deterministic line: + - `<binary> <version> (<build_profile>)` + +JSON output (`sce version --format json`): + +- Stable object fields: + - `status`: always `"ok"` + - `command`: always `"version"` + - `binary`: compile-time package/binary name + - `version`: compile-time package version + - `build_profile`: compile-time build profile (`"debug"` or `"release"`) + +## Implementation ownership + +- Command parse/dispatch wiring: `cli/src/app.rs` +- Top-level command catalog/help row: `cli/src/command_surface.rs` +- Version parser/rendering: `cli/src/services/version.rs` + +## Verification coverage + +- `app::tests` lock `version` command routing and help behavior. +- `services::version::tests` lock default/JSON parsing and stable JSON field presence. +- `command_surface::tests` lock top-level help discoverability for `version`. diff --git a/context/sce/setup-githooks-cli-ux.md b/context/sce/setup-githooks-cli-ux.md index 3e5d7cb1..7bda699f 100644 --- a/context/sce/setup-githooks-cli-ux.md +++ b/context/sce/setup-githooks-cli-ux.md @@ -2,12 +2,16 @@ ## Scope -Task `sce-setup-githooks-any-repo` `T04` defines the `sce setup` command-surface behavior for required-hook setup mode. +Task `sce-setup-githooks-any-repo` `T04` defines the `sce setup` command-surface behavior for required-hook setup and combined target+hooks runs. 
## Command surface - `sce setup --hooks` - `sce setup --hooks --repo <path>` +- `sce setup --opencode --hooks` +- `sce setup --claude --hooks` +- `sce setup --both --hooks` +- `sce setup` (interactive target selection plus hook install in one run) `--hooks` runs required-hook installation (`pre-commit`, `commit-msg`, `post-commit`) through the setup service hook installer. When `--repo` is omitted, setup targets the current working directory. @@ -17,13 +21,18 @@ When `--repo` is omitted, setup targets the current working directory. Validation is deterministic and enforced during setup option resolution: - `--repo` requires `--hooks` -- `--hooks` cannot be combined with `--opencode`, `--claude`, or `--both` +- `--hooks` can be combined with exactly one target flag to run config install and required-hook install in one invocation - `--repo` may only be provided once and must include a value +- `--repo` path is canonicalized and must resolve to an existing directory before hook setup runs -Target-install mode remains unchanged: +Target-install mode contract: - `sce setup` defaults to interactive target selection +- default interactive `sce setup` installs selected config assets and required hooks in one run - `--opencode`, `--claude`, and `--both` remain mutually exclusive for non-interactive target install +- `--non-interactive` is an explicit fail-fast control that disables prompting and requires one target flag (`--opencode`, `--claude`, or `--both`) +- legacy one-purpose invocations remain valid (`sce setup --hooks` for hooks-only, and `sce setup --opencode|--claude|--both` for config-only) +- interactive setup without a TTY returns actionable guidance to rerun with `--non-interactive` plus a target flag ## Output contract @@ -34,6 +43,8 @@ Successful hook setup emits deterministic human/automation-friendly output inclu - per-hook outcome lines with canonical lowercase statuses (`installed`, `updated`, `skipped`) - backup status per hook (`backup: '<path>'` or `backup: not 
needed`) +When config install and hook install run together, CLI output is deterministic: config-install summary first, one blank separator line, then hook-install summary. + ## Implementation anchors - `cli/src/app.rs` diff --git a/context/sce/setup-githooks-install-flow.md b/context/sce/setup-githooks-install-flow.md index e7c17985..d1a12623 100644 --- a/context/sce/setup-githooks-install-flow.md +++ b/context/sce/setup-githooks-install-flow.md @@ -22,8 +22,12 @@ For the provided repository path, setup resolves git truth before any writes: 1. `git rev-parse --show-toplevel` 2. `git rev-parse --git-path hooks` +Before those git operations, setup canonicalizes/validates the user-provided repository path (`--repo`) as an existing directory. + If the hooks path is relative, it is resolved against the git toplevel. +Before staged hook writes, setup runs explicit directory write-permission probes for the resolved repository root and effective hooks directory to fail fast on non-writable targets. + This keeps behavior compatible with: - default `.git/hooks` diff --git a/context/sce/setup-nix-integration-test-contract.md b/context/sce/setup-nix-integration-test-contract.md new file mode 100644 index 00000000..6675d241 --- /dev/null +++ b/context/sce/setup-nix-integration-test-contract.md @@ -0,0 +1,147 @@ +# SCE setup Nix integration-test contract + +## Scope + +Plan `sce-cli-setup-integration-test-improvements` task `T01` defines the canonical scenario matrix and assertion policy for setup integration testing executed through the deterministic Nix entrypoint. + +This contract is intentionally current-state oriented and implementation-facing. It defines what `cli/tests/setup_integration.rs` must cover and how assertions stay deterministic across platforms. + +## Required execution model + +- Integration tests run against the compiled `sce` binary path (not `cargo run`). +- Scenarios execute in isolated ephemeral repositories with deterministic fixture setup. 
+- Assertions use filesystem and git-resolved state as source of truth for setup outcomes. +- Exit-code and stderr assertions are required for invocation validation/runtime-failure contracts. +- Test runtime isolates local state roots per test temp directory (no shared user-global state). + +## Canonical scenario matrix (IDs and assertion anchors) + +### Baseline target-install coverage + +- `SETUP-TARGET-OPENCODE-RUN1`: `sce setup --opencode` installs `.opencode/` assets in a fresh repo. +- `SETUP-TARGET-OPENCODE-RERUN`: rerun and assert deterministic rerun behavior. +- `SETUP-TARGET-CLAUDE-RUN1`: `sce setup --claude` installs `.claude/` assets in a fresh repo. +- `SETUP-TARGET-CLAUDE-RERUN`: rerun and assert deterministic rerun behavior. +- `SETUP-TARGET-BOTH-RUN1`: `sce setup --both` installs both target trees. +- `SETUP-TARGET-BOTH-RERUN`: rerun and assert deterministic rerun behavior. + +Deterministic assertion anchors: + +- expected target tree presence/absence per mode +- stable setup summary markers +- deterministic rerun status semantics + +### Baseline hook-install coverage + +- `SETUP-HOOKS-DEFAULT-RUN1`: `sce setup --hooks` installs required hooks in default `.git/hooks` mode. +- `SETUP-HOOKS-DEFAULT-RERUN`: rerun and assert deterministic per-hook `installed|updated|skipped` semantics resolve to stable outcomes. +- `SETUP-HOOKS-CUSTOM-RUN1`: configure per-repo `core.hooksPath`, run `sce setup --hooks`, and assert required hooks install in resolved custom path. +- `SETUP-HOOKS-CUSTOM-RERUN`: rerun custom-path hook setup and assert deterministic idempotency. 
+ +Deterministic assertion anchors: + +- printed repository root and effective hooks directory +- per-hook outcome lines with canonical lowercase statuses +- executable-state assertions on platforms that support executable-bit checks + +### `--repo` canonicalization path scenarios + +- `SETUP-HOOKS-REPO-RELATIVE`: invoke `sce setup --hooks --repo <relative-path>` and assert canonical repo-root/hook-dir output plus filesystem truth. +- `SETUP-HOOKS-REPO-ABSOLUTE`: invoke `sce setup --hooks --repo <absolute-path>` and assert canonical repo-root/hook-dir output plus filesystem truth. + +Deterministic assertion anchors: + +- output repository root equals canonicalized git-resolved repo path +- output hooks directory equals effective git-resolved hooks target +- required hook files exist in resolved hooks directory + +### Setup failure-contract scenarios + +- `SETUP-FAIL-REPO-MISSING`: `sce setup --hooks --repo /missing` asserts non-zero exit class and deterministic stderr guidance. +- `SETUP-FAIL-REPO-WITHOUT-HOOKS`: `sce setup --repo <path>` asserts deterministic validation failure contract (`--repo` requires `--hooks`). +- `SETUP-FAIL-NONINTERACTIVE-WITHOUT-TARGET`: `sce setup --non-interactive` asserts deterministic validation failure contract requiring one target flag. + +Deterministic assertion anchors: + +- process exit code class matches setup failure type +- stderr contains contract-stable guidance text + +### True interactive PTY scenarios + +- `SETUP-PTY-SELECT-OPENCODE`: PTY-backed `sce setup` flow selects OpenCode and asserts successful install outcomes. +- `SETUP-PTY-CANCEL`: PTY-backed `sce setup` flow cancels selection and asserts non-destructive cancellation outcome. +- `SETUP-NONTTY-INTERACTIVE-FAIL`: non-TTY interactive invocation asserts deterministic actionable guidance. 
+ +Deterministic assertion anchors: + +- prompt visibility and selection/cancel control flow through PTY +- success/cancel terminal outcomes consistent with current setup UX contract +- non-TTY error guidance includes non-interactive+target remediation path + +### Hook update + backup scenarios + +- `SETUP-HOOKS-UPDATE-MUTATED`: mutate one installed required hook, rerun `sce setup --hooks`, assert `updated` status and backup creation. +- `SETUP-HOOKS-BACKUP-COLLISION`: pre-create backup suffix collisions (for example `.backup`, `.backup.1`), rerun setup, assert deterministic next-suffix selection. + +Deterministic assertion anchors: + +- mutated hook transitions to `updated` +- backup path emitted and backup artifact exists +- collision path selection matches next available deterministic suffix + +### Permission and writability failure scenarios + +- `SETUP-PERM-REPO-ROOT-NONWRITABLE`: repo-root write-probe failure path asserts deterministic setup failure. +- `SETUP-PERM-HOOKS-DIR-NONWRITABLE`: hooks-directory write-probe failure path asserts deterministic setup failure. +- `SETUP-PERM-UNIX-READONLY-GUARD`: unix-guarded read-only-directory scenario validates portable deterministic behavior under POSIX permissions. + +Deterministic assertion anchors: + +- failure class and stderr guidance match write-probe contract +- unix-only scenarios are `cfg(unix)`-guarded and stable + +### Cross-platform validation scenarios + +- `SETUP-PLATFORM-MULTIOS-MATRIX`: setup integration validation runs on at least two OS targets. +- `SETUP-PLATFORM-ASSERTION-GUARDS`: executable-bit/path assertions are platform-aware and deterministic for each OS. 
+ +Deterministic assertion anchors: + +- CI evidence shows multi-OS setup integration execution +- per-OS assertions avoid false failures from platform-specific path/permission semantics + +## Assertion signal policy + +- Canonical signals: + - repository filesystem state (installed paths, required files, backup artifacts) + - git-resolved effective hooks directory for default and custom `core.hooksPath` + - process exit-code class and contract-stable stderr guidance for failure paths +- Secondary signals: + - deterministic CLI status lines for setup/hook outcomes +- Non-canonical signals: + - non-contract free-form wording details outside stable guidance anchors + - side effects outside the scenario temp root + +## OS guard policy + +- Permission cases that require POSIX read-only semantics must be `cfg(unix)`-guarded. +- Executable-bit assertions are required only where platform semantics support deterministic checks. +- Path assertions must normalize/canonicalize before equality checks when path style differs by OS. + +## CI trigger constraint + +- This plan does not change CI workflow trigger definitions (`on:` push/pull_request filters or branch filters). +- Multi-OS validation changes, when needed, are limited to job matrix/runtime configuration only. + +## Verification entrypoints + +- Scenario-focused local execution is anchored by targeted `cargo test --test setup_integration <scenario>` invocations per task. +- Deterministic Nix integration entrypoint remains `nix run .#cli-integration-tests`. 
+ +## Parity anchors + +This contract is kept in parity with: + +- `context/sce/setup-githooks-cli-ux.md` +- `context/sce/cli-security-hardening-contract.md` +- `context/sce/setup-githooks-install-flow.md` diff --git a/flake.nix b/flake.nix index 15bbe31e..e8012b47 100644 --- a/flake.nix +++ b/flake.nix @@ -248,6 +248,40 @@ ''; }; + cliIntegrationTestsApp = pkgs.writeShellApplication { + name = "cli-integration-tests"; + runtimeInputs = [ + pkgs.git + pkgs.nix + ]; + text = '' + set -euo pipefail + + usage() { + cat <<'EOF' + Usage: nix run .#cli-integration-tests [-- --help] + + Deterministic flake entrypoint for setup integration tests. + Runs the Rust setup integration suite from cli/tests/setup_integration.rs. + EOF + } + + case "''${1:-}" in + -h|--help) + usage + exit 0 + ;; + esac + + repo_root="$(git rev-parse --show-toplevel 2>/dev/null || true)" + if [ -z "''${repo_root}" ]; then + repo_root="$(pwd)" + fi + + exec nix develop "''${repo_root}" -c cargo test --manifest-path cli/Cargo.toml --test setup_integration -- --nocapture + ''; + }; + agnixLspShim = pkgs.writeShellScriptBin "agnix-lsp" '' set -euo pipefail @@ -275,6 +309,7 @@ in { checks.cli-setup-command-surface = cli.checks.${system}.cli-setup-command-surface; + checks.cli-setup-integration = cli.checks.${system}.cli-setup-integration; apps.sync-opencode-config = { type = "app"; @@ -300,6 +335,14 @@ }; }; + apps.cli-integration-tests = { + type = "app"; + program = "${cliIntegrationTestsApp}/bin/cli-integration-tests"; + meta = { + description = "Run setup integration tests via Rust harness"; + }; + }; + devShells.default = pkgs.mkShell { packages = with pkgs; [ bun @@ -333,6 +376,7 @@ echo "- sync-opencode-config: nix run .#sync-opencode-config" echo "- sync-opencode-config help: nix run .#sync-opencode-config -- --help" echo "- pkl-check-generated: nix run .#pkl-check-generated" + echo "- cli-integration-tests: nix run .#cli-integration-tests" ''; }; }