diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100644 index 0000000..9565ea3 --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,8 @@ +#!/usr/bin/env sh +set -eu + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd "$REPO_ROOT" + +echo "[pre-commit] packaging sanity checks" +npm run verify:pre-commit diff --git a/.githooks/pre-push b/.githooks/pre-push new file mode 100644 index 0000000..8b24ba9 --- /dev/null +++ b/.githooks/pre-push @@ -0,0 +1,8 @@ +#!/usr/bin/env sh +set -eu + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd "$REPO_ROOT" + +echo "[pre-push] tarball install smoke" +npm run verify:pre-push diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7449c28..e088269 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,6 +44,44 @@ jobs: fi - run: npm test + bundle-smoke: + name: esbuild bundle smoke test (Node ${{ matrix.node-version }}) + runs-on: ubuntu-latest + needs: test + strategy: + fail-fast: false + matrix: + node-version: [18.x, 20.x, 22.x] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: npm + - run: npm ci + - run: npm run build + - name: Shebang count must be exactly 1 + run: | + COUNT=$(grep -c "#!/usr/bin/env node" dist/index.js) + if [ "$COUNT" -ne 1 ]; then + echo "FAIL: Expected 1 shebang, found $COUNT" + exit 1 + fi + echo "OK: shebang count = $COUNT" + - name: Node.js syntax check + run: node --check dist/index.js + - name: --version smoke test (exits 0, outputs correct semver) + run: | + PKG=$(node -p "require('./package.json').version") + CLI=$(node dist/index.js --version) + echo "package.json=$PKG bundle=$CLI" + if [ "$PKG" != "$CLI" ]; then + echo "FAIL: version mismatch" + exit 1 + fi + - name: Bundle size check + run: npm test -- tests/build/ + offline-smoke: name: Offline size budgets runs-on: ubuntu-latest @@ -89,6 +127,21 @@ jobs: exit 1 fi + pack-install-smoke: + name: Packed 
install smoke (esbuild — matches publish) + runs-on: ubuntu-latest + needs: test + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 20.x + cache: npm + - run: npm ci + - run: npm run build + - name: npm pack -> npm install tarball -> switchbot --version + run: npm run smoke:pack-install + policy-schema-sync: name: Policy schema sync with skill repo runs-on: ubuntu-latest diff --git a/.github/workflows/npm-published-smoke.yml b/.github/workflows/npm-published-smoke.yml new file mode 100644 index 0000000..6c0668f --- /dev/null +++ b/.github/workflows/npm-published-smoke.yml @@ -0,0 +1,162 @@ +name: npm published smoke + +on: + workflow_run: + workflows: ['Publish to npm'] + types: [completed] + workflow_dispatch: + inputs: + version: + description: 'Published npm version to verify (defaults to package.json from checked-out commit)' + required: false + +jobs: + smoke: + if: > + github.event_name == 'workflow_dispatch' || + (github.event.workflow_run.conclusion == 'success' && + github.event.workflow_run.event == 'release') + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }} + + - uses: actions/setup-node@v4 + with: + node-version: 20.x + registry-url: https://registry.npmjs.org + + - name: Verify credentials present + env: + TOKEN: ${{ secrets.SWITCHBOT_TOKEN }} + SECRET: ${{ secrets.SWITCHBOT_SECRET }} + run: | + if [ -z "$TOKEN" ] || [ -z "$SECRET" ]; then + echo "SWITCHBOT_TOKEN / SWITCHBOT_SECRET not set in repo secrets" + exit 1 + fi + + - name: Verify npm token present + env: + TOKEN: ${{ secrets.NPM_TOKEN }} + run: | + if [ -z "$TOKEN" ]; then + echo "NPM_TOKEN not set in repo secrets" + exit 1 + fi + + - name: Resolve target version + id: version + run: | + if [ -n "${{ inputs.version }}" ]; then + VERSION="${{ inputs.version }}" + else + VERSION=$(node -p 
"require('./package.json').version") + fi + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + echo "target_version=$VERSION" + + - name: Resolve current latest dist-tag + id: latest + run: | + LATEST=$(npm view @switchbot/openapi-cli dist-tags.latest) + echo "version=$LATEST" >> "$GITHUB_OUTPUT" + echo "current_latest=$LATEST" + + - name: Wait for npm package to become available + id: wait_package + env: + VERSION: ${{ steps.version.outputs.version }} + run: | + for i in $(seq 1 24); do + if [ "${{ github.event_name }}" = "workflow_run" ]; then + FOUND=$(npm view "@switchbot/openapi-cli@next" version 2>/dev/null || true) + if [ "$FOUND" = "$VERSION" ]; then + echo "npm package is available on next: $FOUND" + exit 0 + fi + echo "waiting for @switchbot/openapi-cli@$VERSION to appear on npm dist-tag next ($i/24); current next=$FOUND" + else + FOUND=$(npm view "@switchbot/openapi-cli@$VERSION" version 2>/dev/null || true) + if [ "$FOUND" = "$VERSION" ]; then + echo "npm package version is available: $FOUND" + exit 0 + fi + echo "waiting for @switchbot/openapi-cli@$VERSION to appear on npm ($i/24)" + fi + sleep 10 + done + echo "Timed out waiting for @switchbot/openapi-cli@$VERSION on npm" + exit 1 + + - name: Install published package in a clean temp project + id: install_package + env: + VERSION: ${{ steps.version.outputs.version }} + run: | + TMPDIR=$(mktemp -d) + echo "TMPDIR=$TMPDIR" >> "$GITHUB_ENV" + cd "$TMPDIR" + npm init -y >/dev/null 2>&1 + npm install "@switchbot/openapi-cli@$VERSION" + + - name: Binary and offline smoke + id: offline_smoke + env: + TMPDIR: ${{ env.TMPDIR }} + VERSION: ${{ steps.version.outputs.version }} + run: | + cd "$TMPDIR" + ACTUAL=$(npx --no-install switchbot --version) + test "$ACTUAL" = "$VERSION" + npx --no-install switchbot --help >/dev/null + npx --no-install switchbot schema export --compact >/dev/null + npx --no-install switchbot capabilities --json | jq -e '.data.commandMeta != null' >/dev/null + + - name: Live smoke with 
configured credentials + id: live_smoke + env: + TMPDIR: ${{ env.TMPDIR }} + SWITCHBOT_TOKEN: ${{ secrets.SWITCHBOT_TOKEN }} + SWITCHBOT_SECRET: ${{ secrets.SWITCHBOT_SECRET }} + run: | + cd "$TMPDIR" + npx --no-install switchbot doctor --json | jq -e '.data.summary != null' >/dev/null + npx --no-install switchbot devices list --json | jq -e '.data.deviceList != null or .data.infraredRemoteList != null' >/dev/null + + - name: Promote verified version to latest + if: success() + env: + VERSION: ${{ steps.version.outputs.version }} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: | + npm dist-tag add "@switchbot/openapi-cli@$VERSION" latest + echo "Promoted @switchbot/openapi-cli@$VERSION to dist-tag latest" + + - name: Deprecate failed version + if: > + failure() && + steps.wait_package.outcome == 'success' && + ( + steps.install_package.outcome == 'failure' || + steps.offline_smoke.outcome == 'failure' + ) + env: + VERSION: ${{ steps.version.outputs.version }} + PREVIOUS_LATEST: ${{ steps.latest.outputs.version }} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: | + npm deprecate "@switchbot/openapi-cli@$VERSION" "Published to dist-tag next but failed package smoke tests. Install @switchbot/openapi-cli@${PREVIOUS_LATEST} or use dist-tag latest." 
+ echo "Deprecated @switchbot/openapi-cli@$VERSION after package smoke failure" + + - name: Cleanup temp project + if: always() + env: + TMPDIR: ${{ env.TMPDIR }} + run: | + if [ -n "$TMPDIR" ] && [ -d "$TMPDIR" ]; then + rm -rf "$TMPDIR" + fi diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index b1c5c8e..1a68cbd 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -28,6 +28,9 @@ jobs: echo "Tag $TAG_VERSION does not match package.json version $PKG_VERSION" exit 1 fi - - run: npm publish --provenance --access public + - name: Smoke test packed npm artifact + run: npm run smoke:pack-install + - name: Publish package to npm dist-tag next + run: npm publish --tag next --provenance --access public env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 43ff01f..a295434 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,8 +7,35 @@ All notable changes to `@switchbot/openapi-cli` are documented in this file. The format is loosely based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [3.2.2] - 2026-04-26 + +### Changed — release pipeline + +- Release pipeline unified: `npm run build` is now the single source for the + published tarball. It runs a 5-stage `scripts/build.mjs` orchestrator + (clean → typecheck → bundle → copy-assets → ensure-binary). `prepublishOnly`, + `verify:pre-commit`, `verify:pre-push`, `publish.yml`, and the `bundle-smoke` + / `pack-install-smoke` CI jobs all call `npm run build` by name — no job + re-implements the build steps and no other script writes to `dist/`. +- Removed `npm run build:prod` and `npm run clean` — both are folded into + `scripts/build.mjs`. +- Added `npm run typecheck` (`tsc --noEmit`) as the local "does it still + compile?" escape hatch. 
+- Split `scripts/copy-assets.mjs` responsibility into two scripts with one + failure mode each: `copy-assets.mjs` only copies policy assets, and the + new `scripts/ensure-binary.mjs` asserts the shebang is present on + `dist/index.js` and `chmod 0755`s it. `ensure-binary.mjs` is a regression + guard — it fails loudly if the esbuild banner drops the shebang, rather + than silently repairing it the way `copy-assets.mjs` used to. + ## [3.2.1] - 2026-04-25 +> **Deprecated on npm.** The initial `3.2.1` publish shipped a broken bin +> (missing shebang / exec bit after `npm pack`). It has been rolled back +> from `main` and relanded in `3.2.2`; install `@switchbot/openapi-cli@3.2.2` +> or later. The feature list below is retained as the historical record of +> what `3.2.1` intended to deliver and what `3.2.2` now ships. + ### Added — plan resource model, MCP risk profiles, rules safety primitives - `switchbot plan save [file]` — persist a validated plan to `~/.switchbot/plans/.json` @@ -32,6 +59,28 @@ This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - `rules lint` now validates `hysteresis` / `requires_stable_for` duration syntax and warns when `hysteresis` and `requires_stable_for` are both set. +### Changed — release pipeline + +- **Single publish source**: `publish.yml` now runs `npm run build:prod` + (esbuild) to match what `prepublishOnly` produces at `npm publish` time. The + tarball validated by `smoke:pack-install` is now byte-identical to the + tarball that actually ships to the registry — no artifact swap during + publish. +- Pre-publish `smoke:pack-install` runs in `publish.yml` before `npm publish`, + and the same smoke runs locally via `pre-push` hook (`verify:pre-push`) and + on every PR in CI (`pack-install-smoke`). +- `scripts/copy-assets.mjs` now injects the `#!/usr/bin/env node` shebang into + `dist/index.js` and chmods it to `0755` after every build, so the npm bin + entry is always executable. 
+- New `npm-published-smoke.yml` workflow verifies published tarballs on the + npm registry, auto-promotes `next → latest` on success, and auto-deprecates + on package-install/offline smoke failures only (never on live API flakes). +- `bundle-smoke` CI job is now a blocking matrix across Node 18/20/22 (was + single-node Node 20, advisory), so the esbuild bundle must start cleanly + on every supported Node version before a PR can merge. +- See [`docs/release-pipeline.md`](./docs/release-pipeline.md) for the full + gate sequence and invariants. + ## [3.2.0] - 2026-04-25 ### Added — daemon, upgrade-check, scenes validate/simulate, rules summary diff --git a/README.md b/README.md index 458f7b5..00a0002 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,7 @@ Under the hood every surface shares the same catalog, cache, and HMAC client — - 🎨 **Dual output modes** — colorized tables by default; `--json` passthrough for `jq` and scripting - 🔐 **Secure credentials** — HMAC-SHA256 signed requests; config file written with `0600`; env-var override for CI - 🔍 **Dry-run mode** — preview every mutating request before it hits the API -- 🧪 **Fully tested** — 1882 Vitest tests, mocked axios, zero network in CI +- 🧪 **Fully tested** — 1959 Vitest tests, mocked axios, zero network in CI - ⚡ **Shell completion** — Bash / Zsh / Fish / PowerShell ## Requirements @@ -894,7 +894,7 @@ Queries the npm registry for the latest published version and compares it agains ```json { - "current": "3.2.1", + "current": "3.2.2", "latest": "4.0.0", "upToDate": false, "updateAvailable": true, @@ -1123,7 +1123,7 @@ npm install npm run dev -- # Run from TypeScript sources via tsx npm run build # Compile to dist/ -npm test # Run the Vitest suite (1882 tests) +npm test # Run the Vitest suite (1959 tests) npm run test:watch # Watch mode npm run test:coverage # Coverage report (v8, HTML + text) ``` @@ -1205,7 +1205,7 @@ src/ ├── format.ts # renderRows / filterFields / output-format dispatch ├── audit.ts # 
JSONL audit log writer └── quota.ts # Local daily-quota counter -tests/ # Vitest suite (1882 tests, mocked axios, no network) +tests/ # Vitest suite (1959 tests, mocked axios, no network) ``` ### Release flow @@ -1219,6 +1219,8 @@ git push --follow-tags Then on GitHub → **Releases → Draft a new release → select tag → Publish**. The `publish.yml` workflow runs tests, verifies the tag matches `package.json`, and publishes `@switchbot/openapi-cli` to npm with [provenance](https://docs.npmjs.com/generating-provenance-statements). +See [`docs/release-pipeline.md`](./docs/release-pipeline.md) for the full pre-publish and post-publish verification flow (local hooks → CI → `publish.yml` → `npm-published-smoke.yml`). + ## License [MIT](./LICENSE) © chenliuyun diff --git a/docs/release-pipeline.md b/docs/release-pipeline.md new file mode 100644 index 0000000..7854e75 --- /dev/null +++ b/docs/release-pipeline.md @@ -0,0 +1,163 @@ +# Release pipeline + +This document describes how `@switchbot/openapi-cli` goes from commit to npm +registry, and the invariants that keep the published artifact safe. + +## Single publish source + +There is exactly one way to produce the release artifact: + +``` +npm run build → node scripts/build.mjs +``` + +Every script on the release path — `prepublishOnly`, `verify:pre-commit`, +`verify:pre-push`, `publish.yml`, `ci.yml/bundle-smoke`, `ci.yml/pack-install-smoke` — +calls `npm run build` by name. No job re-implements any of the steps, and no +other script writes to `dist/`. 
+ +### The five stages of `scripts/build.mjs` + +| # | Stage | Script | Responsibility | +|---|---|---|---| +| 1 | clean | inline | remove `dist/` so nothing stale leaks into the tarball | +| 2 | typecheck | `tsc --noEmit` | all types must compile before we bundle | +| 3 | bundle | `scripts/bundle.mjs` | esbuild produces the single-file `dist/index.js` (shebang via `banner.js`) | +| 4 | copy-assets | `scripts/copy-assets.mjs` | copy `src/policy/{schema,examples}` → `dist/policy/...` | +| 5 | ensure-binary | `scripts/ensure-binary.mjs` | assert the shebang is present and `chmod 0755` on `dist/index.js` | + +Each stage does exactly one thing. First non-zero exit aborts the build. + +### Why `ensure-binary.mjs` is a guard, not a repair + +The shebang (`#!/usr/bin/env node`) is injected at bundle time by +`scripts/bundle.mjs` via the esbuild `banner.js` option. `ensure-binary.mjs` +re-reads `dist/index.js` and **verifies** that the first bytes are the +expected shebang — if not, it exits non-zero with a pointer to the banner +config. + +Previously, `copy-assets.mjs` silently **repaired** a missing shebang by +prepending it at the end of the build. That masked the root cause (a +change to the banner config would not surface at build time). The current +split is: + +- `bundle.mjs` — *produces* the shebang via banner. +- `ensure-binary.mjs` — *asserts* the shebang exists. Never patches. + +If anything ever drops the banner line, `npm run build` fails loudly at +stage 5 with a message pointing to `scripts/bundle.mjs`. + +`npm pack` follows `"files": ["dist", "README.md", "LICENSE"]` in +`package.json`, so whatever ends up in `dist/` after stage 5 is what ships. 
+ +## Gates before `npm publish` + +``` +git commit ──▶ pre-commit hook ─── verify:pre-commit + (npm run build + tests/version.test.ts) + +git push ──▶ pre-push hook ──── verify:pre-push + (npm run build + version test + + smoke:pack-install) + +open PR ──▶ ci.yml ──────────── docs-lint + test matrix (Node 18/20/22) + bundle-smoke (Node 18/20/22) + offline-smoke (size budgets) + pack-install-smoke (matches publish) + policy-schema-sync + +merge PR ──▶ main + +release ──▶ publish.yml ─────── 1. npm ci + 2. npm run build + 3. npm test + 4. tag == package.json version + 5. npm run smoke:pack-install ◀── last gate + 6. npm publish --tag next + └── prepublishOnly: test + build + smoke + (same commands as steps 2-5 — no drift) +``` + +Because step 2 and `prepublishOnly` both call `npm run build`, the tarball +validated by `smoke:pack-install` in step 5 is byte-identical to the +tarball `npm publish` uploads in step 6. No artifact swap happens in between. + +The critical pre-publish gate is step 5. It runs +`scripts/smoke-pack-install.mjs`, which: + +- Runs `npm pack` on the freshly-built tarball. +- Installs the tarball into a throwaway temp project. +- Executes `node_modules/.bin/switchbot --version` and compares the output to + `package.json.version`. + +If the shebang is missing, the bin entry is not marked executable, the version +drifts, or any deps are missing — the CLI fails to run and the smoke test exits +non-zero. `npm publish` does not run. + +## Post-publish defense-in-depth + +``` +npm-published-smoke.yml (triggered by publish.yml completion): + 1. wait_package — wait for @next to appear on the registry + 2. install_package — install in a clean temp project + 3. offline_smoke — --version, --help, schema export, capabilities + 4. live_smoke — doctor, devices list (uses real credentials) + 5. 
promote to @latest ◀── only if all four above pass + OR + npm deprecate ◀── only on install_package / offline_smoke failure + (never on live_smoke — API flakes should not + auto-deprecate a package) +``` + +This workflow runs *after* `npm publish`. It is defense-in-depth only; the +pre-publish gates are what keep bad artifacts off the registry in the first +place. + +## Invariants + +Changes to the release pipeline must preserve these invariants: + +1. **One command produces the release artifact.** `npm run build` is the + only path that writes `dist/` for publish. `publish.yml`, `prepublishOnly`, + `verify:pre-push`, and both `bundle-smoke` + `pack-install-smoke` jobs must + all call it by name — never re-implement steps. + +2. **One artifact is smoked.** `smoke:pack-install` always runs against the + `dist/` that `npm run build` just produced. No other script writes to + `dist/` between the build and the smoke. + +3. **One failure mode per script.** `copy-assets.mjs` can fail because an + asset is missing. `ensure-binary.mjs` can fail because the shebang is + missing or the output is absent. No script silently repairs the output of + another. + +4. **`prepublishOnly` and `publish.yml` do not drift.** Both run + `npm test && npm run build && npm run smoke:pack-install`. Any edit that + changes one must change the other in the same commit. + +5. **`publish.yml` must run `smoke:pack-install` before `npm publish`.** If + this gate is removed or skipped, a broken tarball can reach the registry. + +6. **Auto-deprecate must never fire on `live_smoke` failure.** Live smoke + depends on real SwitchBot API availability and valid credentials; a + transient outage should not deprecate a working package. Only + `install_package` and `offline_smoke` failures justify an automatic + deprecation. + +7. 
**`bundle-smoke` must stay blocking and matrixed.** Because the bundle is + the publish source, it has to start cleanly on every Node version the + package supports (`engines.node >= 18`). The job runs `npm run build + + shebang count + node --check + --version + bundle size test` on Node + 18/20/22. Adding a new supported Node version means adding it to the + matrix; making the job advisory again means end-users on some supported + Node version can install a broken CLI without CI catching it. + +## Related tests + +- `tests/version.test.ts` — asserts shebang presence and `--version` parity + with `package.json`. +- `tests/build/` — esbuild bundle guards (shebang count, `node --check`, + size budget). +- `scripts/smoke-pack-install.mjs` — the end-to-end install smoke used by + both the `pre-push` hook and the CI / publish workflows. diff --git a/package-lock.json b/package-lock.json index 97aa082..08ebe3f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@switchbot/openapi-cli", - "version": "3.1.1", + "version": "3.2.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@switchbot/openapi-cli", - "version": "3.1.1", + "version": "3.2.2", "license": "MIT", "dependencies": { "@modelcontextprotocol/sdk": "^1.29.0", @@ -32,6 +32,7 @@ "@types/node": "^22.10.7", "@types/uuid": "^10.0.0", "@vitest/coverage-v8": "^2.1.9", + "esbuild": "^0.28.0", "markdownlint-cli": "^0.48.0", "tsx": "^4.19.2", "typescript": "^5.7.3", @@ -132,9 +133,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", - "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.28.0.tgz", + "integrity": "sha512-lhRUCeuOyJQURhTxl4WkpFTjIsbDayJHih5kZC1giwE+MhIzAb7mEsQMqMf18rHLsrb5qI1tafG20mLxEWcWlA==", 
"cpu": [ "ppc64" ], @@ -149,9 +150,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", - "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.28.0.tgz", + "integrity": "sha512-wqh0ByljabXLKHeWXYLqoJ5jKC4XBaw6Hk08OfMrCRd2nP2ZQ5eleDZC41XHyCNgktBGYMbqnrJKq/K/lzPMSQ==", "cpu": [ "arm" ], @@ -166,9 +167,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", - "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.28.0.tgz", + "integrity": "sha512-+WzIXQOSaGs33tLEgYPYe/yQHf0WTU0X42Jca3y8NWMbUVhp7rUnw+vAsRC/QiDrdD31IszMrZy+qwPOPjd+rw==", "cpu": [ "arm64" ], @@ -183,9 +184,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", - "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.28.0.tgz", + "integrity": "sha512-+VJggoaKhk2VNNqVL7f6S189UzShHC/mR9EE8rDdSkdpN0KflSwWY/gWjDrNxxisg8Fp1ZCD9jLMo4m0OUfeUA==", "cpu": [ "x64" ], @@ -200,9 +201,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", - "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "version": "0.28.0", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.28.0.tgz", + "integrity": "sha512-0T+A9WZm+bZ84nZBtk1ckYsOvyA3x7e2Acj1KdVfV4/2tdG4fzUp91YHx+GArWLtwqp77pBXVCPn2We7Letr0Q==", "cpu": [ "arm64" ], @@ -217,9 +218,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", - "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.28.0.tgz", + "integrity": "sha512-fyzLm/DLDl/84OCfp2f/XQ4flmORsjU7VKt8HLjvIXChJoFFOIL6pLJPH4Yhd1n1gGFF9mPwtlN5Wf82DZs+LQ==", "cpu": [ "x64" ], @@ -234,9 +235,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", - "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.28.0.tgz", + "integrity": "sha512-l9GeW5UZBT9k9brBYI+0WDffcRxgHQD8ShN2Ur4xWq/NFzUKm3k5lsH4PdaRgb2w7mI9u61nr2gI2mLI27Nh3Q==", "cpu": [ "arm64" ], @@ -251,9 +252,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", - "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.28.0.tgz", + "integrity": "sha512-BXoQai/A0wPO6Es3yFJ7APCiKGc1tdAEOgeTNy3SsB491S3aHn4S4r3e976eUnPdU+NbdtmBuLncYir2tMU9Nw==", "cpu": [ "x64" ], @@ -268,9 +269,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", - 
"integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.28.0.tgz", + "integrity": "sha512-CjaaREJagqJp7iTaNQjjidaNbCKYcd4IDkzbwwxtSvjI7NZm79qiHc8HqciMddQ6CKvJT6aBd8lO9kN/ZudLlw==", "cpu": [ "arm" ], @@ -285,9 +286,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", - "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.28.0.tgz", + "integrity": "sha512-RVyzfb3FWsGA55n6WY0MEIEPURL1FcbhFE6BffZEMEekfCzCIMtB5yyDcFnVbTnwk+CLAgTujmV/Lgvih56W+A==", "cpu": [ "arm64" ], @@ -302,9 +303,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", - "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.28.0.tgz", + "integrity": "sha512-KBnSTt1kxl9x70q+ydterVdl+Cn0H18ngRMRCEQfrbqdUuntQQ0LoMZv47uB97NljZFzY6HcfqEZ2SAyIUTQBQ==", "cpu": [ "ia32" ], @@ -319,9 +320,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", - "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.28.0.tgz", + "integrity": "sha512-zpSlUce1mnxzgBADvxKXX5sl8aYQHo2ezvMNI8I0lbblJtp8V4odlm3Yzlj7gPyt3T8ReksE6bK+pT3WD+aJRg==", "cpu": [ "loong64" ], @@ -336,9 +337,9 @@ } }, 
"node_modules/@esbuild/linux-mips64el": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", - "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.28.0.tgz", + "integrity": "sha512-2jIfP6mmjkdmeTlsX/9vmdmhBmKADrWqN7zcdtHIeNSCH1SqIoNI63cYsjQR8J+wGa4Y5izRcSHSm8K3QWmk3w==", "cpu": [ "mips64el" ], @@ -353,9 +354,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", - "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.28.0.tgz", + "integrity": "sha512-bc0FE9wWeC0WBm49IQMPSPILRocGTQt3j5KPCA8os6VprfuJ7KD+5PzESSrJ6GmPIPJK965ZJHTUlSA6GNYEhg==", "cpu": [ "ppc64" ], @@ -370,9 +371,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", - "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.28.0.tgz", + "integrity": "sha512-SQPZOwoTTT/HXFXQJG/vBX8sOFagGqvZyXcgLA3NhIqcBv1BJU1d46c0rGcrij2B56Z2rNiSLaZOYW5cUk7yLQ==", "cpu": [ "riscv64" ], @@ -387,9 +388,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", - "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "version": "0.28.0", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.28.0.tgz", + "integrity": "sha512-SCfR0HN8CEEjnYnySJTd2cw0k9OHB/YFzt5zgJEwa+wL/T/raGWYMBqwDNAC6dqFKmJYZoQBRfHjgwLHGSrn3Q==", "cpu": [ "s390x" ], @@ -404,9 +405,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", - "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.28.0.tgz", + "integrity": "sha512-us0dSb9iFxIi8srnpl931Nvs65it/Jd2a2K3qs7fz2WfGPHqzfzZTfec7oxZJRNPXPnNYZtanmRc4AL/JwVzHQ==", "cpu": [ "x64" ], @@ -421,9 +422,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", - "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.28.0.tgz", + "integrity": "sha512-CR/RYotgtCKwtftMwJlUU7xCVNg3lMYZ0RzTmAHSfLCXw3NtZtNpswLEj/Kkf6kEL3Gw+BpOekRX0BYCtklhUw==", "cpu": [ "arm64" ], @@ -438,9 +439,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", - "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.28.0.tgz", + "integrity": "sha512-nU1yhmYutL+fQ71Kxnhg8uEOdC0pwEW9entHykTgEbna2pw2dkbFSMeqjjyHZoCmt8SBkOSvV+yNmm94aUrrqw==", "cpu": [ "x64" ], @@ -455,9 +456,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", - 
"integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.28.0.tgz", + "integrity": "sha512-cXb5vApOsRsxsEl4mcZ1XY3D4DzcoMxR/nnc4IyqYs0rTI8ZKmW6kyyg+11Z8yvgMfAEldKzP7AdP64HnSC/6g==", "cpu": [ "arm64" ], @@ -472,9 +473,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", - "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.28.0.tgz", + "integrity": "sha512-8wZM2qqtv9UP3mzy7HiGYNH/zjTA355mpeuA+859TyR+e+Tc08IHYpLJuMsfpDJwoLo1ikIJI8jC3GFjnRClzA==", "cpu": [ "x64" ], @@ -489,9 +490,9 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", - "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.28.0.tgz", + "integrity": "sha512-FLGfyizszcef5C3YtoyQDACyg95+dndv79i2EekILBofh5wpCa1KuBqOWKrEHZg3zrL3t5ouE5jgr94vA+Wb2w==", "cpu": [ "arm64" ], @@ -506,9 +507,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", - "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.28.0.tgz", + "integrity": "sha512-1ZgjUoEdHZZl/YlV76TSCz9Hqj9h9YmMGAgAPYd+q4SicWNX3G5GCyx9uhQWSLcbvPW8Ni7lj4gDa1T40akdlw==", "cpu": [ "x64" ], @@ 
-523,9 +524,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", - "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.28.0.tgz", + "integrity": "sha512-Q9StnDmQ/enxnpxCCLSg0oo4+34B9TdXpuyPeTedN/6+iXBJ4J+zwfQI28u/Jl40nOYAxGoNi7mFP40RUtkmUA==", "cpu": [ "arm64" ], @@ -540,9 +541,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", - "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.28.0.tgz", + "integrity": "sha512-zF3ag/gfiCe6U2iczcRzSYJKH1DCI+ByzSENHlM2FcDbEeo5Zd2C86Aq0tKUYAJJ1obRP84ymxIAksZUcdztHA==", "cpu": [ "ia32" ], @@ -557,9 +558,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", - "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.28.0.tgz", + "integrity": "sha512-pEl1bO9mfAmIC+tW5btTmrKaujg3zGtUmWNdCw/xs70FBjwAL3o9OEKNHvNmnyylD6ubxUERiEhdsL0xBQ9efw==", "cpu": [ "x64" ], @@ -2108,9 +2109,9 @@ } }, "node_modules/esbuild": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", - "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.28.0.tgz", + "integrity": 
"sha512-sNR9MHpXSUV/XB4zmsFKN+QgVG82Cc7+/aaxJ8Adi8hyOac+EXptIp45QBPaVyX3N70664wRbTcLTOemCAnyqw==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -2121,32 +2122,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.27.7", - "@esbuild/android-arm": "0.27.7", - "@esbuild/android-arm64": "0.27.7", - "@esbuild/android-x64": "0.27.7", - "@esbuild/darwin-arm64": "0.27.7", - "@esbuild/darwin-x64": "0.27.7", - "@esbuild/freebsd-arm64": "0.27.7", - "@esbuild/freebsd-x64": "0.27.7", - "@esbuild/linux-arm": "0.27.7", - "@esbuild/linux-arm64": "0.27.7", - "@esbuild/linux-ia32": "0.27.7", - "@esbuild/linux-loong64": "0.27.7", - "@esbuild/linux-mips64el": "0.27.7", - "@esbuild/linux-ppc64": "0.27.7", - "@esbuild/linux-riscv64": "0.27.7", - "@esbuild/linux-s390x": "0.27.7", - "@esbuild/linux-x64": "0.27.7", - "@esbuild/netbsd-arm64": "0.27.7", - "@esbuild/netbsd-x64": "0.27.7", - "@esbuild/openbsd-arm64": "0.27.7", - "@esbuild/openbsd-x64": "0.27.7", - "@esbuild/openharmony-arm64": "0.27.7", - "@esbuild/sunos-x64": "0.27.7", - "@esbuild/win32-arm64": "0.27.7", - "@esbuild/win32-ia32": "0.27.7", - "@esbuild/win32-x64": "0.27.7" + "@esbuild/aix-ppc64": "0.28.0", + "@esbuild/android-arm": "0.28.0", + "@esbuild/android-arm64": "0.28.0", + "@esbuild/android-x64": "0.28.0", + "@esbuild/darwin-arm64": "0.28.0", + "@esbuild/darwin-x64": "0.28.0", + "@esbuild/freebsd-arm64": "0.28.0", + "@esbuild/freebsd-x64": "0.28.0", + "@esbuild/linux-arm": "0.28.0", + "@esbuild/linux-arm64": "0.28.0", + "@esbuild/linux-ia32": "0.28.0", + "@esbuild/linux-loong64": "0.28.0", + "@esbuild/linux-mips64el": "0.28.0", + "@esbuild/linux-ppc64": "0.28.0", + "@esbuild/linux-riscv64": "0.28.0", + "@esbuild/linux-s390x": "0.28.0", + "@esbuild/linux-x64": "0.28.0", + "@esbuild/netbsd-arm64": "0.28.0", + "@esbuild/netbsd-x64": "0.28.0", + "@esbuild/openbsd-arm64": "0.28.0", + "@esbuild/openbsd-x64": "0.28.0", + "@esbuild/openharmony-arm64": "0.28.0", + "@esbuild/sunos-x64": 
"0.28.0", + "@esbuild/win32-arm64": "0.28.0", + "@esbuild/win32-ia32": "0.28.0", + "@esbuild/win32-x64": "0.28.0" } }, "node_modules/escape-html": { @@ -4913,6 +4914,490 @@ "fsevents": "~2.3.3" } }, + "node_modules/tsx/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { + "version": 
"0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", + "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": 
true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + 
"integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/esbuild": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", + "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": 
"0.27.7", + "@esbuild/android-arm": "0.27.7", + "@esbuild/android-arm64": "0.27.7", + "@esbuild/android-x64": "0.27.7", + "@esbuild/darwin-arm64": "0.27.7", + "@esbuild/darwin-x64": "0.27.7", + "@esbuild/freebsd-arm64": "0.27.7", + "@esbuild/freebsd-x64": "0.27.7", + "@esbuild/linux-arm": "0.27.7", + "@esbuild/linux-arm64": "0.27.7", + "@esbuild/linux-ia32": "0.27.7", + "@esbuild/linux-loong64": "0.27.7", + "@esbuild/linux-mips64el": "0.27.7", + "@esbuild/linux-ppc64": "0.27.7", + "@esbuild/linux-riscv64": "0.27.7", + "@esbuild/linux-s390x": "0.27.7", + "@esbuild/linux-x64": "0.27.7", + "@esbuild/netbsd-arm64": "0.27.7", + "@esbuild/netbsd-x64": "0.27.7", + "@esbuild/openbsd-arm64": "0.27.7", + "@esbuild/openbsd-x64": "0.27.7", + "@esbuild/openharmony-arm64": "0.27.7", + "@esbuild/sunos-x64": "0.27.7", + "@esbuild/win32-arm64": "0.27.7", + "@esbuild/win32-ia32": "0.27.7", + "@esbuild/win32-x64": "0.27.7" + } + }, "node_modules/type-is": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", diff --git a/package.json b/package.json index 3928af5..d031954 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@switchbot/openapi-cli", - "version": "3.1.1", + "version": "3.2.2", "description": "SwitchBot smart home CLI — control devices, run scenes, stream real-time events, and integrate AI agents via MCP. 
Full API v1.1 coverage.", "keywords": [ "switchbot", @@ -36,17 +36,21 @@ "access": "public" }, "scripts": { - "build": "tsc && node scripts/copy-assets.mjs", - "build:prod": "tsc -p tsconfig.build.json && node scripts/copy-assets.mjs", - "clean": "node -e \"require('fs').rmSync('dist',{recursive:true,force:true})\"", + "typecheck": "tsc --noEmit", + "build": "node scripts/build.mjs", "dev": "tsx src/index.ts", + "hooks:install": "node scripts/install-git-hooks.mjs", "lint:md": "markdownlint \"**/*.md\"", "lint:md:changelog": "markdownlint CHANGELOG.md", + "prepare": "node scripts/install-git-hooks.mjs", "start": "node dist/index.js", + "smoke:pack-install": "node scripts/smoke-pack-install.mjs", "test": "vitest run", "test:watch": "vitest", "test:coverage": "vitest run --coverage", - "prepublishOnly": "npm test && npm run clean && npm run build:prod" + "verify:pre-commit": "npm run build && npm test -- tests/version.test.ts", + "verify:pre-push": "npm run build && npm test -- tests/version.test.ts && npm run smoke:pack-install", + "prepublishOnly": "npm test && npm run build && npm run smoke:pack-install" }, "dependencies": { "@modelcontextprotocol/sdk": "^1.29.0", @@ -69,6 +73,7 @@ "@types/node": "^22.10.7", "@types/uuid": "^10.0.0", "@vitest/coverage-v8": "^2.1.9", + "esbuild": "^0.28.0", "markdownlint-cli": "^0.48.0", "tsx": "^4.19.2", "typescript": "^5.7.3", diff --git a/scripts/build.mjs b/scripts/build.mjs new file mode 100644 index 0000000..3465a61 --- /dev/null +++ b/scripts/build.mjs @@ -0,0 +1,74 @@ +// scripts/build.mjs +// Single release-pipeline entry point. `npm run build` always calls this. +// +// Stages (one thing each; first failure aborts): +// 1. clean wipe dist/ so nothing stale leaks into the tarball +// 2. typecheck tsc --noEmit gate (types must still compile) +// 3. bundle esbuild produces dist/index.js (shebang via banner.js) +// 4. copy-assets copy policy schema/examples into dist/ +// 5. 
ensure-binary assert shebang + chmod 0755 on dist/index.js +// +// The invariant this file enforces: whatever ships (prepublishOnly, publish.yml, +// smoke:pack-install) was produced by EXACTLY these five steps. No other script +// writes to dist/ on the release path. + +import { spawnSync } from 'node:child_process'; +import { rmSync } from 'node:fs'; +import { dirname, join } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const scriptDir = dirname(fileURLToPath(import.meta.url)); +const repoRoot = dirname(scriptDir); +const isWindows = process.platform === 'win32'; +const npxCmd = isWindows ? 'npx.cmd' : 'npx'; + +const TOTAL = 5; +let stageIdx = 0; + +function runStage(label, fn) { + stageIdx += 1; + console.log(`build: [${stageIdx}/${TOTAL}] ${label}`); + try { + fn(); + } catch (err) { + console.error(`build: FAIL at ${label}`); + console.error(err?.message ?? err); + process.exit(1); + } +} + +function runNode(args) { + const res = spawnSync(process.execPath, args, { cwd: repoRoot, stdio: 'inherit' }); + if (res.status !== 0) { + throw new Error(`node ${args.join(' ')} exited with ${res.status}`); + } +} + +function runNpx(args) { + const res = spawnSync(npxCmd, args, { cwd: repoRoot, stdio: 'inherit', shell: isWindows }); + if (res.status !== 0) { + throw new Error(`npx ${args.join(' ')} exited with ${res.status}`); + } +} + +runStage('clean', () => { + rmSync(join(repoRoot, 'dist'), { recursive: true, force: true }); +}); + +runStage('typecheck', () => { + runNpx(['tsc', '--noEmit']); +}); + +runStage('bundle', () => { + runNode([join(scriptDir, 'bundle.mjs')]); +}); + +runStage('copy-assets', () => { + runNode([join(scriptDir, 'copy-assets.mjs')]); +}); + +runStage('ensure-binary', () => { + runNode([join(scriptDir, 'ensure-binary.mjs')]); +}); + +console.log('build: done'); diff --git a/scripts/bundle.mjs b/scripts/bundle.mjs new file mode 100644 index 0000000..bf3a7ec --- /dev/null +++ b/scripts/bundle.mjs @@ -0,0 +1,50 @@ +// 
scripts/bundle.mjs +// Production bundler: esbuild inlines pure-JS dependencies into a single +// dist/index.js, reducing install size. Heavy deps that use native bindings +// (mqtt, pino, axios, @modelcontextprotocol/sdk) remain in node_modules. + +import { build } from 'esbuild'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const root = path.resolve(__dirname, '..'); + +const outfile = process.env.BUNDLE_OUTFILE ?? path.join(root, 'dist/index.js'); + +await build({ + entryPoints: [path.join(root, 'src/index.ts')], + bundle: true, + platform: 'node', + target: 'node18', + format: 'esm', + outfile, + // Keep heavy native-binding or large deps external; they stay in node_modules. + external: [ + 'node:*', + // native binding deps + 'mqtt', + 'pino', + 'pino-pretty', + // large deps with native parts + 'axios', + '@modelcontextprotocol/sdk', + // pure-JS but large — inline separately if needed + ], + // Inject a createRequire-based require() so CJS packages bundled into the + // ESM output can call require('process'), require('events'), etc. (bare names + // without node: prefix) without hitting esbuild's __require2 "not supported" error. + inject: [path.join(root, 'scripts/cjs-shim.mjs')], + banner: { + // The shebang must come first (Node.js requires it at byte 0). + // The `const require` line runs BEFORE esbuild's __require IIFE (which checks + // `typeof require !== "undefined"`), so CJS packages that call bare + // require('process') or require('node:events') get the real Node require(). 
+ js: [ + '#!/usr/bin/env node', + 'import { createRequire as __cjsReq } from "node:module";', + 'const require = __cjsReq(import.meta.url);', + ].join('\n'), + }, + logLevel: 'info', +}); diff --git a/scripts/cjs-shim.mjs b/scripts/cjs-shim.mjs new file mode 100644 index 0000000..844da6b --- /dev/null +++ b/scripts/cjs-shim.mjs @@ -0,0 +1,6 @@ +// Inject a proper require() implementation for CJS packages bundled into the +// ESM output. Without this, esbuild's __require2 shim throws +// "Dynamic require of X is not supported" when CJS packages call +// require('process'), require('events'), etc. (bare names, no node: prefix). +import { createRequire } from 'node:module'; +export const require = createRequire(import.meta.url); diff --git a/scripts/ensure-binary.mjs b/scripts/ensure-binary.mjs new file mode 100644 index 0000000..45d36c9 --- /dev/null +++ b/scripts/ensure-binary.mjs @@ -0,0 +1,49 @@ +// scripts/ensure-binary.mjs +// Regression guard for the shipping binary. +// +// Contract: +// dist/index.js MUST exist, its first 20 bytes MUST be "#!/usr/bin/env node", +// and its mode SHOULD be 0o755. If any of those is not true, fail loudly. +// +// This script does NOT repair the output. The shebang is produced by +// scripts/bundle.mjs via the esbuild `banner.js` line; if that ever drops out, +// we want the build to fail here, not silently patch it at the last minute. + +import { chmodSync, existsSync, readFileSync, statSync } from 'node:fs'; +import { dirname, join } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const scriptDir = dirname(fileURLToPath(import.meta.url)); +const repoRoot = dirname(scriptDir); +const NODE_SHEBANG = '#!/usr/bin/env node'; + +const cliEntry = join(repoRoot, 'dist', 'index.js'); + +if (!existsSync(cliEntry)) { + console.error(`ensure-binary: dist/index.js is missing (expected at ${cliEntry})`); + console.error(' Did scripts/bundle.mjs run? 
This script only verifies, it does not build.'); + process.exit(1); +} + +const head = readFileSync(cliEntry, { encoding: 'utf-8' }).slice(0, NODE_SHEBANG.length); +if (head !== NODE_SHEBANG) { + console.error('ensure-binary: dist/index.js is missing the node shebang'); + console.error(` expected first bytes: ${JSON.stringify(NODE_SHEBANG)}`); + console.error(` actual first bytes: ${JSON.stringify(head)}`); + console.error(' Check scripts/bundle.mjs banner.js — the shebang is produced there.'); + process.exit(1); +} + +try { + chmodSync(cliEntry, 0o755); +} catch { + // Best-effort. Filesystems that ignore POSIX modes (e.g. some Windows FSes) + // still produce a valid tarball; npm records the mode at pack time on Linux. +} + +try { + const mode = statSync(cliEntry).mode & 0o777; + console.log(`ensure-binary: ok (shebang present, mode 0o${mode.toString(8)})`); +} catch { + console.log('ensure-binary: ok (shebang present)'); +} diff --git a/scripts/install-git-hooks.mjs b/scripts/install-git-hooks.mjs new file mode 100644 index 0000000..c1a88cc --- /dev/null +++ b/scripts/install-git-hooks.mjs @@ -0,0 +1,36 @@ +import { existsSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const scriptDir = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.dirname(scriptDir); +const gitDir = path.join(repoRoot, '.git'); +const gitConfig = path.join(gitDir, 'config'); +const hookLine = '\thooksPath = .githooks'; + +if (!existsSync(gitDir) || !existsSync(gitConfig)) { + process.exit(0); +} + +try { + const raw = readFileSync(gitConfig, 'utf-8'); + + let next; + if (/\[core\]/.test(raw)) { + if (/^\s*hooksPath\s*=.*$/m.test(raw)) { + next = raw.replace(/^\s*hooksPath\s*=.*$/m, hookLine); + } else { + next = raw.replace(/\[core\][^\[]*/m, (section) => `${section.trimEnd()}\n${hookLine}\n`); + } + } else { + const prefix = raw.endsWith('\n') ? 
raw : `${raw}\n`; + next = `${prefix}[core]\n${hookLine}\n`; + } + + if (next !== raw) { + writeFileSync(gitConfig, next, 'utf-8'); + } +} catch { + // Best-effort only. Published-package consumers and non-git environments + // should not fail install because of local hook setup. +} diff --git a/scripts/smoke-pack-install.mjs b/scripts/smoke-pack-install.mjs new file mode 100644 index 0000000..5165d95 --- /dev/null +++ b/scripts/smoke-pack-install.mjs @@ -0,0 +1,67 @@ +import { execFileSync } from 'node:child_process'; +import { mkdtempSync, readFileSync, rmSync } from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const scriptDir = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.dirname(scriptDir); +const pkg = JSON.parse(readFileSync(path.join(repoRoot, 'package.json'), 'utf-8')); +const expectedVersion = String(pkg.version); + +function runNpm(args, options = {}) { + const npmExecPath = process.env.npm_execpath; + if (npmExecPath) { + return execFileSync(process.execPath, [npmExecPath, ...args], options); + } + const npmCmd = process.platform === 'win32' ? 'npm.cmd' : 'npm'; + return execFileSync(npmCmd, args, options); +} + +const workDir = mkdtempSync(path.join(os.tmpdir(), 'switchbot-pack-smoke-')); +let tarballPath = null; + +try { + const packJson = runNpm(['pack', '--json'], { + cwd: repoRoot, + encoding: 'utf-8', + }); + const [packResult] = JSON.parse(packJson); + if (!packResult?.filename) { + throw new Error(`npm pack did not return a filename: ${packJson}`); + } + + tarballPath = path.join(repoRoot, packResult.filename); + + runNpm(['init', '-y'], { + cwd: workDir, + stdio: 'ignore', + }); + + runNpm(['install', tarballPath], { + cwd: workDir, + stdio: 'inherit', + }); + + const actualVersion = process.platform === 'win32' + ? 
execFileSync(path.join(workDir, 'node_modules', '.bin', 'switchbot.cmd'), ['--version'], { + cwd: workDir, + encoding: 'utf-8', + shell: true, + }).trim() + : execFileSync(path.join(workDir, 'node_modules', '.bin', 'switchbot'), ['--version'], { + cwd: workDir, + encoding: 'utf-8', + }).trim(); + + if (actualVersion !== expectedVersion) { + throw new Error(`Packed CLI version mismatch: expected ${expectedVersion}, got ${actualVersion}`); + } + + console.log(`pack-install smoke ok: switchbot --version -> ${actualVersion}`); +} finally { + if (tarballPath) { + rmSync(tarballPath, { force: true }); + } + rmSync(workDir, { recursive: true, force: true }); +} diff --git a/src/commands/agent-bootstrap.ts b/src/commands/agent-bootstrap.ts index d744f56..e3ec25d 100644 --- a/src/commands/agent-bootstrap.ts +++ b/src/commands/agent-bootstrap.ts @@ -1,5 +1,5 @@ import { Command } from 'commander'; -import { printJson } from '../utils/output.js'; +import { printJson, exitWithError } from '../utils/output.js'; import { loadCache } from '../devices/cache.js'; import { getEffectiveCatalog, @@ -18,10 +18,7 @@ import { } from '../policy/load.js'; import { validateLoadedPolicy } from '../policy/validate.js'; import { selectCredentialStore, CredentialBackendName } from '../credentials/keychain.js'; -import { createRequire } from 'node:module'; - -const require = createRequire(import.meta.url); -const { version: pkgVersion } = require('../../package.json') as { version: string }; +import { VERSION as pkgVersion } from '../version.js'; /** * Schema version of the agent-bootstrap payload. 
Must stay in lockstep @@ -103,6 +100,7 @@ async function readCredentialsBackend(): Promise { interface BootstrapOptions { compact?: boolean; + sections?: string; } export function registerAgentBootstrapCommand(program: Command): void { @@ -115,6 +113,10 @@ export function registerAgentBootstrapCommand(program: Command): void { '--compact', 'Emit an even smaller payload by dropping catalog descriptions and non-essential fields (target: <20 KB).', ) + .option( + '--sections ', + 'Comma-separated top-level sections to include (e.g. identity,devices,catalog). Omit for all sections.', + ) .addHelpText( 'after', ` @@ -226,6 +228,27 @@ Examples: : [], }; - printJson(payload); + const VALID_SECTIONS = new Set([ + 'schemaVersion', 'generatedAt', 'cliVersion', 'identity', 'quickReference', + 'safetyTiers', 'nameStrategies', 'profile', 'quota', 'policyStatus', + 'credentialsBackend', 'devices', 'catalog', 'hints', + ]); + + let finalPayload: Record = payload as Record; + if (opts.sections) { + const requested = opts.sections.split(',').map((s) => s.trim()).filter(Boolean); + const unknown = requested.filter((s) => !VALID_SECTIONS.has(s)); + if (unknown.length > 0) { + exitWithError({ + code: 2, + kind: 'usage', + message: `Unknown section(s): ${unknown.join(', ')}. 
Valid sections: ${[...VALID_SECTIONS].join(', ')}.`, + }); + } + finalPayload = Object.fromEntries( + Object.entries(finalPayload).filter(([k]) => requested.includes(k)), + ); + } + printJson(finalPayload); }); } diff --git a/src/commands/capabilities.ts b/src/commands/capabilities.ts index 9f29c1b..3c37125 100644 --- a/src/commands/capabilities.ts +++ b/src/commands/capabilities.ts @@ -115,7 +115,7 @@ const DESTRUCTIVE_LOCAL = meta(true, false, false, 'destructive', 'local', 20); const DESTRUCTIVE_REMOTE = meta(true, true, false, 'destructive', 'deviceDependent', 1200); const READ_NONE = meta(false, false, false, 'read', 'none', 50); -const COMMAND_META: Record = { +export const COMMAND_META: Record = { 'agent-bootstrap': READ_LOCAL, 'auth keychain describe': READ_LOCAL, 'auth keychain get': READ_LOCAL, @@ -197,6 +197,7 @@ const COMMAND_META: Record = { 'rules doctor': READ_LOCAL, 'rules summary': READ_LOCAL, 'rules last-fired': READ_LOCAL, + 'rules explain': READ_LOCAL, 'schema export': READ_LOCAL, 'scenes list': READ_REMOTE, 'scenes execute': ACTION_REMOTE, @@ -410,17 +411,9 @@ export function registerCapabilitiesCommand(program: Command): void { Object.entries(COMMAND_META).map(([k, v]) => [k, { ...v, ...deriveRiskMeta(v) }]) ), ...(globalFlags ? { globalFlags } : {}), - catalog: { + catalog: compact ? undefined : { + note: 'Device type catalog (commands, status fields, parameters) is available via `schema export`. 
Use `schema export --capabilities` for entries annotated with CLI safety metadata.', typeCount: catalog.length, - roles, - destructiveCommandCount: catalog.reduce( - (n, e) => - n + e.commands.filter((c) => deriveSafetyTier(c, e) === 'destructive').length, - 0, - ), - safetyTiersInUse: collectSafetyTiersInUse(catalog), - readOnlyTypeCount: catalog.filter((e) => e.readOnly).length, - readOnlyQueryCount: countStatusQueries(catalog), }, resources: RESOURCE_CATALOG, }; diff --git a/src/commands/schema.ts b/src/commands/schema.ts index b0146bb..8fc3c7c 100644 --- a/src/commands/schema.ts +++ b/src/commands/schema.ts @@ -120,6 +120,7 @@ export function registerSchemaCommand(program: Command): void { .option('--compact', 'Drop descriptions/aliases/example params — emit ~60% smaller payload. Useful for agent prompts.') .option('--used', 'Restrict to device types present in the local devices cache (run "devices list" first)') .option('--project ', 'Project per-type fields (e.g. --project type,commands,statusFields)', stringArg('--project')) + .option('--capabilities', 'Annotate each device type with CLI command safety metadata (agentSafetyTier, mutating, consumesQuota)') .addHelpText('after', ` Output is always JSON (this command ignores --format). 
The output is a catalog export — not a formal JSON Schema standard document — suitable for @@ -148,7 +149,7 @@ Examples: $ switchbot schema export --role security --category physical $ switchbot schema export --project type,commands,statusFields `) - .action((options: { type?: string; types?: string; role?: string; category?: string; compact?: boolean; used?: boolean; project?: string }) => { + .action(async (options: { type?: string; types?: string; role?: string; category?: string; compact?: boolean; used?: boolean; project?: string; capabilities?: boolean }) => { const catalog = getEffectiveCatalog(); let filtered = catalog; @@ -199,9 +200,18 @@ Examples: ) : mapped; + let finalTypes = projected as Array<Record<string, unknown>>; + if (options.capabilities) { + const { COMMAND_META } = await import('./capabilities.js'); + const devicesMeta = Object.fromEntries( + Object.entries(COMMAND_META).filter(([k]) => k.startsWith('devices ')), + ); + finalTypes = finalTypes.map((e) => ({ ...e, commandsMeta: devicesMeta })); + } + const payload: Record<string, unknown> = { version: '1.0', - types: projected, + types: finalTypes, }; if (!options.compact) { payload.generatedAt = new Date().toISOString(); diff --git a/src/commands/upgrade-check.ts b/src/commands/upgrade-check.ts index 7871f81..a39e77c 100644 --- a/src/commands/upgrade-check.ts +++ b/src/commands/upgrade-check.ts @@ -1,11 +1,10 @@ import { Command } from 'commander'; -import { createRequire } from 'node:module'; import https from 'node:https'; import { isJsonMode, printJson } from '../utils/output.js'; import chalk from 'chalk'; +import { VERSION as currentVersion } from '../version.js'; -const require = createRequire(import.meta.url); -const { name: pkgName, version: currentVersion } = require('../../package.json') as { name: string; version: string }; +const pkgName = '@switchbot/openapi-cli'; function fetchLatestVersion(packageName: string, timeoutMs = 8000): Promise<string> { const encoded = packageName.replace('/', '%2F'); diff --git a/src/index.ts 
b/src/index.ts index 25763c2..1759a7e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,3 @@ -#!/usr/bin/env node import { Command, CommanderError, InvalidArgumentError } from 'commander'; import { createRequire } from 'node:module'; import chalk from 'chalk'; diff --git a/src/policy/schema/v0.2.json b/src/policy/schema/v0.2.json index 58aa07e..3785b64 100644 --- a/src/policy/schema/v0.2.json +++ b/src/policy/schema/v0.2.json @@ -18,7 +18,7 @@ "description": "Unchanged from v0.1.", "additionalProperties": { "type": "string", - "pattern": "^[A-Z0-9]{2,}-[A-Z0-9-]+$" + "pattern": "^[A-Za-z0-9][A-Za-z0-9_-]{1,63}$" } }, diff --git a/tests/build/bundle-size.test.ts b/tests/build/bundle-size.test.ts new file mode 100644 index 0000000..050a6a5 --- /dev/null +++ b/tests/build/bundle-size.test.ts @@ -0,0 +1,52 @@ +import { describe, it, expect, beforeAll } from 'vitest'; +import fs from 'node:fs'; +import path from 'node:path'; +import { spawnSync, execSync } from 'node:child_process'; + +// Build to a separate path so we don't overwrite the tsc dist/index.js +// that other tests (install smoke, status-sync smoke) depend on. +// Must stay in dist/ (not a subdirectory) so require('../package.json') resolves correctly. +const bundleEntry = path.resolve('dist/bundle-test.js'); + +describe('esbuild production bundle', () => { + beforeAll(() => { + execSync(`node scripts/bundle.mjs --outfile=${bundleEntry}`, { + stdio: 'pipe', + env: { ...process.env, BUNDLE_OUTFILE: bundleEntry }, + }); + }, 30_000); + + it('bundle output exists', () => { + expect(fs.existsSync(bundleEntry), `${bundleEntry} not found after build:prod`).toBe(true); + }); + + it('has exactly one shebang line', () => { + const content = fs.readFileSync(bundleEntry, 'utf-8'); + const count = (content.match(/^#!\/usr\/bin\/env node/gm) ?? 
[]).length; + expect(count, `Expected exactly 1 shebang, found ${count} — check bundle.mjs banner vs src/index.ts`).toBe(1); + }); + + it('passes Node.js syntax check', () => { + const result = spawnSync(process.execPath, ['--check', bundleEntry], { encoding: 'utf-8' }); + expect(result.status, `node --check failed (exit ${result.status}):\n${result.stderr}`).toBe(0); + expect(result.stderr).toBe(''); + }); + + it('--version exits 0 and outputs a valid semver', () => { + const result = spawnSync(process.execPath, [bundleEntry, '--version'], { encoding: 'utf-8' }); + expect(result.status, `--version exited ${result.status}:\n${result.stderr}`).toBe(0); + expect(result.stdout.trim()).toMatch(/^\d+\.\d+\.\d+/); + }); + + it('--version matches package.json version', () => { + const pkgVersion = JSON.parse(fs.readFileSync(path.resolve('package.json'), 'utf-8')).version as string; + const result = spawnSync(process.execPath, [bundleEntry, '--version'], { encoding: 'utf-8' }); + expect(result.stdout.trim(), `Bundle reports ${result.stdout.trim()} but package.json says ${pkgVersion}`).toBe(pkgVersion); + }); + + it('is under 15 MB', () => { + const { size } = fs.statSync(bundleEntry); + const sizeMb = size / (1024 * 1024); + expect(sizeMb, `bundle is ${sizeMb.toFixed(1)} MB — exceeds 15 MB budget`).toBeLessThan(15); + }); +}); diff --git a/tests/commands/agent-bootstrap.test.ts b/tests/commands/agent-bootstrap.test.ts index 8c169d2..ee21c31 100644 --- a/tests/commands/agent-bootstrap.test.ts +++ b/tests/commands/agent-bootstrap.test.ts @@ -6,6 +6,7 @@ import path from 'node:path'; import { Command } from 'commander'; import { registerAgentBootstrapCommand } from '../../src/commands/agent-bootstrap.js'; import { resetListCache } from '../../src/devices/cache.js'; +import { runCli } from '../helpers/cli.js'; async function captureJson(fn: () => void | Promise): Promise { const lines: string[] = []; @@ -224,4 +225,38 @@ describe('agent-bootstrap', () => { delete 
process.env.SWITCHBOT_POLICY_PATH; } }); + + // ===================================================================== + // --sections flag (P1) + // ===================================================================== + describe('agent-bootstrap --sections', () => { + it('restricts output to the requested top-level keys', async () => { + const res = await runCli(registerAgentBootstrapCommand, [ + 'agent-bootstrap', '--sections', 'identity,cliVersion', + ]); + expect(res.exitCode).toBeNull(); + const out = JSON.parse(res.stdout.join('')) as { data: Record }; + const keys = Object.keys(out.data); + expect(keys).toContain('identity'); + expect(keys).toContain('cliVersion'); + expect(keys).not.toContain('catalog'); + expect(keys).not.toContain('hints'); + expect(keys).not.toContain('quota'); + }); + + it('includes all keys when --sections is not provided', async () => { + const res = await runCli(registerAgentBootstrapCommand, ['agent-bootstrap', '--compact']); + const out = JSON.parse(res.stdout.join('')) as { data: Record }; + expect(Object.keys(out.data)).toContain('catalog'); + expect(Object.keys(out.data)).toContain('hints'); + }); + + it('exits 2 and prints hint when an unknown section name is requested', async () => { + const res = await runCli(registerAgentBootstrapCommand, [ + 'agent-bootstrap', '--sections', 'identity,doesNotExist', + ]); + expect(res.exitCode).toBe(2); + expect(res.stderr.join('')).toMatch(/unknown section.*doesNotExist/i); + }); + }); }); diff --git a/tests/commands/capabilities-meta.test.ts b/tests/commands/capabilities-meta.test.ts new file mode 100644 index 0000000..2fdf31e --- /dev/null +++ b/tests/commands/capabilities-meta.test.ts @@ -0,0 +1,111 @@ +import { describe, it, expect, vi } from 'vitest'; + +// ── mocks required for importing capabilities.ts ──────────────────────────── +const catalogMock = vi.hoisted(() => ({ + getEffectiveCatalog: vi.fn(() => []), + deriveSafetyTier: vi.fn(() => 'action' as const), + deriveStatusQueries: 
vi.fn(() => []), +})); +const cacheMock = vi.hoisted(() => ({ loadCache: vi.fn(() => ({ list: [], status: {} })) })); +vi.mock('../../src/devices/catalog.js', () => catalogMock); +vi.mock('../../src/devices/cache.js', () => cacheMock); + +import { COMMAND_META } from '../../src/commands/capabilities.js'; +import { registerCapabilitiesCommand } from '../../src/commands/capabilities.js'; +import { runCli } from '../helpers/cli.js'; + +// ── comprehensive list of every CLI leaf command ────────────────────────────── +// Regression guard: when a new subcommand is added to the CLI, it MUST be added +// here AND to COMMAND_META. If either is missing, this test fails with a clear +// "missing: " message. +const ALL_EXPECTED_LEAF_COMMANDS = [ + 'agent-bootstrap', + 'auth keychain describe', 'auth keychain get', 'auth keychain set', + 'auth keychain delete', 'auth keychain migrate', + 'cache show', 'cache clear', + 'capabilities', + 'catalog path', 'catalog show', 'catalog search', 'catalog diff', 'catalog refresh', + 'completion', + 'config set-token', 'config show', 'config list-profiles', 'config agent-profile', + 'daemon start', 'daemon stop', 'daemon status', 'daemon reload', + 'devices list', 'devices status', 'devices command', 'devices types', + 'devices commands', 'devices describe', 'devices batch', 'devices watch', + 'devices explain', 'devices expand', + 'devices meta set', 'devices meta get', 'devices meta list', 'devices meta clear', + 'doctor', + 'events tail', 'events mqtt-tail', + 'health check', 'health serve', + 'history show', 'history replay', 'history range', 'history stats', + 'history verify', 'history aggregate', + 'install', + 'mcp serve', + 'plan schema', 'plan validate', 'plan suggest', 'plan run', + 'plan save', 'plan list', 'plan review', 'plan approve', 'plan execute', + 'policy validate', 'policy new', 'policy migrate', 'policy diff', + 'policy add-rule', 'policy backup', 'policy restore', + 'quota status', 'quota reset', + 'rules suggest', 
'rules lint', 'rules list', 'rules run', 'rules reload', + 'rules tail', 'rules replay', 'rules webhook-rotate-token', 'rules webhook-show-token', + 'rules conflicts', 'rules doctor', 'rules summary', 'rules last-fired', + 'rules explain', + 'schema export', + 'scenes list', 'scenes execute', 'scenes describe', + 'scenes validate', 'scenes simulate', 'scenes explain', + 'status-sync run', 'status-sync start', 'status-sync stop', 'status-sync status', + 'uninstall', + 'upgrade-check', + 'webhook setup', 'webhook query', 'webhook update', 'webhook delete', +] as const; + +// MCP tool names and other prefixes that legitimately live in COMMAND_META +// but are NOT CLI leaf commands. +const NON_CLI_PREFIXES = [ + 'list_', 'get_', 'send_', 'describe_', 'run_', 'search_', + 'account_', 'query_', 'aggregate_', +]; + +describe('COMMAND_META — exhaustive coverage guard', () => { + it('has an entry for every known CLI leaf command', () => { + const missing = ALL_EXPECTED_LEAF_COMMANDS.filter((cmd) => !(cmd in COMMAND_META)); + expect(missing, `COMMAND_META missing entries: ${missing.join(', ')}`).toHaveLength(0); + }); + + it('does not have phantom entries for commands that do not exist', () => { + const knownSet = new Set(ALL_EXPECTED_LEAF_COMMANDS); + const phantom = Object.keys(COMMAND_META).filter( + (k) => !knownSet.has(k) && !NON_CLI_PREFIXES.some((p) => k.startsWith(p)), + ); + expect(phantom, `Phantom COMMAND_META entries: ${phantom.join(', ')}`).toHaveLength(0); + }); +}); + +describe('capabilities command — regression output tests', () => { + it('produces non-empty JSON output with --compact (regression: rules explain missing)', async () => { + const res = await runCli(registerCapabilitiesCommand, ['capabilities', '--compact']); + expect(res.exitCode).toBeNull(); + expect(res.stderr.join('')).not.toMatch(/coverage error/i); + const out = res.stdout.join(''); + expect(out.length).toBeGreaterThan(50); + const parsed = JSON.parse(out) as { data: { commands: Array<{ 
name: string }> } }; + expect(parsed).toHaveProperty('data'); + expect(parsed.data).toHaveProperty('commands'); + }); + + it('COMMAND_META has rules explain entry with READ_LOCAL tier', () => { + const entry = COMMAND_META['rules explain']; + expect(entry, 'COMMAND_META missing rules explain').toBeDefined(); + expect(entry.agentSafetyTier).toBe('read'); + expect(entry.mutating).toBe(false); + expect(entry.consumesQuota).toBe(false); + }); + + it('full output catalog is a pointer note referencing schema export', async () => { + const res = await runCli(registerCapabilitiesCommand, ['capabilities']); + expect(res.exitCode).toBeNull(); + const parsed = JSON.parse(res.stdout.join('')) as { data: { catalog?: { note: string } } }; + const catalog = parsed.data.catalog; + expect(catalog).toBeDefined(); + expect(catalog).toHaveProperty('note'); + expect(catalog!.note).toContain('schema export'); + }); +}); diff --git a/tests/commands/capabilities.test.ts b/tests/commands/capabilities.test.ts index d485e20..43ca552 100644 --- a/tests/commands/capabilities.test.ts +++ b/tests/commands/capabilities.test.ts @@ -128,19 +128,16 @@ describe('capabilities', () => { expect(flags.some((f) => f.includes('--dry-run'))).toBe(true); }); - it('catalog.roles includes lighting and security, typeCount > 10', async () => { + it('catalog is a pointer note with typeCount, not inline stats', async () => { const out = await runCapabilities(); const cat = out.catalog as Record; - expect((cat.roles as string[])).toContain('lighting'); - expect((cat.roles as string[])).toContain('security'); + expect(cat).toHaveProperty('note'); + expect(cat.note as string).toContain('schema export'); expect(cat.typeCount as number).toBeGreaterThan(10); - }); - - it('P11: catalog.safetyTiersInUse includes "read" and catalog.readOnlyQueryCount > 0', async () => { - const out = await runCapabilities(); - const cat = out.catalog as Record; - expect((cat.safetyTiersInUse as string[])).toContain('read'); - 
expect((cat.readOnlyQueryCount as number)).toBeGreaterThan(0); + // Inline stats (roles, safetyTiersInUse, readOnlyQueryCount) are intentionally + // removed — they now live in `schema export --capabilities`. + expect(cat.roles).toBeUndefined(); + expect(cat.safetyTiersInUse).toBeUndefined(); }); it('surfaces.mcp.tools includes send_command, account_overview, get_device_history and query_device_history', async () => { @@ -284,4 +281,38 @@ describe('capabilities B3/B4', () => { const keys = resources.keys as Array<{ keyType: string }>; expect(keys.map((k) => k.keyType).sort()).toEqual(['disposable', 'permanent', 'timeLimit', 'urgent']); }); + + it('commandMeta flat map includes derived risk fields on every entry', async () => { + const out = await runCapabilitiesWith([]); + const commandMeta = out.commandMeta as Record>; + expect(commandMeta).toBeDefined(); + // Spot-check a known entry + const devList = commandMeta['devices list']; + expect(devList).toBeDefined(); + expect(devList.agentSafetyTier).toBe('read'); + expect(devList.mutating).toBe(false); + expect(devList.consumesQuota).toBe(true); + // Derived risk meta must be present + expect(devList.riskLevel).toBe('low'); + expect(devList.requiresConfirmation).toBe(false); + expect(devList.recommendedMode).toBe('direct'); + // All entries must have the derived fields + for (const [_key, entry] of Object.entries(commandMeta)) { + expect(entry).toHaveProperty('riskLevel'); + expect(entry).toHaveProperty('requiresConfirmation'); + expect(entry).toHaveProperty('recommendedMode'); + } + }); + + it('--surface cli restricts surfaces block to cli only', async () => { + const out = await runCapabilitiesWith(['--surface', 'cli']); + const surfaces = out.surfaces as Record; + expect(Object.keys(surfaces)).toEqual(['cli']); + }); + + it('--surface mqtt restricts surfaces block to mqtt only', async () => { + const out = await runCapabilitiesWith(['--surface', 'mqtt']); + const surfaces = out.surfaces as Record; + 
expect(Object.keys(surfaces)).toEqual(['mqtt']); + }); }); diff --git a/tests/commands/daemon.test.ts b/tests/commands/daemon.test.ts index 7137266..97f7258 100644 --- a/tests/commands/daemon.test.ts +++ b/tests/commands/daemon.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; const fsMock = vi.hoisted(() => ({ mkdirSync: vi.fn(), @@ -165,3 +165,114 @@ describe('daemon command', () => { ); }); }); + +describe('daemon stop', () => { + beforeEach(() => { + vi.useFakeTimers(); + fsMock.unlinkSync.mockClear(); + pidFileMock.readPidFile.mockReset().mockReturnValue(null); + pidFileMock.isPidAlive.mockReset().mockReturnValue(false); + daemonStateMock.readDaemonState.mockReset().mockReturnValue(null); + daemonStateMock.writeDaemonState.mockClear(); + }); + afterEach(() => { vi.useRealTimers(); }); + + it('prints "No running daemon found" and exits 0 when no daemon is running', async () => { + const res = await runCli(registerDaemonCommand, ['daemon', 'stop']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/no running daemon/i); + expect(daemonStateMock.writeDaemonState).toHaveBeenCalledWith( + expect.objectContaining({ status: 'stopped', pid: null }), + ); + }); + + it('unlinks pid files, persists stopped state, and prints success when daemon is running', async () => { + pidFileMock.readPidFile.mockImplementation((file: string) => + file === daemonStateMock.DAEMON_PID_FILE ? 
12345 : null, + ); + pidFileMock.isPidAlive.mockReturnValue(true); + // Prevent real SIGTERM from being sent to a potentially-live PID in the test + const killSpy = vi.spyOn(process, 'kill').mockImplementation(() => true as never); + + const res = await runCli(registerDaemonCommand, ['daemon', 'stop']); + killSpy.mockRestore(); + + expect(res.exitCode).toBeNull(); + expect(fsMock.unlinkSync).toHaveBeenCalled(); + expect(daemonStateMock.writeDaemonState).toHaveBeenLastCalledWith( + expect.objectContaining({ status: 'stopped', pid: null }), + ); + expect(res.stdout.join(' ')).toMatch(/daemon stopped/i); + }); +}); + +describe('daemon status', () => { + beforeEach(() => { + pidFileMock.readPidFile.mockReset().mockReturnValue(null); + pidFileMock.isPidAlive.mockReset().mockReturnValue(false); + daemonStateMock.readDaemonState.mockReset().mockReturnValue(null); + }); + + it('--json reports status:stopped when no daemon is running', async () => { + const res = await runCli(registerDaemonCommand, ['--json', 'daemon', 'status']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: { status: string; pid: unknown } }; + expect(body.data.status).toBe('stopped'); + expect(body.data.pid).toBeNull(); + }); + + it('--json reports status:running with correct pid when daemon is alive', async () => { + pidFileMock.readPidFile.mockImplementation((file: string) => + file === daemonStateMock.DAEMON_PID_FILE ? 
9999 : null, + ); + pidFileMock.isPidAlive.mockReturnValue(true); + + const res = await runCli(registerDaemonCommand, ['--json', 'daemon', 'status']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: { status: string; pid: number } }; + expect(body.data.status).toBe('running'); + expect(body.data.pid).toBe(9999); + }); + + it('human output prints "not running" when stopped', async () => { + const res = await runCli(registerDaemonCommand, ['daemon', 'status']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/not running/i); + }); +}); + +describe('daemon reload', () => { + beforeEach(() => { + vi.useFakeTimers(); + pidFileMock.readPidFile.mockReset().mockReturnValue(null); + pidFileMock.isPidAlive.mockReset().mockReturnValue(false); + daemonStateMock.readDaemonState.mockReset().mockReturnValue(null); + daemonStateMock.writeDaemonState.mockClear(); + pidFileMock.writeReloadSentinel.mockClear(); + pidFileMock.sighupSupported.mockReturnValue(false); + }); + afterEach(() => { vi.useRealTimers(); }); + + it('exits 2 with usage error when no daemon is running', async () => { + const res = await runCli(registerDaemonCommand, ['daemon', 'reload']); + expect(res.exitCode).toBe(2); + expect(res.stderr.join(' ')).toMatch(/no running daemon/i); + }); + + it('succeeds via sentinel when daemon and rules engine are running', async () => { + pidFileMock.readPidFile.mockImplementation((file: string) => { + if (file === daemonStateMock.DAEMON_PID_FILE) return 8888; + if (file === '/mock/.switchbot/rules.pid') return 7777; + return null; + }); + pidFileMock.isPidAlive.mockReturnValue(true); + + const res = await runCli(registerDaemonCommand, ['daemon', 'reload']); + expect(res.exitCode).toBeNull(); + expect(pidFileMock.writeReloadSentinel).toHaveBeenCalledWith('/mock/.switchbot/rules.reload'); + expect(daemonStateMock.writeDaemonState).toHaveBeenCalledWith( + expect.objectContaining({ lastReloadStatus: 'ok' }), + 
); + expect(res.stdout.join(' ')).toMatch(/reload requested/i); + }); +}); diff --git a/tests/commands/devices.test.ts b/tests/commands/devices.test.ts index 739f6b6..202745b 100644 --- a/tests/commands/devices.test.ts +++ b/tests/commands/devices.test.ts @@ -35,6 +35,7 @@ vi.mock('../../src/api/client.js', () => ({ })); import { registerDevicesCommand } from '../../src/commands/devices.js'; +import { ApiError } from '../../src/api/client.js'; import { runCli } from '../helpers/cli.js'; import { updateCacheFromDeviceList, resetListCache } from '../../src/devices/cache.js'; @@ -2444,4 +2445,43 @@ describe('devices command', () => { expect(lock?.safetyTier).toBe('mutation'); }); }); + + // ===================================================================== + // API error exit codes (P0 regression guard) + // ===================================================================== + describe('devices status — API error exit codes', () => { + beforeEach(() => { + apiMock.__instance.get.mockReset(); + apiMock.__instance.post.mockReset(); + }); + + it('exits 1 when API returns code 190 in human mode', async () => { + apiMock.__instance.get.mockRejectedValue( + new ApiError('Device internal error', 190), + ); + const res = await runCli(registerDevicesCommand, ['devices', 'status', 'BOGUS123']); + expect(res.exitCode).toBe(1); + expect(res.stderr.join('')).toMatch(/190/); + }); + + it('exits 1 when API returns code 190 in JSON mode', async () => { + apiMock.__instance.get.mockRejectedValue( + new ApiError('Device internal error', 190), + ); + const res = await runCli(registerDevicesCommand, ['--json', 'devices', 'status', 'BOGUS123']); + expect(res.exitCode).toBe(1); + const out = res.stdout.join(''); + const parsed = JSON.parse(out) as { error?: { code: number } }; + expect(parsed).toHaveProperty('error'); + expect(parsed.error?.code).toBe(190); + }); + + it('exits 0 for a successful status call', async () => { + apiMock.__instance.get.mockResolvedValue({ + data: { 
statusCode: 100, body: { power: 'on', battery: 90 } }, + }); + const res = await runCli(registerDevicesCommand, ['devices', 'status', 'DEVICE123']); + expect(res.exitCode).toBeNull(); + }); + }); }); diff --git a/tests/commands/health-check.test.ts b/tests/commands/health-check.test.ts new file mode 100644 index 0000000..aeeb8ce --- /dev/null +++ b/tests/commands/health-check.test.ts @@ -0,0 +1,93 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import type { HealthReport } from '../../src/utils/health.js'; + +const healthMock = vi.hoisted(() => ({ + getHealthReport: vi.fn<[], HealthReport>(), + toPrometheusText: vi.fn(() => 'switchbot_quota_used_total 0\n'), +})); + +vi.mock('../../src/utils/health.js', () => healthMock); + +import { registerHealthCommand } from '../../src/commands/health.js'; +import { runCli } from '../helpers/cli.js'; + +const OK_REPORT: HealthReport = { + generatedAt: '2026-04-25T12:00:00.000Z', + overall: 'ok', + process: { pid: 1234, uptimeSeconds: 60, platform: 'linux', nodeVersion: 'v18.0.0', memoryMb: 50 }, + quota: { used: 10, limit: 10000, percentUsed: 0, remaining: 9990, status: 'ok' }, + audit: { present: false, recentErrors: 0, recentTotal: 0, errorRatePercent: 0, status: 'ok' }, + circuit: { name: 'switchbot-api', state: 'closed', failures: 0, status: 'ok' }, +}; + +const DEGRADED_REPORT: HealthReport = { + ...OK_REPORT, + overall: 'degraded', + quota: { used: 7500, limit: 10000, percentUsed: 75, remaining: 2500, status: 'warn' }, +}; + +const DOWN_REPORT: HealthReport = { + ...OK_REPORT, + overall: 'down', + circuit: { name: 'switchbot-api', state: 'open', failures: 5, status: 'open' }, +}; + +describe('health check CLI', () => { + beforeEach(() => { + healthMock.getHealthReport.mockReset().mockReturnValue(OK_REPORT); + healthMock.toPrometheusText.mockReset().mockReturnValue('switchbot_quota_used_total 0\n'); + }); + + it('--json exits 0 and includes overall, quota, circuit, process', async () => { + const res = 
await runCli(registerHealthCommand, ['--json', 'health', 'check']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: HealthReport }; + expect(body.data.overall).toBe('ok'); + expect(body.data.quota).toBeDefined(); + expect(body.data.circuit).toBeDefined(); + expect(body.data.process).toBeDefined(); + }); + + it('--json exits 0 even when overall is degraded (no process.exit in JSON mode)', async () => { + healthMock.getHealthReport.mockReturnValue(DEGRADED_REPORT); + const res = await runCli(registerHealthCommand, ['--json', 'health', 'check']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: HealthReport }; + expect(body.data.overall).toBe('degraded'); + }); + + it('human mode exits 0 and prints ✓ overall when healthy', async () => { + const res = await runCli(registerHealthCommand, ['health', 'check']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/overall.*ok/i); + }); + + it('human mode exits 1 when overall is degraded', async () => { + healthMock.getHealthReport.mockReturnValue(DEGRADED_REPORT); + const res = await runCli(registerHealthCommand, ['health', 'check']); + expect(res.exitCode).toBe(1); + expect(res.stdout.join(' ')).toMatch(/overall.*degraded/i); + }); + + it('human mode exits 1 when circuit is open (overall: down)', async () => { + healthMock.getHealthReport.mockReturnValue(DOWN_REPORT); + const res = await runCli(registerHealthCommand, ['health', 'check']); + expect(res.exitCode).toBe(1); + }); + + it('--prometheus writes Prometheus text to stdout and exits 0', async () => { + const stdoutLines: string[] = []; + const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation((chunk: unknown) => { + stdoutLines.push(String(chunk)); + return true; + }); + try { + const res = await runCli(registerHealthCommand, ['health', 'check', '--prometheus']); + expect(res.exitCode).toBeNull(); + } finally { + writeSpy.mockRestore(); + } + 
expect(healthMock.toPrometheusText).toHaveBeenCalledWith(OK_REPORT); + expect(stdoutLines.join('')).toContain('switchbot_quota_used_total'); + }); +}); diff --git a/tests/commands/policy.test.ts b/tests/commands/policy.test.ts index 1d57e03..927c422 100644 --- a/tests/commands/policy.test.ts +++ b/tests/commands/policy.test.ts @@ -489,4 +489,55 @@ describe('switchbot policy (commander surface)', () => { expect(out.data.restored).toBe(policyFile); }); }); + + // ===================================================================== + // alias deviceId format (P1 regression guard) + // ===================================================================== + describe('policy validate — alias deviceId format', () => { + let tmp: string; + + beforeEach(() => { + tmp = fs.mkdtempSync(path.join(os.tmpdir(), 'sbpolicy-alias-')); + }); + + afterEach(() => { + fs.rmSync(tmp, { recursive: true, force: true }); + }); + + function writePolicy(deviceId: string): string { + const p = path.join(tmp, 'policy.yaml'); + fs.writeFileSync(p, `version: "0.2"\naliases:\n my device: ${deviceId}\n`); + return p; + } + + it('accepts standard hyphenated IDs (01-202407090924-26354212)', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('01-202407090924-26354212')]); + expect(exitCode).toBe(0); + }); + + it('accepts 12-digit hex MAC without hyphen (28372F4C9C4A)', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('28372F4C9C4A')]); + expect(exitCode).toBe(0); + }); + + it('accepts lowercase hex MAC (b0e9fe51ef2e)', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('b0e9fe51ef2e')]); + expect(exitCode).toBe(0); + }); + + it('accepts IoT suffix format (28372F4C9C4A-vzwa)', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('28372F4C9C4A-vzwa')]); + expect(exitCode).toBe(0); + }); + + it('rejects single-char IDs', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('A')]); + 
expect(exitCode).not.toBe(0); + }); + + it('rejects IDs longer than 64 chars', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('A'.repeat(65))]); + expect(exitCode).not.toBe(0); + }); + }); }); diff --git a/tests/commands/rules.test.ts b/tests/commands/rules.test.ts index 91c8920..be6cca7 100644 --- a/tests/commands/rules.test.ts +++ b/tests/commands/rules.test.ts @@ -542,4 +542,309 @@ describe('switchbot rules (commander surface)', () => { expect(body.data.lastFired).toBe('2026-04-25T08:00:00.000Z'); }); }); + + describe('rules conflicts', () => { + it('exits 0 and reports clean when no conflicts detected', async () => { + const p = path.join(tmpDir, 'clean.yaml'); + fs.writeFileSync(p, v02Policy(sampleAutomation)); + const { exitCode, stdout } = await runCli(['rules', 'conflicts', p]); + expect(exitCode).toBe(0); + expect(stdout.join(' ')).toMatch(/no conflicts detected/i); + }); + + it('exits 0 and emits findings when opposing-action pair exists (warnings, not errors)', async () => { + const conflict = v02Policy([ + 'automation:', + ' enabled: true', + ' rules:', + ' - name: r-on', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command DEVICE-X turnOn", device: DEVICE-X }', + ' - name: r-off', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command DEVICE-X turnOff", device: DEVICE-X }', + '', + ].join('\n')); + const p = path.join(tmpDir, 'conflict.yaml'); + fs.writeFileSync(p, conflict); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'conflicts', p]); + // Opposing actions are "warning" severity → clean:true → exit 0 + expect(exitCode).toBe(0); + const body = JSON.parse(stdout[0]) as { data: { clean: boolean; findings: Array<{ code: string }> } }; + expect(body.data.findings.length).toBeGreaterThan(0); + expect(body.data.findings.some((f) => f.code === 'opposing-actions')).toBe(true); + }); + + it('--json includes counts for warning 
findings and has clean:true when only warnings exist', async () => { + const twoRules = v02Policy([ + 'automation:', + ' enabled: true', + ' rules:', + ' - name: on', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command DD turnOn", device: DD }', + ' - name: off', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command DD turnOff", device: DD }', + '', + ].join('\n')); + const p = path.join(tmpDir, 'conflict2.yaml'); + fs.writeFileSync(p, twoRules); + const { stdout } = await runCli(['--json', 'rules', 'conflicts', p]); + const body = JSON.parse(stdout[0]) as { data: { clean: boolean; counts: Record } }; + // Only warnings → clean:true (errors needed for clean:false) + expect(body.data.clean).toBe(true); + expect(body.data.counts.warning).toBeGreaterThan(0); + expect(body.data.counts.error).toBe(0); + }); + }); + + describe('rules doctor', () => { + it('--json exits 0 with overall:true for a valid policy', async () => { + const p = path.join(tmpDir, 'ok.yaml'); + fs.writeFileSync(p, v02Policy(sampleAutomation)); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'doctor', p]); + expect(exitCode).toBe(0); + const body = JSON.parse(stdout[0]) as { data: { overall: boolean } }; + expect(body.data.overall).toBe(true); + }); + + it('--json exits 1 with overall:false for a policy with duplicate rule names (lint error)', async () => { + const bad = v02Policy([ + 'automation:', + ' enabled: true', + ' rules:', + ' - name: dup-name', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command EE turnOn" }', + ' - name: dup-name', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command FF turnOff" }', + '', + ].join('\n')); + const p = path.join(tmpDir, 'doctor-bad.yaml'); + fs.writeFileSync(p, bad); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'doctor', p]); + 
expect(exitCode).toBe(1); + const body = JSON.parse(stdout[0]) as { data: { overall: boolean } }; + expect(body.data.overall).toBe(false); + }); + }); + + describe('rules summary', () => { + function writeAudit(file: string, rows: unknown[]): void { + fs.writeFileSync(file, rows.map((r) => JSON.stringify(r)).join('\n') + '\n'); + } + + it('prints "(no rule activity)" when the audit log is empty', async () => { + const f = path.join(tmpDir, 'audit-empty.log'); + fs.writeFileSync(f, ''); + const { stdout } = await runCli(['rules', 'summary', '--file', f]); + expect(stdout.join(' ')).toMatch(/no rule activity/i); + }); + + it('--json reports total count and summaries when entries exist', async () => { + const f = path.join(tmpDir, 'audit-sum.log'); + const now = new Date().toISOString(); + writeAudit(f, [ + { t: now, kind: 'rule-fire', rule: { name: 'lights on', triggerSource: 'mqtt', fireId: 'f1' }, result: 'ok', deviceId: 'D1', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + { t: now, kind: 'rule-fire', rule: { name: 'lights on', triggerSource: 'mqtt', fireId: 'f2' }, result: 'ok', deviceId: 'D1', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + { t: now, kind: 'rule-fire', rule: { name: 'lights on', triggerSource: 'mqtt', fireId: 'f3' }, result: 'error', deviceId: 'D1', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + ]); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'summary', '--file', f]); + expect(exitCode).toBe(0); + const body = JSON.parse(stdout[0]) as { data: { total: number; summaries: Array<{ rule: string; fires: number; errors: number }> } }; + expect(body.data.total).toBe(3); + const s = body.data.summaries.find((x) => x.rule === 'lights on'); + expect(s).toBeDefined(); + expect(s!.fires).toBe(3); + expect(s!.errors).toBe(1); + }); + + it('--rule filters to a single rule name', async () => { + const f = path.join(tmpDir, 'audit-filter.log'); + 
const now = new Date().toISOString(); + writeAudit(f, [ + { t: now, kind: 'rule-fire', rule: { name: 'rule-A', triggerSource: 'mqtt', fireId: 'x1' }, result: 'ok', deviceId: 'D', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + { t: now, kind: 'rule-fire', rule: { name: 'rule-B', triggerSource: 'mqtt', fireId: 'x2' }, result: 'ok', deviceId: 'D', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + ]); + const { stdout } = await runCli(['--json', 'rules', 'summary', '--file', f, '--rule', 'rule-A']); + const body = JSON.parse(stdout[0]) as { data: { summaries: Array<{ rule: string }> } }; + expect(body.data.summaries.every((s) => s.rule === 'rule-A')).toBe(true); + }); + }); + + describe('rules last-fired', () => { + function writeAudit(file: string, rows: unknown[]): void { + fs.writeFileSync(file, rows.map((r) => JSON.stringify(r)).join('\n') + '\n'); + } + + it('prints hint when no rule-fire entries exist', async () => { + const f = path.join(tmpDir, 'audit-empty2.log'); + fs.writeFileSync(f, ''); + const { stdout } = await runCli(['rules', 'last-fired', '--file', f]); + expect(stdout.join(' ')).toMatch(/no rule-fire entries/i); + }); + + it('--json returns entries in reverse chronological order', async () => { + const f = path.join(tmpDir, 'audit-lf.log'); + const base = new Date('2026-04-25T10:00:00.000Z'); + writeAudit(f, [1, 2, 3].map((i) => ({ + t: new Date(base.getTime() + i * 1000).toISOString(), + kind: 'rule-fire', + rule: { name: 'night-light', triggerSource: 'mqtt', fireId: `f${i}` }, + result: 'ok', deviceId: 'D1', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false, + }))); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'last-fired', '--file', f]); + expect(exitCode).toBe(0); + const body = JSON.parse(stdout[0]) as { data: { count: number; entries: Array<{ kind: string }> } }; + expect(body.data.count).toBe(3); + 
expect(body.data.entries[0].kind).toBe('rule-fire'); + }); + + it('-n limits the number of results returned', async () => { + const f = path.join(tmpDir, 'audit-n.log'); + const base = new Date('2026-04-25T12:00:00.000Z'); + writeAudit(f, Array.from({ length: 15 }, (_, i) => ({ + t: new Date(base.getTime() + i * 1000).toISOString(), + kind: 'rule-fire', + rule: { name: 'flood-rule', triggerSource: 'mqtt', fireId: `id${i}` }, + result: 'ok', deviceId: 'D', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false, + }))); + const { stdout } = await runCli(['--json', 'rules', 'last-fired', '--file', f, '-n', '5']); + const body = JSON.parse(stdout[0]) as { data: { count: number } }; + expect(body.data.count).toBe(5); + }); + }); +}); + +describe('rules webhook-rotate-token', () => { + let tokenDir: string; + + beforeEach(() => { + tokenDir = fs.mkdtempSync(path.join(os.tmpdir(), 'sbwh-')); + vi.spyOn(os, 'homedir').mockReturnValue(tokenDir); + }); + + afterEach(() => { + vi.restoreAllMocks(); + fs.rmSync(tokenDir, { recursive: true, force: true }); + }); + + it('creates a token file and prints the file path in human mode', async () => { + const { stdout } = await runCli(['rules', 'webhook-rotate-token']); + const tokenFile = path.join(tokenDir, '.switchbot', 'webhook-token'); + expect(fs.existsSync(tokenFile)).toBe(true); + const tokenContent = fs.readFileSync(tokenFile, 'utf-8').trim(); + expect(tokenContent.length).toBeGreaterThan(20); + expect(stdout.join(' ')).toMatch(/webhook bearer rotated/i); + }); + + it('--json reports status:rotated with filePath and tokenLength', async () => { + const { stdout } = await runCli(['--json', 'rules', 'webhook-rotate-token']); + const body = JSON.parse(stdout.join('')) as { data: { status: string; filePath: string; tokenLength: number } }; + expect(body.data.status).toBe('rotated'); + expect(typeof body.data.filePath).toBe('string'); + expect(body.data.tokenLength).toBeGreaterThan(20); + }); + + it('produces a 
different token on each rotation', async () => { + const tokenFile = path.join(tokenDir, '.switchbot', 'webhook-token'); + await runCli(['rules', 'webhook-rotate-token']); + const t1 = fs.readFileSync(tokenFile, 'utf-8').trim(); + await runCli(['rules', 'webhook-rotate-token']); + const t2 = fs.readFileSync(tokenFile, 'utf-8').trim(); + expect(t1).not.toBe(t2); + }); +}); + +describe('rules webhook-show-token', () => { + let tokenDir: string; + + beforeEach(() => { + tokenDir = fs.mkdtempSync(path.join(os.tmpdir(), 'sbwht-')); + vi.spyOn(os, 'homedir').mockReturnValue(tokenDir); + }); + + afterEach(() => { + vi.restoreAllMocks(); + fs.rmSync(tokenDir, { recursive: true, force: true }); + }); + + it('prints the token to stdout in human mode (creates if absent)', async () => { + const { stdout } = await runCli(['rules', 'webhook-show-token']); + expect(stdout.join('').trim().length).toBeGreaterThan(20); + }); + + it('returns the same token on repeated calls (stable, not rotating)', async () => { + const { stdout: s1 } = await runCli(['rules', 'webhook-show-token']); + const { stdout: s2 } = await runCli(['rules', 'webhook-show-token']); + expect(s1.join('').trim()).toBe(s2.join('').trim()); + }); + + it('--json reports filePath and tokenLength', async () => { + const { stdout } = await runCli(['--json', 'rules', 'webhook-show-token']); + const body = JSON.parse(stdout.join('')) as { data: { filePath: string; tokenLength: number } }; + expect(typeof body.data.filePath).toBe('string'); + expect(body.data.tokenLength).toBeGreaterThan(20); + }); +}); + +describe('rules suggest', () => { + it('exits with a Commander usage error when --intent is missing', async () => { + const program = makeProgram(); + await expect( + program.parseAsync(['node', 'test', 'rules', 'suggest']), + ).rejects.toThrow(); + }); + + it('outputs YAML to stdout when trigger can be inferred from intent', async () => { + const stdoutLines: string[] = []; + const writeSpy = vi.spyOn(process.stdout, 
'write').mockImplementation((chunk: unknown) => { + stdoutLines.push(String(chunk)); + return true; + }); + try { + await runCli(['rules', 'suggest', '--intent', 'turn on light when motion detected']); + } finally { + writeSpy.mockRestore(); + } + const yaml = stdoutLines.join(''); + expect(yaml).toContain('name:'); + expect(yaml).toContain('when:'); + expect(yaml).toContain('then:'); + }); + + it('--json outputs structured rule + rule_yaml + warnings', async () => { + const { stdout } = await runCli(['--json', 'rules', 'suggest', '--intent', 'turn on lights at 8am every morning']); + const body = JSON.parse(stdout.join('')) as { data: { rule: Record; rule_yaml: string; warnings: string[] } }; + expect(body.data).toHaveProperty('rule'); + expect(body.data).toHaveProperty('rule_yaml'); + expect(Array.isArray(body.data.warnings)).toBe(true); + expect(body.data.rule.name).toBe('turn on lights at 8am every morning'); + }); + + it('writes YAML to --out file instead of stdout', async () => { + const outDir = fs.mkdtempSync(path.join(os.tmpdir(), 'sbsug-')); + const outFile = path.join(outDir, 'rule.yaml'); + try { + const { stdout } = await runCli([ + 'rules', 'suggest', + '--intent', 'turn on fan when button pressed', + '--out', outFile, + ]); + expect(fs.existsSync(outFile)).toBe(true); + const content = fs.readFileSync(outFile, 'utf-8'); + expect(content).toContain('name:'); + expect(stdout.join(' ')).toMatch(/rule YAML written/i); + } finally { + fs.rmSync(outDir, { recursive: true, force: true }); + } + }); }); diff --git a/tests/commands/scenes.test.ts b/tests/commands/scenes.test.ts index aa1a672..f79064d 100644 --- a/tests/commands/scenes.test.ts +++ b/tests/commands/scenes.test.ts @@ -298,4 +298,94 @@ describe('scenes command', () => { expect((out.error as Record).message).toMatch(/scene not found/i); }); }); + + describe('validate', () => { + function mockScenes() { + apiMock.__instance.get.mockResolvedValue({ + data: { + body: [ + { sceneId: 'V1', sceneName: 
'Sunrise' }, + { sceneId: 'V2', sceneName: 'Sunset' }, + ], + }, + }); + } + + it('--json exits 0 with ok:true when all supplied IDs exist', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'validate', 'V1', 'V2']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: { ok: boolean; results: unknown[] } }; + expect(body.data.ok).toBe(true); + expect(body.data.results).toHaveLength(2); + }); + + it('--json exits 1 with ok:false when a supplied ID does not exist', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'validate', 'V1', 'MISSING']); + expect(res.exitCode).toBe(1); + const body = JSON.parse(res.stdout[0]) as { data: { ok: boolean; results: Array<{ sceneId: string; valid: boolean }> } }; + expect(body.data.ok).toBe(false); + const missingEntry = body.data.results.find((r) => r.sceneId === 'MISSING'); + expect(missingEntry?.valid).toBe(false); + }); + + it('human mode exits 0 and prints ✓ for valid scenes', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['scenes', 'validate', 'V1']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toContain('✓'); + }); + + it('validates all scenes when no IDs are supplied', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'validate']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: { ok: boolean; results: unknown[] } }; + expect(body.data.results).toHaveLength(2); + }); + }); + + describe('simulate', () => { + function mockScenes() { + apiMock.__instance.get.mockResolvedValue({ + data: { + body: [ + { sceneId: 'SIM1', sceneName: 'Good Night' }, + ], + }, + }); + } + + it('--json returns simulated:true with wouldSend details', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'simulate', 
'SIM1']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: Record }; + expect(body.data.simulated).toBe(true); + expect(body.data.sceneId).toBe('SIM1'); + expect(body.data.sceneName).toBe('Good Night'); + const wouldSend = body.data.wouldSend as Record; + expect(wouldSend.method).toBe('POST'); + expect(wouldSend.url).toContain('SIM1'); + }); + + it('human mode prints sceneId, sceneName and wouldSend', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['scenes', 'simulate', 'SIM1']); + expect(res.exitCode).toBeNull(); + const out = res.stdout.join('\n'); + expect(out).toContain('SIM1'); + expect(out).toContain('Good Night'); + expect(out).toContain('POST'); + }); + + it('--json exits 2 with error envelope for unknown sceneId', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'simulate', 'UNKNOWN']); + expect(res.exitCode).toBe(2); + const out = JSON.parse(res.stdout.find((l) => l.trim().startsWith('{'))!) 
as Record; + expect((out.error as Record).message).toMatch(/scene not found/i); + }); + }); }); diff --git a/tests/commands/schema.test.ts b/tests/commands/schema.test.ts index 5552ad9..056bd43 100644 --- a/tests/commands/schema.test.ts +++ b/tests/commands/schema.test.ts @@ -156,4 +156,32 @@ describe('schema export B3 slim flags', () => { expect(parsed.types[0].category).toBeUndefined(); expect(parsed.types[0].description).toBeUndefined(); }); + + // ===================================================================== + // --capabilities flag (MVP 5) + // ===================================================================== + describe('schema export --capabilities', () => { + it('adds commandsMeta to each device type entry', async () => { + const res = await runCli(registerSchemaCommand, ['schema', 'export', '--capabilities', '--type', 'Bot']); + expect(res.exitCode).toBeNull(); + const parsed = JSON.parse(res.stdout.join('')).data; + expect(parsed.types.length).toBeGreaterThan(0); + const first = parsed.types[0] as Record; + expect(first).toHaveProperty('commandsMeta'); + const meta = first.commandsMeta as Record; + expect(typeof meta).toBe('object'); + // commandsMeta contains entries from COMMAND_META for 'devices *' commands + expect(Object.keys(meta).length).toBeGreaterThan(0); + const firstEntry = Object.values(meta)[0] as Record; + expect(firstEntry).toHaveProperty('agentSafetyTier'); + expect(firstEntry).toHaveProperty('mutating'); + }); + + it('normal export without --capabilities does not include commandsMeta', async () => { + const res = await runCli(registerSchemaCommand, ['schema', 'export', '--type', 'Bot']); + const parsed = JSON.parse(res.stdout.join('')).data; + const first = parsed.types[0] as Record; + expect(first).not.toHaveProperty('commandsMeta'); + }); + }); }); diff --git a/tests/commands/status-sync.test.ts b/tests/commands/status-sync.test.ts new file mode 100644 index 0000000..c615f34 --- /dev/null +++ 
b/tests/commands/status-sync.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import type { StatusSyncStatus, StopStatusSyncResult } from '../../src/status-sync/manager.js'; + +const managerMock = vi.hoisted(() => ({ + getStatusSyncStatus: vi.fn<[], StatusSyncStatus>(), + startStatusSync: vi.fn<[], StatusSyncStatus>(), + stopStatusSync: vi.fn<[], StopStatusSyncResult>(), + runStatusSyncForeground: vi.fn<[], Promise>(), +})); + +vi.mock('../../src/status-sync/manager.js', () => managerMock); + +import { registerStatusSyncCommand } from '../../src/commands/status-sync.js'; +import { runCli } from '../helpers/cli.js'; + +const NOT_RUNNING: StatusSyncStatus = { + running: false, + pid: null, + startedAt: null, + stateDir: '/mock/.switchbot/status-sync', + stateFile: '/mock/.switchbot/status-sync/state.json', + stdoutLog: '/mock/.switchbot/status-sync/stdout.log', + stderrLog: '/mock/.switchbot/status-sync/stderr.log', + command: null, + openclawUrl: null, + openclawModel: null, + topic: null, + configPath: null, + profile: null, +}; + +const RUNNING: StatusSyncStatus = { + ...NOT_RUNNING, + running: true, + pid: 9876, + startedAt: '2026-04-25T12:00:00.000Z', + command: ['node', 'src/index.js', 'status-sync', 'run'], + openclawUrl: 'http://localhost:18789', + openclawModel: 'home-agent', +}; + +describe('status-sync command', () => { + beforeEach(() => { + managerMock.getStatusSyncStatus.mockReset().mockReturnValue(NOT_RUNNING); + managerMock.startStatusSync.mockReset().mockReturnValue(RUNNING); + managerMock.stopStatusSync.mockReset().mockReturnValue({ + stopped: false, stale: false, pid: null, status: NOT_RUNNING, + }); + managerMock.runStatusSyncForeground.mockReset().mockResolvedValue(0); + }); + + describe('status', () => { + it('--json exits 0 with running:false when not running', async () => { + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'status']); + expect(res.exitCode).toBeNull(); + 
const body = JSON.parse(res.stdout.join('')) as { data: StatusSyncStatus }; + expect(body.data.running).toBe(false); + expect(body.data.pid).toBeNull(); + }); + + it('--json exits 0 with running:true and pid when running', async () => { + managerMock.getStatusSyncStatus.mockReturnValue(RUNNING); + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'status']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StatusSyncStatus }; + expect(body.data.running).toBe(true); + expect(body.data.pid).toBe(9876); + }); + + it('human mode prints "not running" when not running', async () => { + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'status']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/not running/i); + }); + }); + + describe('start', () => { + it('--json exits 0 and returns running state from startStatusSync', async () => { + managerMock.startStatusSync.mockReturnValue(RUNNING); + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'start']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StatusSyncStatus }; + expect(body.data.running).toBe(true); + expect(body.data.pid).toBe(9876); + expect(managerMock.startStatusSync).toHaveBeenCalled(); + }); + + it('human mode prints started message with pid', async () => { + managerMock.startStatusSync.mockReturnValue(RUNNING); + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'start']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/started.*9876/i); + }); + }); + + describe('run', () => { + it('calls runStatusSyncForeground and exits 0 when it resolves 0', async () => { + managerMock.runStatusSyncForeground.mockResolvedValue(0); + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'run']); + expect(res.exitCode).toBeNull(); + 
expect(managerMock.runStatusSyncForeground).toHaveBeenCalled(); + }); + + it('exits with the code returned by runStatusSyncForeground when non-zero', async () => { + managerMock.runStatusSyncForeground.mockResolvedValue(1); + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'run']); + expect(res.exitCode).toBe(1); + }); + }); + + describe('stop', () => { + it('--json exits 0 with stopped:false when nothing is running', async () => { + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'stop']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StopStatusSyncResult }; + expect(body.data.stopped).toBe(false); + expect(body.data.pid).toBeNull(); + }); + + it('human mode prints "not running" when nothing to stop', async () => { + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'stop']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/not running/i); + }); + + it('--json exits 0 with stopped:true when a running bridge is stopped', async () => { + managerMock.stopStatusSync.mockReturnValue({ + stopped: true, stale: false, pid: 9876, status: NOT_RUNNING, + }); + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'stop']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StopStatusSyncResult }; + expect(body.data.stopped).toBe(true); + expect(body.data.pid).toBe(9876); + }); + }); +}); diff --git a/tests/commands/upgrade-check.test.ts b/tests/commands/upgrade-check.test.ts index 97f40fb..f4039c4 100644 --- a/tests/commands/upgrade-check.test.ts +++ b/tests/commands/upgrade-check.test.ts @@ -113,3 +113,59 @@ describe('upgrade-check action — prerelease guard', () => { expect(out).not.toMatch(/Update available/i); }); }); + +// ── happy path and network error ───────────────────────────────────────────── +describe('upgrade-check action — version comparison', () => { + 
afterEach(() => { + httpsMock.get.mockReset(); + }); + + it('--json: when up to date (registry returns same version), upToDate:true exits 0', async () => { + makeHttpsGet('3.1.1'); + const { registerUpgradeCheckCommand } = await import('../../src/commands/upgrade-check.js'); + const { runCli } = await import('../helpers/cli.js'); + + const res = await runCli(registerUpgradeCheckCommand, ['--json', 'upgrade-check']); + expect(res.exitCode).toBeNull(); + const line = res.stdout.find((l) => l.trim().startsWith('{')); + const out = JSON.parse(line!) as Record; + const data = (out.data ?? out) as Record; + expect(data.upToDate).toBe(true); + expect(data.updateAvailable).toBe(false); + expect(data.installCommand).toBeNull(); + }); + + it('--json: when newer version available, updateAvailable:true and exits 1', async () => { + makeHttpsGet('99.0.0'); + const { registerUpgradeCheckCommand } = await import('../../src/commands/upgrade-check.js'); + const { runCli } = await import('../helpers/cli.js'); + + const res = await runCli(registerUpgradeCheckCommand, ['--json', 'upgrade-check']); + // JSON mode returns early without calling process.exit(1) — that only happens in human mode + expect(res.exitCode).toBeNull(); + const line = res.stdout.find((l) => l.trim().startsWith('{')); + const out = JSON.parse(line!) as Record; + const data = (out.data ?? 
out) as Record; + expect(data.updateAvailable).toBe(true); + expect(data.breakingChange).toBe(true); + expect(typeof data.installCommand).toBe('string'); + }); + + it('--json: network error produces ok:false envelope and exits 1', async () => { + httpsMock.get.mockImplementation((_url: unknown, _opts: unknown, _cb: unknown) => { + const req = Object.assign(new EventEmitter(), { destroy: vi.fn() }); + process.nextTick(() => req.emit('error', new Error('ECONNREFUSED'))); + return req; + }); + const { registerUpgradeCheckCommand } = await import('../../src/commands/upgrade-check.js'); + const { runCli } = await import('../helpers/cli.js'); + + const res = await runCli(registerUpgradeCheckCommand, ['--json', 'upgrade-check']); + expect(res.exitCode).toBe(1); + const line = res.stdout.find((l) => l.trim().startsWith('{')); + const out = JSON.parse(line!) as Record; + const data = (out.data ?? out) as Record; + expect(data.ok).toBe(false); + expect(typeof data.error).toBe('string'); + }); +}); diff --git a/tests/status-sync/manager.test.ts b/tests/status-sync/manager.test.ts index 5300b96..e957bec 100644 --- a/tests/status-sync/manager.test.ts +++ b/tests/status-sync/manager.test.ts @@ -1,5 +1,5 @@ import path from 'node:path'; -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterAll, afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; const fsMock = vi.hoisted(() => ({ existsSync: vi.fn(), @@ -40,7 +40,13 @@ import { describe('status-sync manager', () => { const originalArgv = process.argv; - const killSpy = vi.spyOn(process, 'kill'); + const originalKill = process.kill; + const killSpy = vi.fn(); + (process as unknown as { kill: typeof process.kill }).kill = killSpy as unknown as typeof process.kill; + + afterAll(() => { + (process as unknown as { kill: typeof process.kill }).kill = originalKill; + }); beforeEach(() => { process.argv = ['node', '/repo/dist/index.js']; diff --git a/tests/version.test.ts 
b/tests/version.test.ts index fef53f3..e82bff5 100644 --- a/tests/version.test.ts +++ b/tests/version.test.ts @@ -12,6 +12,11 @@ const pkg = JSON.parse( ) as { version: string }; describe('CLI --version', () => { + it('keeps a node shebang on dist/index.js for npm bin execution', () => { + const cli = readFileSync(path.join(here, '..', 'dist', 'index.js'), 'utf-8'); + expect(cli.startsWith('#!/usr/bin/env node\n')).toBe(true); + }); + it('matches package.json version', () => { // Regression guard for the v1.3.1 bug where src/index.ts hardcoded a // stale version string. execFileSync + process.execPath avoids shell