From 88667cce36feb69f9fed6a38635a69120b377b89 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:26:48 +0800 Subject: [PATCH 01/26] fix(capabilities): export COMMAND_META, add rules explain entry, add exhaustive coverage guard tests --- src/commands/capabilities.ts | 3 +- tests/commands/capabilities-meta.test.ts | 101 +++++++++++++++++++++++ 2 files changed, 103 insertions(+), 1 deletion(-) create mode 100644 tests/commands/capabilities-meta.test.ts diff --git a/src/commands/capabilities.ts b/src/commands/capabilities.ts index 9f29c1b..e43ede6 100644 --- a/src/commands/capabilities.ts +++ b/src/commands/capabilities.ts @@ -115,7 +115,7 @@ const DESTRUCTIVE_LOCAL = meta(true, false, false, 'destructive', 'local', 20); const DESTRUCTIVE_REMOTE = meta(true, true, false, 'destructive', 'deviceDependent', 1200); const READ_NONE = meta(false, false, false, 'read', 'none', 50); -const COMMAND_META: Record = { +export const COMMAND_META: Record = { 'agent-bootstrap': READ_LOCAL, 'auth keychain describe': READ_LOCAL, 'auth keychain get': READ_LOCAL, @@ -197,6 +197,7 @@ const COMMAND_META: Record = { 'rules doctor': READ_LOCAL, 'rules summary': READ_LOCAL, 'rules last-fired': READ_LOCAL, + 'rules explain': READ_LOCAL, 'schema export': READ_LOCAL, 'scenes list': READ_REMOTE, 'scenes execute': ACTION_REMOTE, diff --git a/tests/commands/capabilities-meta.test.ts b/tests/commands/capabilities-meta.test.ts new file mode 100644 index 0000000..a743a1b --- /dev/null +++ b/tests/commands/capabilities-meta.test.ts @@ -0,0 +1,101 @@ +import { describe, it, expect, vi } from 'vitest'; + +// ── mocks required for importing capabilities.ts ──────────────────────────── +const catalogMock = vi.hoisted(() => ({ + getEffectiveCatalog: vi.fn(() => []), + deriveSafetyTier: vi.fn(() => 'action' as const), + deriveStatusQueries: vi.fn(() => []), +})); +const cacheMock = vi.hoisted(() => ({ loadCache: vi.fn(() => ({ list: [], status: {} })) })); 
+vi.mock('../../src/devices/catalog.js', () => catalogMock); +vi.mock('../../src/devices/cache.js', () => cacheMock); + +import { COMMAND_META } from '../../src/commands/capabilities.js'; +import { registerCapabilitiesCommand } from '../../src/commands/capabilities.js'; +import { runCli } from '../helpers/cli.js'; + +// ── comprehensive list of every CLI leaf command ────────────────────────────── +// Regression guard: when a new subcommand is added to the CLI, it MUST be added +// here AND to COMMAND_META. If either is missing, this test fails with a clear +// "missing: " message. +const ALL_EXPECTED_LEAF_COMMANDS = [ + 'agent-bootstrap', + 'auth keychain describe', 'auth keychain get', 'auth keychain set', + 'auth keychain delete', 'auth keychain migrate', + 'cache show', 'cache clear', + 'capabilities', + 'catalog path', 'catalog show', 'catalog search', 'catalog diff', 'catalog refresh', + 'completion', + 'config set-token', 'config show', 'config list-profiles', 'config agent-profile', + 'daemon start', 'daemon stop', 'daemon status', 'daemon reload', + 'devices list', 'devices status', 'devices command', 'devices types', + 'devices commands', 'devices describe', 'devices batch', 'devices watch', + 'devices explain', 'devices expand', + 'devices meta set', 'devices meta get', 'devices meta list', 'devices meta clear', + 'doctor', + 'events tail', 'events mqtt-tail', + 'health check', 'health serve', + 'history show', 'history replay', 'history range', 'history stats', + 'history verify', 'history aggregate', + 'install', + 'mcp serve', + 'plan schema', 'plan validate', 'plan suggest', 'plan run', + 'plan save', 'plan list', 'plan review', 'plan approve', 'plan execute', + 'policy validate', 'policy new', 'policy migrate', 'policy diff', + 'policy add-rule', 'policy backup', 'policy restore', + 'quota status', 'quota reset', + 'rules suggest', 'rules lint', 'rules list', 'rules run', 'rules reload', + 'rules tail', 'rules replay', 'rules webhook-rotate-token', 
'rules webhook-show-token', + 'rules conflicts', 'rules doctor', 'rules summary', 'rules last-fired', + 'rules explain', + 'schema export', + 'scenes list', 'scenes execute', 'scenes describe', + 'scenes validate', 'scenes simulate', 'scenes explain', + 'status-sync run', 'status-sync start', 'status-sync stop', 'status-sync status', + 'uninstall', + 'upgrade-check', + 'webhook setup', 'webhook query', 'webhook update', 'webhook delete', +] as const; + +// MCP tool names and other prefixes that legitimately live in COMMAND_META +// but are NOT CLI leaf commands. +const NON_CLI_PREFIXES = [ + 'list_', 'get_', 'send_', 'describe_', 'run_', 'search_', + 'account_', 'query_', 'aggregate_', +]; + +describe('COMMAND_META — exhaustive coverage guard', () => { + it('has an entry for every known CLI leaf command', () => { + const missing = ALL_EXPECTED_LEAF_COMMANDS.filter((cmd) => !(cmd in COMMAND_META)); + expect(missing, `COMMAND_META missing entries: ${missing.join(', ')}`).toHaveLength(0); + }); + + it('does not have phantom entries for commands that do not exist', () => { + const knownSet = new Set(ALL_EXPECTED_LEAF_COMMANDS); + const phantom = Object.keys(COMMAND_META).filter( + (k) => !knownSet.has(k) && !NON_CLI_PREFIXES.some((p) => k.startsWith(p)), + ); + expect(phantom, `Phantom COMMAND_META entries: ${phantom.join(', ')}`).toHaveLength(0); + }); +}); + +describe('capabilities command — regression output tests', () => { + it('produces non-empty JSON output with --compact (regression: rules explain missing)', async () => { + const res = await runCli(registerCapabilitiesCommand, ['capabilities', '--compact']); + expect(res.exitCode).toBeNull(); + expect(res.stderr.join('')).not.toMatch(/coverage error/i); + const out = res.stdout.join(''); + expect(out.length).toBeGreaterThan(50); + const parsed = JSON.parse(out) as { data: { commands: Array<{ name: string }> } }; + expect(parsed).toHaveProperty('data'); + expect(parsed.data).toHaveProperty('commands'); + }); + + 
it('COMMAND_META has rules explain entry with READ_LOCAL tier', () => { + const entry = COMMAND_META['rules explain']; + expect(entry, 'COMMAND_META missing rules explain').toBeDefined(); + expect(entry.agentSafetyTier).toBe('read'); + expect(entry.mutating).toBe(false); + expect(entry.consumesQuota).toBe(false); + }); +}); From a947cfc630495ea38ae0cb3a5db650f1e997f7a3 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:26:57 +0800 Subject: [PATCH 02/26] test(devices): assert exit code 1 for API error 190, add happy-path exit code guard --- tests/commands/devices.test.ts | 40 ++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/tests/commands/devices.test.ts b/tests/commands/devices.test.ts index 739f6b6..202745b 100644 --- a/tests/commands/devices.test.ts +++ b/tests/commands/devices.test.ts @@ -35,6 +35,7 @@ vi.mock('../../src/api/client.js', () => ({ })); import { registerDevicesCommand } from '../../src/commands/devices.js'; +import { ApiError } from '../../src/api/client.js'; import { runCli } from '../helpers/cli.js'; import { updateCacheFromDeviceList, resetListCache } from '../../src/devices/cache.js'; @@ -2444,4 +2445,43 @@ describe('devices command', () => { expect(lock?.safetyTier).toBe('mutation'); }); }); + + // ===================================================================== + // API error exit codes (P0 regression guard) + // ===================================================================== + describe('devices status — API error exit codes', () => { + beforeEach(() => { + apiMock.__instance.get.mockReset(); + apiMock.__instance.post.mockReset(); + }); + + it('exits 1 when API returns code 190 in human mode', async () => { + apiMock.__instance.get.mockRejectedValue( + new ApiError('Device internal error', 190), + ); + const res = await runCli(registerDevicesCommand, ['devices', 'status', 'BOGUS123']); + expect(res.exitCode).toBe(1); + expect(res.stderr.join('')).toMatch(/190/); + }); + + it('exits 1 
when API returns code 190 in JSON mode', async () => { + apiMock.__instance.get.mockRejectedValue( + new ApiError('Device internal error', 190), + ); + const res = await runCli(registerDevicesCommand, ['--json', 'devices', 'status', 'BOGUS123']); + expect(res.exitCode).toBe(1); + const out = res.stdout.join(''); + const parsed = JSON.parse(out) as { error?: { code: number } }; + expect(parsed).toHaveProperty('error'); + expect(parsed.error?.code).toBe(190); + }); + + it('exits 0 for a successful status call', async () => { + apiMock.__instance.get.mockResolvedValue({ + data: { statusCode: 100, body: { power: 'on', battery: 90 } }, + }); + const res = await runCli(registerDevicesCommand, ['devices', 'status', 'DEVICE123']); + expect(res.exitCode).toBeNull(); + }); + }); }); From c94796ee258aeef7db252af7c95c799a1178650c Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:27:31 +0800 Subject: [PATCH 03/26] docs: update test count to 1889 --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 458f7b5..903bc0c 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,7 @@ Under the hood every surface shares the same catalog, cache, and HMAC client — - 🎨 **Dual output modes** — colorized tables by default; `--json` passthrough for `jq` and scripting - 🔐 **Secure credentials** — HMAC-SHA256 signed requests; config file written with `0600`; env-var override for CI - 🔍 **Dry-run mode** — preview every mutating request before it hits the API -- 🧪 **Fully tested** — 1882 Vitest tests, mocked axios, zero network in CI +- 🧪 **Fully tested** — 1889 Vitest tests, mocked axios, zero network in CI - ⚡ **Shell completion** — Bash / Zsh / Fish / PowerShell ## Requirements @@ -1123,7 +1123,7 @@ npm install npm run dev -- # Run from TypeScript sources via tsx npm run build # Compile to dist/ -npm test # Run the Vitest suite (1882 tests) +npm test # Run the Vitest suite (1889 tests) npm run test:watch # Watch mode npm 
run test:coverage # Coverage report (v8, HTML + text) ``` @@ -1205,7 +1205,7 @@ src/ ├── format.ts # renderRows / filterFields / output-format dispatch ├── audit.ts # JSONL audit log writer └── quota.ts # Local daily-quota counter -tests/ # Vitest suite (1882 tests, mocked axios, no network) +tests/ # Vitest suite (1889 tests, mocked axios, no network) ``` ### Release flow From 746f2ac5e8f83c08f703fbe0b94b475dc9741ce7 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:33:13 +0800 Subject: [PATCH 04/26] fix(policy): relax alias deviceId pattern to accept hex MAC and lowercase IDs --- src/policy/schema/v0.2.json | 2 +- tests/commands/policy.test.ts | 51 +++++++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/src/policy/schema/v0.2.json b/src/policy/schema/v0.2.json index 58aa07e..3785b64 100644 --- a/src/policy/schema/v0.2.json +++ b/src/policy/schema/v0.2.json @@ -18,7 +18,7 @@ "description": "Unchanged from v0.1.", "additionalProperties": { "type": "string", - "pattern": "^[A-Z0-9]{2,}-[A-Z0-9-]+$" + "pattern": "^[A-Za-z0-9][A-Za-z0-9_-]{1,63}$" } }, diff --git a/tests/commands/policy.test.ts b/tests/commands/policy.test.ts index 1d57e03..927c422 100644 --- a/tests/commands/policy.test.ts +++ b/tests/commands/policy.test.ts @@ -489,4 +489,55 @@ describe('switchbot policy (commander surface)', () => { expect(out.data.restored).toBe(policyFile); }); }); + + // ===================================================================== + // alias deviceId format (P1 regression guard) + // ===================================================================== + describe('policy validate — alias deviceId format', () => { + let tmp: string; + + beforeEach(() => { + tmp = fs.mkdtempSync(path.join(os.tmpdir(), 'sbpolicy-alias-')); + }); + + afterEach(() => { + fs.rmSync(tmp, { recursive: true, force: true }); + }); + + function writePolicy(deviceId: string): string { + const p = path.join(tmp, 'policy.yaml'); + 
fs.writeFileSync(p, `version: "0.2"\naliases:\n my device: ${deviceId}\n`); + return p; + } + + it('accepts standard hyphenated IDs (01-202407090924-26354212)', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('01-202407090924-26354212')]); + expect(exitCode).toBe(0); + }); + + it('accepts 12-digit hex MAC without hyphen (28372F4C9C4A)', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('28372F4C9C4A')]); + expect(exitCode).toBe(0); + }); + + it('accepts lowercase hex MAC (b0e9fe51ef2e)', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('b0e9fe51ef2e')]); + expect(exitCode).toBe(0); + }); + + it('accepts IoT suffix format (28372F4C9C4A-vzwa)', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('28372F4C9C4A-vzwa')]); + expect(exitCode).toBe(0); + }); + + it('rejects single-char IDs', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('A')]); + expect(exitCode).not.toBe(0); + }); + + it('rejects IDs longer than 64 chars', () => { + const { exitCode } = runCli(['policy', 'validate', writePolicy('A'.repeat(65))]); + expect(exitCode).not.toBe(0); + }); + }); }); From ace7ed922f5c2d4e5c50a842c16d1f78fa750257 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:33:40 +0800 Subject: [PATCH 05/26] docs: update test count to 1895 --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 903bc0c..4bedc7f 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,7 @@ Under the hood every surface shares the same catalog, cache, and HMAC client — - 🎨 **Dual output modes** — colorized tables by default; `--json` passthrough for `jq` and scripting - 🔐 **Secure credentials** — HMAC-SHA256 signed requests; config file written with `0600`; env-var override for CI - 🔍 **Dry-run mode** — preview every mutating request before it hits the API -- 🧪 **Fully tested** — 1889 Vitest tests, mocked axios, zero 
network in CI +- 🧪 **Fully tested** — 1895 Vitest tests, mocked axios, zero network in CI - ⚡ **Shell completion** — Bash / Zsh / Fish / PowerShell ## Requirements @@ -1123,7 +1123,7 @@ npm install npm run dev -- # Run from TypeScript sources via tsx npm run build # Compile to dist/ -npm test # Run the Vitest suite (1889 tests) +npm test # Run the Vitest suite (1895 tests) npm run test:watch # Watch mode npm run test:coverage # Coverage report (v8, HTML + text) ``` @@ -1205,7 +1205,7 @@ src/ ├── format.ts # renderRows / filterFields / output-format dispatch ├── audit.ts # JSONL audit log writer └── quota.ts # Local daily-quota counter -tests/ # Vitest suite (1889 tests, mocked axios, no network) +tests/ # Vitest suite (1895 tests, mocked axios, no network) ``` ### Release flow From 55a4875a6a3d36a6cf78ea0aa06ca3ec667423d5 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:37:02 +0800 Subject: [PATCH 06/26] feat(agent-bootstrap): add --sections flag to project top-level payload keys --- src/commands/agent-bootstrap.ts | 30 ++++++++++++++++++++-- tests/commands/agent-bootstrap.test.ts | 35 ++++++++++++++++++++++++++ 2 files changed, 63 insertions(+), 2 deletions(-) diff --git a/src/commands/agent-bootstrap.ts b/src/commands/agent-bootstrap.ts index d744f56..ceda31f 100644 --- a/src/commands/agent-bootstrap.ts +++ b/src/commands/agent-bootstrap.ts @@ -1,5 +1,5 @@ import { Command } from 'commander'; -import { printJson } from '../utils/output.js'; +import { printJson, exitWithError } from '../utils/output.js'; import { loadCache } from '../devices/cache.js'; import { getEffectiveCatalog, @@ -103,6 +103,7 @@ async function readCredentialsBackend(): Promise { interface BootstrapOptions { compact?: boolean; + sections?: string; } export function registerAgentBootstrapCommand(program: Command): void { @@ -115,6 +116,10 @@ export function registerAgentBootstrapCommand(program: Command): void { '--compact', 'Emit an even smaller payload by dropping catalog 
descriptions and non-essential fields (target: <20 KB).', ) + .option( + '--sections ', + 'Comma-separated top-level sections to include (e.g. identity,devices,catalog). Omit for all sections.', + ) .addHelpText( 'after', ` @@ -226,6 +231,27 @@ Examples: : [], }; - printJson(payload); + const VALID_SECTIONS = new Set([ + 'schemaVersion', 'generatedAt', 'cliVersion', 'identity', 'quickReference', + 'safetyTiers', 'nameStrategies', 'profile', 'quota', 'policyStatus', + 'credentialsBackend', 'devices', 'catalog', 'hints', + ]); + + let finalPayload: Record = payload as Record; + if (opts.sections) { + const requested = opts.sections.split(',').map((s) => s.trim()).filter(Boolean); + const unknown = requested.filter((s) => !VALID_SECTIONS.has(s)); + if (unknown.length > 0) { + exitWithError({ + code: 2, + kind: 'usage', + message: `Unknown section(s): ${unknown.join(', ')}. Valid sections: ${[...VALID_SECTIONS].join(', ')}.`, + }); + } + finalPayload = Object.fromEntries( + Object.entries(finalPayload).filter(([k]) => requested.includes(k)), + ); + } + printJson(finalPayload); }); } diff --git a/tests/commands/agent-bootstrap.test.ts b/tests/commands/agent-bootstrap.test.ts index 8c169d2..ee21c31 100644 --- a/tests/commands/agent-bootstrap.test.ts +++ b/tests/commands/agent-bootstrap.test.ts @@ -6,6 +6,7 @@ import path from 'node:path'; import { Command } from 'commander'; import { registerAgentBootstrapCommand } from '../../src/commands/agent-bootstrap.js'; import { resetListCache } from '../../src/devices/cache.js'; +import { runCli } from '../helpers/cli.js'; async function captureJson(fn: () => void | Promise): Promise { const lines: string[] = []; @@ -224,4 +225,38 @@ describe('agent-bootstrap', () => { delete process.env.SWITCHBOT_POLICY_PATH; } }); + + // ===================================================================== + // --sections flag (P1) + // ===================================================================== + describe('agent-bootstrap 
--sections', () => { + it('restricts output to the requested top-level keys', async () => { + const res = await runCli(registerAgentBootstrapCommand, [ + 'agent-bootstrap', '--sections', 'identity,cliVersion', + ]); + expect(res.exitCode).toBeNull(); + const out = JSON.parse(res.stdout.join('')) as { data: Record }; + const keys = Object.keys(out.data); + expect(keys).toContain('identity'); + expect(keys).toContain('cliVersion'); + expect(keys).not.toContain('catalog'); + expect(keys).not.toContain('hints'); + expect(keys).not.toContain('quota'); + }); + + it('includes all keys when --sections is not provided', async () => { + const res = await runCli(registerAgentBootstrapCommand, ['agent-bootstrap', '--compact']); + const out = JSON.parse(res.stdout.join('')) as { data: Record }; + expect(Object.keys(out.data)).toContain('catalog'); + expect(Object.keys(out.data)).toContain('hints'); + }); + + it('exits 2 and prints hint when an unknown section name is requested', async () => { + const res = await runCli(registerAgentBootstrapCommand, [ + 'agent-bootstrap', '--sections', 'identity,doesNotExist', + ]); + expect(res.exitCode).toBe(2); + expect(res.stderr.join('')).toMatch(/unknown section.*doesNotExist/i); + }); + }); }); From 8dd6ffc00e19aac5473bd60e1fef9bed673db210 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:37:12 +0800 Subject: [PATCH 07/26] docs: update test count to 1898 --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 4bedc7f..3e91cf6 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,7 @@ Under the hood every surface shares the same catalog, cache, and HMAC client — - 🎨 **Dual output modes** — colorized tables by default; `--json` passthrough for `jq` and scripting - 🔐 **Secure credentials** — HMAC-SHA256 signed requests; config file written with `0600`; env-var override for CI - 🔍 **Dry-run mode** — preview every mutating request before it hits the API -- 🧪 **Fully 
tested** — 1895 Vitest tests, mocked axios, zero network in CI +- 🧪 **Fully tested** — 1898 Vitest tests, mocked axios, zero network in CI - ⚡ **Shell completion** — Bash / Zsh / Fish / PowerShell ## Requirements @@ -1123,7 +1123,7 @@ npm install npm run dev -- # Run from TypeScript sources via tsx npm run build # Compile to dist/ -npm test # Run the Vitest suite (1895 tests) +npm test # Run the Vitest suite (1898 tests) npm run test:watch # Watch mode npm run test:coverage # Coverage report (v8, HTML + text) ``` @@ -1205,7 +1205,7 @@ src/ ├── format.ts # renderRows / filterFields / output-format dispatch ├── audit.ts # JSONL audit log writer └── quota.ts # Local daily-quota counter -tests/ # Vitest suite (1895 tests, mocked axios, no network) +tests/ # Vitest suite (1898 tests, mocked axios, no network) ``` ### Release flow From 76d43843900ee4996619fd54c16badccc48bb611 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:40:40 +0800 Subject: [PATCH 08/26] perf: switch build:prod to esbuild bundler, inline pure-JS deps into single file --- package-lock.json | 699 +++++++++++++++++++++++++++----- package.json | 3 +- scripts/bundle.mjs | 36 ++ tests/build/bundle-size.test.ts | 17 + 4 files changed, 647 insertions(+), 108 deletions(-) create mode 100644 scripts/bundle.mjs create mode 100644 tests/build/bundle-size.test.ts diff --git a/package-lock.json b/package-lock.json index 97aa082..8c9cc61 100644 --- a/package-lock.json +++ b/package-lock.json @@ -32,6 +32,7 @@ "@types/node": "^22.10.7", "@types/uuid": "^10.0.0", "@vitest/coverage-v8": "^2.1.9", + "esbuild": "^0.28.0", "markdownlint-cli": "^0.48.0", "tsx": "^4.19.2", "typescript": "^5.7.3", @@ -132,9 +133,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", - "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "version": "0.28.0", + 
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.28.0.tgz", + "integrity": "sha512-lhRUCeuOyJQURhTxl4WkpFTjIsbDayJHih5kZC1giwE+MhIzAb7mEsQMqMf18rHLsrb5qI1tafG20mLxEWcWlA==", "cpu": [ "ppc64" ], @@ -149,9 +150,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", - "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.28.0.tgz", + "integrity": "sha512-wqh0ByljabXLKHeWXYLqoJ5jKC4XBaw6Hk08OfMrCRd2nP2ZQ5eleDZC41XHyCNgktBGYMbqnrJKq/K/lzPMSQ==", "cpu": [ "arm" ], @@ -166,9 +167,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", - "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.28.0.tgz", + "integrity": "sha512-+WzIXQOSaGs33tLEgYPYe/yQHf0WTU0X42Jca3y8NWMbUVhp7rUnw+vAsRC/QiDrdD31IszMrZy+qwPOPjd+rw==", "cpu": [ "arm64" ], @@ -183,9 +184,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", - "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.28.0.tgz", + "integrity": "sha512-+VJggoaKhk2VNNqVL7f6S189UzShHC/mR9EE8rDdSkdpN0KflSwWY/gWjDrNxxisg8Fp1ZCD9jLMo4m0OUfeUA==", "cpu": [ "x64" ], @@ -200,9 +201,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.27.7", - "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", - "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.28.0.tgz", + "integrity": "sha512-0T+A9WZm+bZ84nZBtk1ckYsOvyA3x7e2Acj1KdVfV4/2tdG4fzUp91YHx+GArWLtwqp77pBXVCPn2We7Letr0Q==", "cpu": [ "arm64" ], @@ -217,9 +218,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", - "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.28.0.tgz", + "integrity": "sha512-fyzLm/DLDl/84OCfp2f/XQ4flmORsjU7VKt8HLjvIXChJoFFOIL6pLJPH4Yhd1n1gGFF9mPwtlN5Wf82DZs+LQ==", "cpu": [ "x64" ], @@ -234,9 +235,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", - "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.28.0.tgz", + "integrity": "sha512-l9GeW5UZBT9k9brBYI+0WDffcRxgHQD8ShN2Ur4xWq/NFzUKm3k5lsH4PdaRgb2w7mI9u61nr2gI2mLI27Nh3Q==", "cpu": [ "arm64" ], @@ -251,9 +252,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", - "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.28.0.tgz", + "integrity": 
"sha512-BXoQai/A0wPO6Es3yFJ7APCiKGc1tdAEOgeTNy3SsB491S3aHn4S4r3e976eUnPdU+NbdtmBuLncYir2tMU9Nw==", "cpu": [ "x64" ], @@ -268,9 +269,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", - "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.28.0.tgz", + "integrity": "sha512-CjaaREJagqJp7iTaNQjjidaNbCKYcd4IDkzbwwxtSvjI7NZm79qiHc8HqciMddQ6CKvJT6aBd8lO9kN/ZudLlw==", "cpu": [ "arm" ], @@ -285,9 +286,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", - "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.28.0.tgz", + "integrity": "sha512-RVyzfb3FWsGA55n6WY0MEIEPURL1FcbhFE6BffZEMEekfCzCIMtB5yyDcFnVbTnwk+CLAgTujmV/Lgvih56W+A==", "cpu": [ "arm64" ], @@ -302,9 +303,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", - "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.28.0.tgz", + "integrity": "sha512-KBnSTt1kxl9x70q+ydterVdl+Cn0H18ngRMRCEQfrbqdUuntQQ0LoMZv47uB97NljZFzY6HcfqEZ2SAyIUTQBQ==", "cpu": [ "ia32" ], @@ -319,9 +320,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", - "integrity": 
"sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.28.0.tgz", + "integrity": "sha512-zpSlUce1mnxzgBADvxKXX5sl8aYQHo2ezvMNI8I0lbblJtp8V4odlm3Yzlj7gPyt3T8ReksE6bK+pT3WD+aJRg==", "cpu": [ "loong64" ], @@ -336,9 +337,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", - "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.28.0.tgz", + "integrity": "sha512-2jIfP6mmjkdmeTlsX/9vmdmhBmKADrWqN7zcdtHIeNSCH1SqIoNI63cYsjQR8J+wGa4Y5izRcSHSm8K3QWmk3w==", "cpu": [ "mips64el" ], @@ -353,9 +354,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", - "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.28.0.tgz", + "integrity": "sha512-bc0FE9wWeC0WBm49IQMPSPILRocGTQt3j5KPCA8os6VprfuJ7KD+5PzESSrJ6GmPIPJK965ZJHTUlSA6GNYEhg==", "cpu": [ "ppc64" ], @@ -370,9 +371,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", - "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.28.0.tgz", + "integrity": "sha512-SQPZOwoTTT/HXFXQJG/vBX8sOFagGqvZyXcgLA3NhIqcBv1BJU1d46c0rGcrij2B56Z2rNiSLaZOYW5cUk7yLQ==", "cpu": [ "riscv64" ], @@ 
-387,9 +388,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", - "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.28.0.tgz", + "integrity": "sha512-SCfR0HN8CEEjnYnySJTd2cw0k9OHB/YFzt5zgJEwa+wL/T/raGWYMBqwDNAC6dqFKmJYZoQBRfHjgwLHGSrn3Q==", "cpu": [ "s390x" ], @@ -404,9 +405,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", - "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.28.0.tgz", + "integrity": "sha512-us0dSb9iFxIi8srnpl931Nvs65it/Jd2a2K3qs7fz2WfGPHqzfzZTfec7oxZJRNPXPnNYZtanmRc4AL/JwVzHQ==", "cpu": [ "x64" ], @@ -421,9 +422,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", - "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.28.0.tgz", + "integrity": "sha512-CR/RYotgtCKwtftMwJlUU7xCVNg3lMYZ0RzTmAHSfLCXw3NtZtNpswLEj/Kkf6kEL3Gw+BpOekRX0BYCtklhUw==", "cpu": [ "arm64" ], @@ -438,9 +439,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", - "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "version": "0.28.0", + "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.28.0.tgz", + "integrity": "sha512-nU1yhmYutL+fQ71Kxnhg8uEOdC0pwEW9entHykTgEbna2pw2dkbFSMeqjjyHZoCmt8SBkOSvV+yNmm94aUrrqw==", "cpu": [ "x64" ], @@ -455,9 +456,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", - "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.28.0.tgz", + "integrity": "sha512-cXb5vApOsRsxsEl4mcZ1XY3D4DzcoMxR/nnc4IyqYs0rTI8ZKmW6kyyg+11Z8yvgMfAEldKzP7AdP64HnSC/6g==", "cpu": [ "arm64" ], @@ -472,9 +473,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", - "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.28.0.tgz", + "integrity": "sha512-8wZM2qqtv9UP3mzy7HiGYNH/zjTA355mpeuA+859TyR+e+Tc08IHYpLJuMsfpDJwoLo1ikIJI8jC3GFjnRClzA==", "cpu": [ "x64" ], @@ -489,9 +490,9 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", - "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.28.0.tgz", + "integrity": "sha512-FLGfyizszcef5C3YtoyQDACyg95+dndv79i2EekILBofh5wpCa1KuBqOWKrEHZg3zrL3t5ouE5jgr94vA+Wb2w==", "cpu": [ "arm64" ], @@ -506,9 +507,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.27.7", - "resolved": 
"https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", - "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.28.0.tgz", + "integrity": "sha512-1ZgjUoEdHZZl/YlV76TSCz9Hqj9h9YmMGAgAPYd+q4SicWNX3G5GCyx9uhQWSLcbvPW8Ni7lj4gDa1T40akdlw==", "cpu": [ "x64" ], @@ -523,9 +524,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", - "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.28.0.tgz", + "integrity": "sha512-Q9StnDmQ/enxnpxCCLSg0oo4+34B9TdXpuyPeTedN/6+iXBJ4J+zwfQI28u/Jl40nOYAxGoNi7mFP40RUtkmUA==", "cpu": [ "arm64" ], @@ -540,9 +541,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", - "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.28.0.tgz", + "integrity": "sha512-zF3ag/gfiCe6U2iczcRzSYJKH1DCI+ByzSENHlM2FcDbEeo5Zd2C86Aq0tKUYAJJ1obRP84ymxIAksZUcdztHA==", "cpu": [ "ia32" ], @@ -557,9 +558,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", - "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.28.0.tgz", + "integrity": 
"sha512-pEl1bO9mfAmIC+tW5btTmrKaujg3zGtUmWNdCw/xs70FBjwAL3o9OEKNHvNmnyylD6ubxUERiEhdsL0xBQ9efw==", "cpu": [ "x64" ], @@ -2108,9 +2109,9 @@ } }, "node_modules/esbuild": { - "version": "0.27.7", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", - "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "version": "0.28.0", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.28.0.tgz", + "integrity": "sha512-sNR9MHpXSUV/XB4zmsFKN+QgVG82Cc7+/aaxJ8Adi8hyOac+EXptIp45QBPaVyX3N70664wRbTcLTOemCAnyqw==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -2121,32 +2122,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.27.7", - "@esbuild/android-arm": "0.27.7", - "@esbuild/android-arm64": "0.27.7", - "@esbuild/android-x64": "0.27.7", - "@esbuild/darwin-arm64": "0.27.7", - "@esbuild/darwin-x64": "0.27.7", - "@esbuild/freebsd-arm64": "0.27.7", - "@esbuild/freebsd-x64": "0.27.7", - "@esbuild/linux-arm": "0.27.7", - "@esbuild/linux-arm64": "0.27.7", - "@esbuild/linux-ia32": "0.27.7", - "@esbuild/linux-loong64": "0.27.7", - "@esbuild/linux-mips64el": "0.27.7", - "@esbuild/linux-ppc64": "0.27.7", - "@esbuild/linux-riscv64": "0.27.7", - "@esbuild/linux-s390x": "0.27.7", - "@esbuild/linux-x64": "0.27.7", - "@esbuild/netbsd-arm64": "0.27.7", - "@esbuild/netbsd-x64": "0.27.7", - "@esbuild/openbsd-arm64": "0.27.7", - "@esbuild/openbsd-x64": "0.27.7", - "@esbuild/openharmony-arm64": "0.27.7", - "@esbuild/sunos-x64": "0.27.7", - "@esbuild/win32-arm64": "0.27.7", - "@esbuild/win32-ia32": "0.27.7", - "@esbuild/win32-x64": "0.27.7" + "@esbuild/aix-ppc64": "0.28.0", + "@esbuild/android-arm": "0.28.0", + "@esbuild/android-arm64": "0.28.0", + "@esbuild/android-x64": "0.28.0", + "@esbuild/darwin-arm64": "0.28.0", + "@esbuild/darwin-x64": "0.28.0", + "@esbuild/freebsd-arm64": "0.28.0", + "@esbuild/freebsd-x64": "0.28.0", + "@esbuild/linux-arm": "0.28.0", + 
"@esbuild/linux-arm64": "0.28.0", + "@esbuild/linux-ia32": "0.28.0", + "@esbuild/linux-loong64": "0.28.0", + "@esbuild/linux-mips64el": "0.28.0", + "@esbuild/linux-ppc64": "0.28.0", + "@esbuild/linux-riscv64": "0.28.0", + "@esbuild/linux-s390x": "0.28.0", + "@esbuild/linux-x64": "0.28.0", + "@esbuild/netbsd-arm64": "0.28.0", + "@esbuild/netbsd-x64": "0.28.0", + "@esbuild/openbsd-arm64": "0.28.0", + "@esbuild/openbsd-x64": "0.28.0", + "@esbuild/openharmony-arm64": "0.28.0", + "@esbuild/sunos-x64": "0.28.0", + "@esbuild/win32-arm64": "0.28.0", + "@esbuild/win32-ia32": "0.28.0", + "@esbuild/win32-x64": "0.28.0" } }, "node_modules/escape-html": { @@ -4913,6 +4914,490 @@ "fsevents": "~2.3.3" } }, + "node_modules/tsx/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", + "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": 
"sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": 
"sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": 
"sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/esbuild": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", + "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.7", + "@esbuild/android-arm": "0.27.7", + "@esbuild/android-arm64": "0.27.7", + "@esbuild/android-x64": "0.27.7", + "@esbuild/darwin-arm64": "0.27.7", + "@esbuild/darwin-x64": "0.27.7", + "@esbuild/freebsd-arm64": "0.27.7", + "@esbuild/freebsd-x64": "0.27.7", + "@esbuild/linux-arm": "0.27.7", + "@esbuild/linux-arm64": "0.27.7", + "@esbuild/linux-ia32": "0.27.7", + "@esbuild/linux-loong64": "0.27.7", + "@esbuild/linux-mips64el": "0.27.7", + "@esbuild/linux-ppc64": "0.27.7", + "@esbuild/linux-riscv64": "0.27.7", + "@esbuild/linux-s390x": "0.27.7", + "@esbuild/linux-x64": "0.27.7", + "@esbuild/netbsd-arm64": "0.27.7", + "@esbuild/netbsd-x64": "0.27.7", + "@esbuild/openbsd-arm64": "0.27.7", + "@esbuild/openbsd-x64": "0.27.7", + "@esbuild/openharmony-arm64": "0.27.7", + "@esbuild/sunos-x64": "0.27.7", + "@esbuild/win32-arm64": "0.27.7", + "@esbuild/win32-ia32": "0.27.7", + "@esbuild/win32-x64": "0.27.7" + } + }, "node_modules/type-is": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", diff --git a/package.json b/package.json index 3928af5..7b59343 100644 --- a/package.json +++ b/package.json @@ -37,7 +37,7 @@ }, "scripts": { "build": "tsc && node scripts/copy-assets.mjs", - "build:prod": "tsc -p tsconfig.build.json && node 
scripts/copy-assets.mjs", + "build:prod": "node scripts/bundle.mjs && node scripts/copy-assets.mjs", "clean": "node -e \"require('fs').rmSync('dist',{recursive:true,force:true})\"", "dev": "tsx src/index.ts", "lint:md": "markdownlint \"**/*.md\"", @@ -69,6 +69,7 @@ "@types/node": "^22.10.7", "@types/uuid": "^10.0.0", "@vitest/coverage-v8": "^2.1.9", + "esbuild": "^0.28.0", "markdownlint-cli": "^0.48.0", "tsx": "^4.19.2", "typescript": "^5.7.3", diff --git a/scripts/bundle.mjs b/scripts/bundle.mjs new file mode 100644 index 0000000..0b2b975 --- /dev/null +++ b/scripts/bundle.mjs @@ -0,0 +1,36 @@ +// scripts/bundle.mjs +// Production bundler: esbuild inlines pure-JS dependencies into a single +// dist/index.js, reducing install size. Heavy deps that use native bindings +// (mqtt, pino, axios, @modelcontextprotocol/sdk) remain in node_modules. + +import { build } from 'esbuild'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const root = path.resolve(__dirname, '..'); + +await build({ + entryPoints: [path.join(root, 'src/index.ts')], + bundle: true, + platform: 'node', + target: 'node18', + format: 'esm', + outfile: path.join(root, 'dist/index.js'), + // Keep heavy native-binding or large deps external; they stay in node_modules. 
+ external: [ + 'node:*', + // native binding deps + 'mqtt', + 'pino', + 'pino-pretty', + // large deps with native parts + 'axios', + '@modelcontextprotocol/sdk', + // pure-JS but large — inline separately if needed + ], + banner: { + js: '#!/usr/bin/env node', + }, + logLevel: 'info', +}); diff --git a/tests/build/bundle-size.test.ts b/tests/build/bundle-size.test.ts new file mode 100644 index 0000000..89677c8 --- /dev/null +++ b/tests/build/bundle-size.test.ts @@ -0,0 +1,17 @@ +import { describe, it, expect } from 'vitest'; +import fs from 'node:fs'; +import path from 'node:path'; + +describe('production bundle size', () => { + const distEntry = path.resolve('dist/index.js'); + + it('dist/index.js exists', () => { + expect(fs.existsSync(distEntry)).toBe(true); + }); + + it('dist/index.js is under 15 MB', () => { + const { size } = fs.statSync(distEntry); + const sizeMb = size / (1024 * 1024); + expect(sizeMb, `dist/index.js is ${sizeMb.toFixed(1)} MB — exceeds 15 MB budget`).toBeLessThan(15); + }); +}); From e6a0aad497f1ab1bdcbe029b0b5e8b38ad8af571 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:40:49 +0800 Subject: [PATCH 09/26] docs: update test count to 1900 --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 3e91cf6..4028238 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,7 @@ Under the hood every surface shares the same catalog, cache, and HMAC client — - 🎨 **Dual output modes** — colorized tables by default; `--json` passthrough for `jq` and scripting - 🔐 **Secure credentials** — HMAC-SHA256 signed requests; config file written with `0600`; env-var override for CI - 🔍 **Dry-run mode** — preview every mutating request before it hits the API -- 🧪 **Fully tested** — 1898 Vitest tests, mocked axios, zero network in CI +- 🧪 **Fully tested** — 1900 Vitest tests, mocked axios, zero network in CI - ⚡ **Shell completion** — Bash / Zsh / Fish / PowerShell ## Requirements @@ -1123,7 
+1123,7 @@ npm install npm run dev -- # Run from TypeScript sources via tsx npm run build # Compile to dist/ -npm test # Run the Vitest suite (1898 tests) +npm test # Run the Vitest suite (1900 tests) npm run test:watch # Watch mode npm run test:coverage # Coverage report (v8, HTML + text) ``` @@ -1205,7 +1205,7 @@ src/ ├── format.ts # renderRows / filterFields / output-format dispatch ├── audit.ts # JSONL audit log writer └── quota.ts # Local daily-quota counter -tests/ # Vitest suite (1898 tests, mocked axios, no network) +tests/ # Vitest suite (1900 tests, mocked axios, no network) ``` ### Release flow From 68b8fe8df4eac6aed07003b03c740b84f670fcb8 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 17:46:58 +0800 Subject: [PATCH 10/26] refactor(capabilities,schema): add schema export --capabilities, replace inline catalog with pointer note --- src/commands/capabilities.ts | 12 ++-------- src/commands/schema.ts | 14 ++++++++++-- tests/commands/capabilities-meta.test.ts | 10 +++++++++ tests/commands/capabilities.test.ts | 17 ++++++-------- tests/commands/schema.test.ts | 28 ++++++++++++++++++++++++ 5 files changed, 59 insertions(+), 22 deletions(-) diff --git a/src/commands/capabilities.ts b/src/commands/capabilities.ts index e43ede6..3c37125 100644 --- a/src/commands/capabilities.ts +++ b/src/commands/capabilities.ts @@ -411,17 +411,9 @@ export function registerCapabilitiesCommand(program: Command): void { Object.entries(COMMAND_META).map(([k, v]) => [k, { ...v, ...deriveRiskMeta(v) }]) ), ...(globalFlags ? { globalFlags } : {}), - catalog: { + catalog: compact ? undefined : { + note: 'Device type catalog (commands, status fields, parameters) is available via `schema export`. 
Use `schema export --capabilities` for entries annotated with CLI safety metadata.', typeCount: catalog.length, - roles, - destructiveCommandCount: catalog.reduce( - (n, e) => - n + e.commands.filter((c) => deriveSafetyTier(c, e) === 'destructive').length, - 0, - ), - safetyTiersInUse: collectSafetyTiersInUse(catalog), - readOnlyTypeCount: catalog.filter((e) => e.readOnly).length, - readOnlyQueryCount: countStatusQueries(catalog), }, resources: RESOURCE_CATALOG, }; diff --git a/src/commands/schema.ts b/src/commands/schema.ts index b0146bb..8fc3c7c 100644 --- a/src/commands/schema.ts +++ b/src/commands/schema.ts @@ -120,6 +120,7 @@ export function registerSchemaCommand(program: Command): void { .option('--compact', 'Drop descriptions/aliases/example params — emit ~60% smaller payload. Useful for agent prompts.') .option('--used', 'Restrict to device types present in the local devices cache (run "devices list" first)') .option('--project ', 'Project per-type fields (e.g. --project type,commands,statusFields)', stringArg('--project')) + .option('--capabilities', 'Annotate each device type with CLI command safety metadata (agentSafetyTier, mutating, consumesQuota)') .addHelpText('after', ` Output is always JSON (this command ignores --format). 
The output is a catalog export — not a formal JSON Schema standard document — suitable for @@ -148,7 +149,7 @@ Examples: $ switchbot schema export --role security --category physical $ switchbot schema export --project type,commands,statusFields `) - .action((options: { type?: string; types?: string; role?: string; category?: string; compact?: boolean; used?: boolean; project?: string }) => { + .action(async (options: { type?: string; types?: string; role?: string; category?: string; compact?: boolean; used?: boolean; project?: string; capabilities?: boolean }) => { const catalog = getEffectiveCatalog(); let filtered = catalog; @@ -199,9 +200,18 @@ Examples: ) : mapped; + let finalTypes = projected as Array>; + if (options.capabilities) { + const { COMMAND_META } = await import('./capabilities.js'); + const devicesMeta = Object.fromEntries( + Object.entries(COMMAND_META).filter(([k]) => k.startsWith('devices ')), + ); + finalTypes = finalTypes.map((e) => ({ ...e, commandsMeta: devicesMeta })); + } + const payload: Record = { version: '1.0', - types: projected, + types: finalTypes, }; if (!options.compact) { payload.generatedAt = new Date().toISOString(); diff --git a/tests/commands/capabilities-meta.test.ts b/tests/commands/capabilities-meta.test.ts index a743a1b..2fdf31e 100644 --- a/tests/commands/capabilities-meta.test.ts +++ b/tests/commands/capabilities-meta.test.ts @@ -98,4 +98,14 @@ describe('capabilities command — regression output tests', () => { expect(entry.mutating).toBe(false); expect(entry.consumesQuota).toBe(false); }); + + it('full output catalog is a pointer note referencing schema export', async () => { + const res = await runCli(registerCapabilitiesCommand, ['capabilities']); + expect(res.exitCode).toBeNull(); + const parsed = JSON.parse(res.stdout.join('')) as { data: { catalog?: { note: string } } }; + const catalog = parsed.data.catalog; + expect(catalog).toBeDefined(); + expect(catalog).toHaveProperty('note'); + 
expect(catalog!.note).toContain('schema export'); + }); }); diff --git a/tests/commands/capabilities.test.ts b/tests/commands/capabilities.test.ts index d485e20..e7ab730 100644 --- a/tests/commands/capabilities.test.ts +++ b/tests/commands/capabilities.test.ts @@ -128,19 +128,16 @@ describe('capabilities', () => { expect(flags.some((f) => f.includes('--dry-run'))).toBe(true); }); - it('catalog.roles includes lighting and security, typeCount > 10', async () => { + it('catalog is a pointer note with typeCount, not inline stats', async () => { const out = await runCapabilities(); const cat = out.catalog as Record; - expect((cat.roles as string[])).toContain('lighting'); - expect((cat.roles as string[])).toContain('security'); + expect(cat).toHaveProperty('note'); + expect(cat.note as string).toContain('schema export'); expect(cat.typeCount as number).toBeGreaterThan(10); - }); - - it('P11: catalog.safetyTiersInUse includes "read" and catalog.readOnlyQueryCount > 0', async () => { - const out = await runCapabilities(); - const cat = out.catalog as Record; - expect((cat.safetyTiersInUse as string[])).toContain('read'); - expect((cat.readOnlyQueryCount as number)).toBeGreaterThan(0); + // Inline stats (roles, safetyTiersInUse, readOnlyQueryCount) are intentionally + // removed — they now live in `schema export --capabilities`. 
+ expect(cat.roles).toBeUndefined(); + expect(cat.safetyTiersInUse).toBeUndefined(); }); it('surfaces.mcp.tools includes send_command, account_overview, get_device_history and query_device_history', async () => { diff --git a/tests/commands/schema.test.ts b/tests/commands/schema.test.ts index 5552ad9..056bd43 100644 --- a/tests/commands/schema.test.ts +++ b/tests/commands/schema.test.ts @@ -156,4 +156,32 @@ describe('schema export B3 slim flags', () => { expect(parsed.types[0].category).toBeUndefined(); expect(parsed.types[0].description).toBeUndefined(); }); + + // ===================================================================== + // --capabilities flag (MVP 5) + // ===================================================================== + describe('schema export --capabilities', () => { + it('adds commandsMeta to each device type entry', async () => { + const res = await runCli(registerSchemaCommand, ['schema', 'export', '--capabilities', '--type', 'Bot']); + expect(res.exitCode).toBeNull(); + const parsed = JSON.parse(res.stdout.join('')).data; + expect(parsed.types.length).toBeGreaterThan(0); + const first = parsed.types[0] as Record; + expect(first).toHaveProperty('commandsMeta'); + const meta = first.commandsMeta as Record; + expect(typeof meta).toBe('object'); + // commandsMeta contains entries from COMMAND_META for 'devices *' commands + expect(Object.keys(meta).length).toBeGreaterThan(0); + const firstEntry = Object.values(meta)[0] as Record; + expect(firstEntry).toHaveProperty('agentSafetyTier'); + expect(firstEntry).toHaveProperty('mutating'); + }); + + it('normal export without --capabilities does not include commandsMeta', async () => { + const res = await runCli(registerSchemaCommand, ['schema', 'export', '--type', 'Bot']); + const parsed = JSON.parse(res.stdout.join('')).data; + const first = parsed.types[0] as Record; + expect(first).not.toHaveProperty('commandsMeta'); + }); + }); }); From 1ffb6b5ac13c7d40de885dfefdbbc05dd4ffd3ce Mon Sep 17 
00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 18:21:10 +0800 Subject: [PATCH 11/26] test(daemon): add stop, status, reload subcommand coverage --- tests/commands/daemon.test.ts | 109 ++++++++++++++++++++++++++++++++++ 1 file changed, 109 insertions(+) diff --git a/tests/commands/daemon.test.ts b/tests/commands/daemon.test.ts index 7137266..a44c933 100644 --- a/tests/commands/daemon.test.ts +++ b/tests/commands/daemon.test.ts @@ -165,3 +165,112 @@ describe('daemon command', () => { ); }); }); + +describe('daemon stop', () => { + beforeEach(() => { + vi.useFakeTimers(); + fsMock.unlinkSync.mockClear(); + pidFileMock.readPidFile.mockReset().mockReturnValue(null); + pidFileMock.isPidAlive.mockReset().mockReturnValue(false); + daemonStateMock.readDaemonState.mockReset().mockReturnValue(null); + daemonStateMock.writeDaemonState.mockClear(); + }); + + it('prints "No running daemon found" and exits 0 when no daemon is running', async () => { + const res = await runCli(registerDaemonCommand, ['daemon', 'stop']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/no running daemon/i); + expect(daemonStateMock.writeDaemonState).toHaveBeenCalledWith( + expect.objectContaining({ status: 'stopped', pid: null }), + ); + }); + + it('unlinks pid files, persists stopped state, and prints success when daemon is running', async () => { + pidFileMock.readPidFile.mockImplementation((file: string) => + file === daemonStateMock.DAEMON_PID_FILE ? 
12345 : null, + ); + pidFileMock.isPidAlive.mockReturnValue(true); + // Prevent real SIGTERM from being sent to a potentially-live PID in the test + const killSpy = vi.spyOn(process, 'kill').mockImplementation(() => true as never); + + const res = await runCli(registerDaemonCommand, ['daemon', 'stop']); + killSpy.mockRestore(); + + expect(res.exitCode).toBeNull(); + expect(fsMock.unlinkSync).toHaveBeenCalled(); + expect(daemonStateMock.writeDaemonState).toHaveBeenLastCalledWith( + expect.objectContaining({ status: 'stopped', pid: null }), + ); + expect(res.stdout.join(' ')).toMatch(/daemon stopped/i); + }); +}); + +describe('daemon status', () => { + beforeEach(() => { + pidFileMock.readPidFile.mockReset().mockReturnValue(null); + pidFileMock.isPidAlive.mockReset().mockReturnValue(false); + daemonStateMock.readDaemonState.mockReset().mockReturnValue(null); + }); + + it('--json reports status:stopped when no daemon is running', async () => { + const res = await runCli(registerDaemonCommand, ['--json', 'daemon', 'status']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: { status: string; pid: unknown } }; + expect(body.data.status).toBe('stopped'); + expect(body.data.pid).toBeNull(); + }); + + it('--json reports status:running with correct pid when daemon is alive', async () => { + pidFileMock.readPidFile.mockImplementation((file: string) => + file === daemonStateMock.DAEMON_PID_FILE ? 
9999 : null, + ); + pidFileMock.isPidAlive.mockReturnValue(true); + + const res = await runCli(registerDaemonCommand, ['--json', 'daemon', 'status']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: { status: string; pid: number } }; + expect(body.data.status).toBe('running'); + expect(body.data.pid).toBe(9999); + }); + + it('human output prints "not running" when stopped', async () => { + const res = await runCli(registerDaemonCommand, ['daemon', 'status']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/not running/i); + }); +}); + +describe('daemon reload', () => { + beforeEach(() => { + vi.useFakeTimers(); + pidFileMock.readPidFile.mockReset().mockReturnValue(null); + pidFileMock.isPidAlive.mockReset().mockReturnValue(false); + daemonStateMock.readDaemonState.mockReset().mockReturnValue(null); + daemonStateMock.writeDaemonState.mockClear(); + pidFileMock.writeReloadSentinel.mockClear(); + pidFileMock.sighupSupported.mockReturnValue(false); + }); + + it('exits 2 with usage error when no daemon is running', async () => { + const res = await runCli(registerDaemonCommand, ['daemon', 'reload']); + expect(res.exitCode).toBe(2); + expect(res.stderr.join(' ')).toMatch(/no running daemon/i); + }); + + it('succeeds via sentinel when daemon and rules engine are running', async () => { + pidFileMock.readPidFile.mockImplementation((file: string) => { + if (file === daemonStateMock.DAEMON_PID_FILE) return 8888; + if (file === '/mock/.switchbot/rules.pid') return 7777; + return null; + }); + pidFileMock.isPidAlive.mockReturnValue(true); + + const res = await runCli(registerDaemonCommand, ['daemon', 'reload']); + expect(res.exitCode).toBeNull(); + expect(pidFileMock.writeReloadSentinel).toHaveBeenCalledWith('/mock/.switchbot/rules.reload'); + expect(daemonStateMock.writeDaemonState).toHaveBeenCalledWith( + expect.objectContaining({ lastReloadStatus: 'ok' }), + ); + expect(res.stdout.join(' 
')).toMatch(/reload requested/i); + }); +}); From bcde7bcbe11ace56bb4d19c6a3ea233d7015b058 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 18:34:42 +0800 Subject: [PATCH 12/26] test(rules): add conflicts, doctor, summary, last-fired coverage --- tests/commands/rules.test.ts | 180 +++++++++++++++++++++++++++++++++++ 1 file changed, 180 insertions(+) diff --git a/tests/commands/rules.test.ts b/tests/commands/rules.test.ts index 91c8920..4734088 100644 --- a/tests/commands/rules.test.ts +++ b/tests/commands/rules.test.ts @@ -542,4 +542,184 @@ describe('switchbot rules (commander surface)', () => { expect(body.data.lastFired).toBe('2026-04-25T08:00:00.000Z'); }); }); + + describe('rules conflicts', () => { + it('exits 0 and reports clean when no conflicts detected', async () => { + const p = path.join(tmpDir, 'clean.yaml'); + fs.writeFileSync(p, v02Policy(sampleAutomation)); + const { exitCode, stdout } = await runCli(['rules', 'conflicts', p]); + expect(exitCode).toBe(0); + expect(stdout.join(' ')).toMatch(/no conflicts detected/i); + }); + + it('exits 0 and emits findings when opposing-action pair exists (warnings, not errors)', async () => { + const conflict = v02Policy([ + 'automation:', + ' enabled: true', + ' rules:', + ' - name: r-on', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command DEVICE-X turnOn", device: DEVICE-X }', + ' - name: r-off', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command DEVICE-X turnOff", device: DEVICE-X }', + '', + ].join('\n')); + const p = path.join(tmpDir, 'conflict.yaml'); + fs.writeFileSync(p, conflict); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'conflicts', p]); + // Opposing actions are "warning" severity → clean:true → exit 0 + expect(exitCode).toBe(0); + const body = JSON.parse(stdout[0]) as { data: { clean: boolean; findings: Array<{ code: string }> } }; + 
expect(body.data.findings.length).toBeGreaterThan(0); + expect(body.data.findings.some((f) => f.code === 'opposing-actions')).toBe(true); + }); + + it('--json includes counts for warning findings and has clean:true when only warnings exist', async () => { + const twoRules = v02Policy([ + 'automation:', + ' enabled: true', + ' rules:', + ' - name: on', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command DD turnOn", device: DD }', + ' - name: off', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command DD turnOff", device: DD }', + '', + ].join('\n')); + const p = path.join(tmpDir, 'conflict2.yaml'); + fs.writeFileSync(p, twoRules); + const { stdout } = await runCli(['--json', 'rules', 'conflicts', p]); + const body = JSON.parse(stdout[0]) as { data: { clean: boolean; counts: Record } }; + // Only warnings → clean:true (errors needed for clean:false) + expect(body.data.clean).toBe(true); + expect(body.data.counts.warning).toBeGreaterThan(0); + expect(body.data.counts.error).toBe(0); + }); + }); + + describe('rules doctor', () => { + it('--json exits 0 with overall:true for a valid policy', async () => { + const p = path.join(tmpDir, 'ok.yaml'); + fs.writeFileSync(p, v02Policy(sampleAutomation)); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'doctor', p]); + expect(exitCode).toBe(0); + const body = JSON.parse(stdout[0]) as { data: { overall: boolean } }; + expect(body.data.overall).toBe(true); + }); + + it('--json exits 1 with overall:false for a policy with duplicate rule names (lint error)', async () => { + const bad = v02Policy([ + 'automation:', + ' enabled: true', + ' rules:', + ' - name: dup-name', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command EE turnOn" }', + ' - name: dup-name', + ' when: { source: mqtt, event: motion.detected }', + ' then:', + ' - { command: "devices command FF turnOff" }', + '', + 
].join('\n')); + const p = path.join(tmpDir, 'doctor-bad.yaml'); + fs.writeFileSync(p, bad); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'doctor', p]); + expect(exitCode).toBe(1); + const body = JSON.parse(stdout[0]) as { data: { overall: boolean } }; + expect(body.data.overall).toBe(false); + }); + }); + + describe('rules summary', () => { + function writeAudit(file: string, rows: unknown[]): void { + fs.writeFileSync(file, rows.map((r) => JSON.stringify(r)).join('\n') + '\n'); + } + + it('prints "(no rule activity)" when the audit log is empty', async () => { + const f = path.join(tmpDir, 'audit-empty.log'); + fs.writeFileSync(f, ''); + const { stdout } = await runCli(['rules', 'summary', '--file', f]); + expect(stdout.join(' ')).toMatch(/no rule activity/i); + }); + + it('--json reports total count and summaries when entries exist', async () => { + const f = path.join(tmpDir, 'audit-sum.log'); + const now = new Date().toISOString(); + writeAudit(f, [ + { t: now, kind: 'rule-fire', rule: { name: 'lights on', triggerSource: 'mqtt', fireId: 'f1' }, result: 'ok', deviceId: 'D1', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + { t: now, kind: 'rule-fire', rule: { name: 'lights on', triggerSource: 'mqtt', fireId: 'f2' }, result: 'ok', deviceId: 'D1', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + { t: now, kind: 'rule-fire', rule: { name: 'lights on', triggerSource: 'mqtt', fireId: 'f3' }, result: 'error', deviceId: 'D1', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + ]); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'summary', '--file', f]); + expect(exitCode).toBe(0); + const body = JSON.parse(stdout[0]) as { data: { total: number; summaries: Array<{ rule: string; fires: number; errors: number }> } }; + expect(body.data.total).toBe(3); + const s = body.data.summaries.find((x) => x.rule === 'lights on'); + expect(s).toBeDefined(); + 
expect(s!.fires).toBe(3); + expect(s!.errors).toBe(1); + }); + + it('--rule filters to a single rule name', async () => { + const f = path.join(tmpDir, 'audit-filter.log'); + const now = new Date().toISOString(); + writeAudit(f, [ + { t: now, kind: 'rule-fire', rule: { name: 'rule-A', triggerSource: 'mqtt', fireId: 'x1' }, result: 'ok', deviceId: 'D', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + { t: now, kind: 'rule-fire', rule: { name: 'rule-B', triggerSource: 'mqtt', fireId: 'x2' }, result: 'ok', deviceId: 'D', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false }, + ]); + const { stdout } = await runCli(['--json', 'rules', 'summary', '--file', f, '--rule', 'rule-A']); + const body = JSON.parse(stdout[0]) as { data: { summaries: Array<{ rule: string }> } }; + expect(body.data.summaries.every((s) => s.rule === 'rule-A')).toBe(true); + }); + }); + + describe('rules last-fired', () => { + function writeAudit(file: string, rows: unknown[]): void { + fs.writeFileSync(file, rows.map((r) => JSON.stringify(r)).join('\n') + '\n'); + } + + it('prints hint when no rule-fire entries exist', async () => { + const f = path.join(tmpDir, 'audit-empty2.log'); + fs.writeFileSync(f, ''); + const { stdout } = await runCli(['rules', 'last-fired', '--file', f]); + expect(stdout.join(' ')).toMatch(/no rule-fire entries/i); + }); + + it('--json returns entries in reverse chronological order', async () => { + const f = path.join(tmpDir, 'audit-lf.log'); + const base = new Date('2026-04-25T10:00:00.000Z'); + writeAudit(f, [1, 2, 3].map((i) => ({ + t: new Date(base.getTime() + i * 1000).toISOString(), + kind: 'rule-fire', + rule: { name: 'night-light', triggerSource: 'mqtt', fireId: `f${i}` }, + result: 'ok', deviceId: 'D1', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false, + }))); + const { exitCode, stdout } = await runCli(['--json', 'rules', 'last-fired', '--file', f]); + expect(exitCode).toBe(0); + const 
body = JSON.parse(stdout[0]) as { data: { count: number; entries: Array<{ kind: string }> } }; + expect(body.data.count).toBe(3); + expect(body.data.entries[0].kind).toBe('rule-fire'); + }); + + it('-n limits the number of results returned', async () => { + const f = path.join(tmpDir, 'audit-n.log'); + const base = new Date('2026-04-25T12:00:00.000Z'); + writeAudit(f, Array.from({ length: 15 }, (_, i) => ({ + t: new Date(base.getTime() + i * 1000).toISOString(), + kind: 'rule-fire', + rule: { name: 'flood-rule', triggerSource: 'mqtt', fireId: `id${i}` }, + result: 'ok', deviceId: 'D', command: 'turnOn', parameter: null, commandType: 'command', dryRun: false, + }))); + const { stdout } = await runCli(['--json', 'rules', 'last-fired', '--file', f, '-n', '5']); + const body = JSON.parse(stdout[0]) as { data: { count: number } }; + expect(body.data.count).toBe(5); + }); + }); }); From bb7d4a6b06b5ea5ded073cbdfccd7bcdf2c5e831 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 18:51:54 +0800 Subject: [PATCH 13/26] test: add webhook token, suggest, health-check, scenes validate/simulate, status-sync, upgrade-check coverage --- tests/commands/capabilities.test.ts | 34 ++++++++ tests/commands/health-check.test.ts | 93 ++++++++++++++++++++ tests/commands/rules.test.ts | 125 +++++++++++++++++++++++++++ tests/commands/scenes.test.ts | 90 +++++++++++++++++++ tests/commands/status-sync.test.ts | 103 ++++++++++++++++++++++ tests/commands/upgrade-check.test.ts | 56 ++++++++++++ 6 files changed, 501 insertions(+) create mode 100644 tests/commands/health-check.test.ts create mode 100644 tests/commands/status-sync.test.ts diff --git a/tests/commands/capabilities.test.ts b/tests/commands/capabilities.test.ts index e7ab730..43ca552 100644 --- a/tests/commands/capabilities.test.ts +++ b/tests/commands/capabilities.test.ts @@ -281,4 +281,38 @@ describe('capabilities B3/B4', () => { const keys = resources.keys as Array<{ keyType: string }>; expect(keys.map((k) => 
k.keyType).sort()).toEqual(['disposable', 'permanent', 'timeLimit', 'urgent']); }); + + it('commandMeta flat map includes derived risk fields on every entry', async () => { + const out = await runCapabilitiesWith([]); + const commandMeta = out.commandMeta as Record>; + expect(commandMeta).toBeDefined(); + // Spot-check a known entry + const devList = commandMeta['devices list']; + expect(devList).toBeDefined(); + expect(devList.agentSafetyTier).toBe('read'); + expect(devList.mutating).toBe(false); + expect(devList.consumesQuota).toBe(true); + // Derived risk meta must be present + expect(devList.riskLevel).toBe('low'); + expect(devList.requiresConfirmation).toBe(false); + expect(devList.recommendedMode).toBe('direct'); + // All entries must have the derived fields + for (const [_key, entry] of Object.entries(commandMeta)) { + expect(entry).toHaveProperty('riskLevel'); + expect(entry).toHaveProperty('requiresConfirmation'); + expect(entry).toHaveProperty('recommendedMode'); + } + }); + + it('--surface cli restricts surfaces block to cli only', async () => { + const out = await runCapabilitiesWith(['--surface', 'cli']); + const surfaces = out.surfaces as Record; + expect(Object.keys(surfaces)).toEqual(['cli']); + }); + + it('--surface mqtt restricts surfaces block to mqtt only', async () => { + const out = await runCapabilitiesWith(['--surface', 'mqtt']); + const surfaces = out.surfaces as Record; + expect(Object.keys(surfaces)).toEqual(['mqtt']); + }); }); diff --git a/tests/commands/health-check.test.ts b/tests/commands/health-check.test.ts new file mode 100644 index 0000000..aeeb8ce --- /dev/null +++ b/tests/commands/health-check.test.ts @@ -0,0 +1,93 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import type { HealthReport } from '../../src/utils/health.js'; + +const healthMock = vi.hoisted(() => ({ + getHealthReport: vi.fn<[], HealthReport>(), + toPrometheusText: vi.fn(() => 'switchbot_quota_used_total 0\n'), +})); + 
+vi.mock('../../src/utils/health.js', () => healthMock); + +import { registerHealthCommand } from '../../src/commands/health.js'; +import { runCli } from '../helpers/cli.js'; + +const OK_REPORT: HealthReport = { + generatedAt: '2026-04-25T12:00:00.000Z', + overall: 'ok', + process: { pid: 1234, uptimeSeconds: 60, platform: 'linux', nodeVersion: 'v18.0.0', memoryMb: 50 }, + quota: { used: 10, limit: 10000, percentUsed: 0, remaining: 9990, status: 'ok' }, + audit: { present: false, recentErrors: 0, recentTotal: 0, errorRatePercent: 0, status: 'ok' }, + circuit: { name: 'switchbot-api', state: 'closed', failures: 0, status: 'ok' }, +}; + +const DEGRADED_REPORT: HealthReport = { + ...OK_REPORT, + overall: 'degraded', + quota: { used: 7500, limit: 10000, percentUsed: 75, remaining: 2500, status: 'warn' }, +}; + +const DOWN_REPORT: HealthReport = { + ...OK_REPORT, + overall: 'down', + circuit: { name: 'switchbot-api', state: 'open', failures: 5, status: 'open' }, +}; + +describe('health check CLI', () => { + beforeEach(() => { + healthMock.getHealthReport.mockReset().mockReturnValue(OK_REPORT); + healthMock.toPrometheusText.mockReset().mockReturnValue('switchbot_quota_used_total 0\n'); + }); + + it('--json exits 0 and includes overall, quota, circuit, process', async () => { + const res = await runCli(registerHealthCommand, ['--json', 'health', 'check']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: HealthReport }; + expect(body.data.overall).toBe('ok'); + expect(body.data.quota).toBeDefined(); + expect(body.data.circuit).toBeDefined(); + expect(body.data.process).toBeDefined(); + }); + + it('--json exits 0 even when overall is degraded (no process.exit in JSON mode)', async () => { + healthMock.getHealthReport.mockReturnValue(DEGRADED_REPORT); + const res = await runCli(registerHealthCommand, ['--json', 'health', 'check']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: 
HealthReport }; + expect(body.data.overall).toBe('degraded'); + }); + + it('human mode exits 0 and prints ✓ overall when healthy', async () => { + const res = await runCli(registerHealthCommand, ['health', 'check']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/overall.*ok/i); + }); + + it('human mode exits 1 when overall is degraded', async () => { + healthMock.getHealthReport.mockReturnValue(DEGRADED_REPORT); + const res = await runCli(registerHealthCommand, ['health', 'check']); + expect(res.exitCode).toBe(1); + expect(res.stdout.join(' ')).toMatch(/overall.*degraded/i); + }); + + it('human mode exits 1 when circuit is open (overall: down)', async () => { + healthMock.getHealthReport.mockReturnValue(DOWN_REPORT); + const res = await runCli(registerHealthCommand, ['health', 'check']); + expect(res.exitCode).toBe(1); + }); + + it('--prometheus writes Prometheus text to stdout and exits 0', async () => { + const stdoutLines: string[] = []; + const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation((chunk: unknown) => { + stdoutLines.push(String(chunk)); + return true; + }); + try { + const res = await runCli(registerHealthCommand, ['health', 'check', '--prometheus']); + expect(res.exitCode).toBeNull(); + } finally { + writeSpy.mockRestore(); + } + expect(healthMock.toPrometheusText).toHaveBeenCalledWith(OK_REPORT); + expect(stdoutLines.join('')).toContain('switchbot_quota_used_total'); + }); +}); diff --git a/tests/commands/rules.test.ts b/tests/commands/rules.test.ts index 4734088..be6cca7 100644 --- a/tests/commands/rules.test.ts +++ b/tests/commands/rules.test.ts @@ -723,3 +723,128 @@ describe('switchbot rules (commander surface)', () => { }); }); }); + +describe('rules webhook-rotate-token', () => { + let tokenDir: string; + + beforeEach(() => { + tokenDir = fs.mkdtempSync(path.join(os.tmpdir(), 'sbwh-')); + vi.spyOn(os, 'homedir').mockReturnValue(tokenDir); + }); + + afterEach(() => { + vi.restoreAllMocks(); + 
fs.rmSync(tokenDir, { recursive: true, force: true }); + }); + + it('creates a token file and prints the file path in human mode', async () => { + const { stdout } = await runCli(['rules', 'webhook-rotate-token']); + const tokenFile = path.join(tokenDir, '.switchbot', 'webhook-token'); + expect(fs.existsSync(tokenFile)).toBe(true); + const tokenContent = fs.readFileSync(tokenFile, 'utf-8').trim(); + expect(tokenContent.length).toBeGreaterThan(20); + expect(stdout.join(' ')).toMatch(/webhook bearer rotated/i); + }); + + it('--json reports status:rotated with filePath and tokenLength', async () => { + const { stdout } = await runCli(['--json', 'rules', 'webhook-rotate-token']); + const body = JSON.parse(stdout.join('')) as { data: { status: string; filePath: string; tokenLength: number } }; + expect(body.data.status).toBe('rotated'); + expect(typeof body.data.filePath).toBe('string'); + expect(body.data.tokenLength).toBeGreaterThan(20); + }); + + it('produces a different token on each rotation', async () => { + const tokenFile = path.join(tokenDir, '.switchbot', 'webhook-token'); + await runCli(['rules', 'webhook-rotate-token']); + const t1 = fs.readFileSync(tokenFile, 'utf-8').trim(); + await runCli(['rules', 'webhook-rotate-token']); + const t2 = fs.readFileSync(tokenFile, 'utf-8').trim(); + expect(t1).not.toBe(t2); + }); +}); + +describe('rules webhook-show-token', () => { + let tokenDir: string; + + beforeEach(() => { + tokenDir = fs.mkdtempSync(path.join(os.tmpdir(), 'sbwht-')); + vi.spyOn(os, 'homedir').mockReturnValue(tokenDir); + }); + + afterEach(() => { + vi.restoreAllMocks(); + fs.rmSync(tokenDir, { recursive: true, force: true }); + }); + + it('prints the token to stdout in human mode (creates if absent)', async () => { + const { stdout } = await runCli(['rules', 'webhook-show-token']); + expect(stdout.join('').trim().length).toBeGreaterThan(20); + }); + + it('returns the same token on repeated calls (stable, not rotating)', async () => { + const { 
stdout: s1 } = await runCli(['rules', 'webhook-show-token']); + const { stdout: s2 } = await runCli(['rules', 'webhook-show-token']); + expect(s1.join('').trim()).toBe(s2.join('').trim()); + }); + + it('--json reports filePath and tokenLength', async () => { + const { stdout } = await runCli(['--json', 'rules', 'webhook-show-token']); + const body = JSON.parse(stdout.join('')) as { data: { filePath: string; tokenLength: number } }; + expect(typeof body.data.filePath).toBe('string'); + expect(body.data.tokenLength).toBeGreaterThan(20); + }); +}); + +describe('rules suggest', () => { + it('exits with a Commander usage error when --intent is missing', async () => { + const program = makeProgram(); + await expect( + program.parseAsync(['node', 'test', 'rules', 'suggest']), + ).rejects.toThrow(); + }); + + it('outputs YAML to stdout when trigger can be inferred from intent', async () => { + const stdoutLines: string[] = []; + const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation((chunk: unknown) => { + stdoutLines.push(String(chunk)); + return true; + }); + try { + await runCli(['rules', 'suggest', '--intent', 'turn on light when motion detected']); + } finally { + writeSpy.mockRestore(); + } + const yaml = stdoutLines.join(''); + expect(yaml).toContain('name:'); + expect(yaml).toContain('when:'); + expect(yaml).toContain('then:'); + }); + + it('--json outputs structured rule + rule_yaml + warnings', async () => { + const { stdout } = await runCli(['--json', 'rules', 'suggest', '--intent', 'turn on lights at 8am every morning']); + const body = JSON.parse(stdout.join('')) as { data: { rule: Record; rule_yaml: string; warnings: string[] } }; + expect(body.data).toHaveProperty('rule'); + expect(body.data).toHaveProperty('rule_yaml'); + expect(Array.isArray(body.data.warnings)).toBe(true); + expect(body.data.rule.name).toBe('turn on lights at 8am every morning'); + }); + + it('writes YAML to --out file instead of stdout', async () => { + const outDir = 
fs.mkdtempSync(path.join(os.tmpdir(), 'sbsug-')); + const outFile = path.join(outDir, 'rule.yaml'); + try { + const { stdout } = await runCli([ + 'rules', 'suggest', + '--intent', 'turn on fan when button pressed', + '--out', outFile, + ]); + expect(fs.existsSync(outFile)).toBe(true); + const content = fs.readFileSync(outFile, 'utf-8'); + expect(content).toContain('name:'); + expect(stdout.join(' ')).toMatch(/rule YAML written/i); + } finally { + fs.rmSync(outDir, { recursive: true, force: true }); + } + }); +}); diff --git a/tests/commands/scenes.test.ts b/tests/commands/scenes.test.ts index aa1a672..f79064d 100644 --- a/tests/commands/scenes.test.ts +++ b/tests/commands/scenes.test.ts @@ -298,4 +298,94 @@ describe('scenes command', () => { expect((out.error as Record).message).toMatch(/scene not found/i); }); }); + + describe('validate', () => { + function mockScenes() { + apiMock.__instance.get.mockResolvedValue({ + data: { + body: [ + { sceneId: 'V1', sceneName: 'Sunrise' }, + { sceneId: 'V2', sceneName: 'Sunset' }, + ], + }, + }); + } + + it('--json exits 0 with ok:true when all supplied IDs exist', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'validate', 'V1', 'V2']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: { ok: boolean; results: unknown[] } }; + expect(body.data.ok).toBe(true); + expect(body.data.results).toHaveLength(2); + }); + + it('--json exits 1 with ok:false when a supplied ID does not exist', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'validate', 'V1', 'MISSING']); + expect(res.exitCode).toBe(1); + const body = JSON.parse(res.stdout[0]) as { data: { ok: boolean; results: Array<{ sceneId: string; valid: boolean }> } }; + expect(body.data.ok).toBe(false); + const missingEntry = body.data.results.find((r) => r.sceneId === 'MISSING'); + expect(missingEntry?.valid).toBe(false); + }); + + 
it('human mode exits 0 and prints ✓ for valid scenes', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['scenes', 'validate', 'V1']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toContain('✓'); + }); + + it('validates all scenes when no IDs are supplied', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'validate']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: { ok: boolean; results: unknown[] } }; + expect(body.data.results).toHaveLength(2); + }); + }); + + describe('simulate', () => { + function mockScenes() { + apiMock.__instance.get.mockResolvedValue({ + data: { + body: [ + { sceneId: 'SIM1', sceneName: 'Good Night' }, + ], + }, + }); + } + + it('--json returns simulated:true with wouldSend details', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'simulate', 'SIM1']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: Record }; + expect(body.data.simulated).toBe(true); + expect(body.data.sceneId).toBe('SIM1'); + expect(body.data.sceneName).toBe('Good Night'); + const wouldSend = body.data.wouldSend as Record; + expect(wouldSend.method).toBe('POST'); + expect(wouldSend.url).toContain('SIM1'); + }); + + it('human mode prints sceneId, sceneName and wouldSend', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['scenes', 'simulate', 'SIM1']); + expect(res.exitCode).toBeNull(); + const out = res.stdout.join('\n'); + expect(out).toContain('SIM1'); + expect(out).toContain('Good Night'); + expect(out).toContain('POST'); + }); + + it('--json exits 2 with error envelope for unknown sceneId', async () => { + mockScenes(); + const res = await runCli(registerScenesCommand, ['--json', 'scenes', 'simulate', 'UNKNOWN']); + expect(res.exitCode).toBe(2); + const out = JSON.parse(res.stdout.find((l) => 
l.trim().startsWith('{'))!) as Record; + expect((out.error as Record).message).toMatch(/scene not found/i); + }); + }); }); diff --git a/tests/commands/status-sync.test.ts b/tests/commands/status-sync.test.ts new file mode 100644 index 0000000..520ddec --- /dev/null +++ b/tests/commands/status-sync.test.ts @@ -0,0 +1,103 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import type { StatusSyncStatus, StopStatusSyncResult } from '../../src/status-sync/manager.js'; + +const managerMock = vi.hoisted(() => ({ + getStatusSyncStatus: vi.fn<[], StatusSyncStatus>(), + startStatusSync: vi.fn<[], StatusSyncStatus>(), + stopStatusSync: vi.fn<[], StopStatusSyncResult>(), + runStatusSyncForeground: vi.fn<[], Promise>(), +})); + +vi.mock('../../src/status-sync/manager.js', () => managerMock); + +import { registerStatusSyncCommand } from '../../src/commands/status-sync.js'; +import { runCli } from '../helpers/cli.js'; + +const NOT_RUNNING: StatusSyncStatus = { + running: false, + pid: null, + startedAt: null, + stateDir: '/mock/.switchbot/status-sync', + stateFile: '/mock/.switchbot/status-sync/state.json', + stdoutLog: '/mock/.switchbot/status-sync/stdout.log', + stderrLog: '/mock/.switchbot/status-sync/stderr.log', + command: null, + openclawUrl: null, + openclawModel: null, + topic: null, + configPath: null, + profile: null, +}; + +const RUNNING: StatusSyncStatus = { + ...NOT_RUNNING, + running: true, + pid: 9876, + startedAt: '2026-04-25T12:00:00.000Z', + command: ['node', 'src/index.js', 'status-sync', 'run'], + openclawUrl: 'http://localhost:18789', + openclawModel: 'home-agent', +}; + +describe('status-sync command', () => { + beforeEach(() => { + managerMock.getStatusSyncStatus.mockReset().mockReturnValue(NOT_RUNNING); + managerMock.startStatusSync.mockReset().mockReturnValue(RUNNING); + managerMock.stopStatusSync.mockReset().mockReturnValue({ + stopped: false, stale: false, pid: null, status: NOT_RUNNING, + }); + 
managerMock.runStatusSyncForeground.mockReset().mockResolvedValue(0); + }); + + describe('status', () => { + it('--json exits 0 with running:false when not running', async () => { + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'status']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StatusSyncStatus }; + expect(body.data.running).toBe(false); + expect(body.data.pid).toBeNull(); + }); + + it('--json exits 0 with running:true and pid when running', async () => { + managerMock.getStatusSyncStatus.mockReturnValue(RUNNING); + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'status']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StatusSyncStatus }; + expect(body.data.running).toBe(true); + expect(body.data.pid).toBe(9876); + }); + + it('human mode prints "not running" when not running', async () => { + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'status']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/not running/i); + }); + }); + + describe('stop', () => { + it('--json exits 0 with stopped:false when nothing is running', async () => { + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'stop']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StopStatusSyncResult }; + expect(body.data.stopped).toBe(false); + expect(body.data.pid).toBeNull(); + }); + + it('human mode prints "not running" when nothing to stop', async () => { + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'stop']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/not running/i); + }); + + it('--json exits 0 with stopped:true when a running bridge is stopped', async () => { + managerMock.stopStatusSync.mockReturnValue({ + stopped: true, stale: false, pid: 9876, status: 
NOT_RUNNING, + }); + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'stop']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StopStatusSyncResult }; + expect(body.data.stopped).toBe(true); + expect(body.data.pid).toBe(9876); + }); + }); +}); diff --git a/tests/commands/upgrade-check.test.ts b/tests/commands/upgrade-check.test.ts index 97f40fb..f4039c4 100644 --- a/tests/commands/upgrade-check.test.ts +++ b/tests/commands/upgrade-check.test.ts @@ -113,3 +113,59 @@ describe('upgrade-check action — prerelease guard', () => { expect(out).not.toMatch(/Update available/i); }); }); + +// ── happy path and network error ───────────────────────────────────────────── +describe('upgrade-check action — version comparison', () => { + afterEach(() => { + httpsMock.get.mockReset(); + }); + + it('--json: when up to date (registry returns same version), upToDate:true exits 0', async () => { + makeHttpsGet('3.1.1'); + const { registerUpgradeCheckCommand } = await import('../../src/commands/upgrade-check.js'); + const { runCli } = await import('../helpers/cli.js'); + + const res = await runCli(registerUpgradeCheckCommand, ['--json', 'upgrade-check']); + expect(res.exitCode).toBeNull(); + const line = res.stdout.find((l) => l.trim().startsWith('{')); + const out = JSON.parse(line!) as Record; + const data = (out.data ?? 
out) as Record; + expect(data.upToDate).toBe(true); + expect(data.updateAvailable).toBe(false); + expect(data.installCommand).toBeNull(); + }); + + it('--json: when newer version available, updateAvailable:true and exits 1', async () => { + makeHttpsGet('99.0.0'); + const { registerUpgradeCheckCommand } = await import('../../src/commands/upgrade-check.js'); + const { runCli } = await import('../helpers/cli.js'); + + const res = await runCli(registerUpgradeCheckCommand, ['--json', 'upgrade-check']); + // JSON mode returns early without calling process.exit(1) — that only happens in human mode + expect(res.exitCode).toBeNull(); + const line = res.stdout.find((l) => l.trim().startsWith('{')); + const out = JSON.parse(line!) as Record; + const data = (out.data ?? out) as Record; + expect(data.updateAvailable).toBe(true); + expect(data.breakingChange).toBe(true); + expect(typeof data.installCommand).toBe('string'); + }); + + it('--json: network error produces ok:false envelope and exits 1', async () => { + httpsMock.get.mockImplementation((_url: unknown, _opts: unknown, _cb: unknown) => { + const req = Object.assign(new EventEmitter(), { destroy: vi.fn() }); + process.nextTick(() => req.emit('error', new Error('ECONNREFUSED'))); + return req; + }); + const { registerUpgradeCheckCommand } = await import('../../src/commands/upgrade-check.js'); + const { runCli } = await import('../helpers/cli.js'); + + const res = await runCli(registerUpgradeCheckCommand, ['--json', 'upgrade-check']); + expect(res.exitCode).toBe(1); + const line = res.stdout.find((l) => l.trim().startsWith('{')); + const out = JSON.parse(line!) as Record; + const data = (out.data ?? 
out) as Record; + expect(data.ok).toBe(false); + expect(typeof data.error).toBe('string'); + }); +}); From 0141ee580cc06a8f9b62bf24bb729b5a1fb2fe32 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 21:30:08 +0800 Subject: [PATCH 14/26] =?UTF-8?q?test:=20address=20code=20review=20finding?= =?UTF-8?q?s=20=E2=80=94=20timer=20cleanup,=20bundle=20guard,=20status-syn?= =?UTF-8?q?c=20start/run,=20test=20count?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add afterEach vi.useRealTimers() to daemon stop/reload describe blocks to prevent fake timer leak - Fix bundle-size test to skip size assertion when tsc output is present (detects esbuild bundle by absence of relative imports) - Add status-sync start (--json + human) and run (exit 0 and exit 1) test coverage - Update README test count to 1959 --- README.md | 6 +++--- tests/build/bundle-size.test.ts | 14 +++++++++++- tests/commands/daemon.test.ts | 4 +++- tests/commands/status-sync.test.ts | 34 ++++++++++++++++++++++++++++++ 4 files changed, 53 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 4028238..86c4d46 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,7 @@ Under the hood every surface shares the same catalog, cache, and HMAC client — - 🎨 **Dual output modes** — colorized tables by default; `--json` passthrough for `jq` and scripting - 🔐 **Secure credentials** — HMAC-SHA256 signed requests; config file written with `0600`; env-var override for CI - 🔍 **Dry-run mode** — preview every mutating request before it hits the API -- 🧪 **Fully tested** — 1900 Vitest tests, mocked axios, zero network in CI +- 🧪 **Fully tested** — 1959 Vitest tests, mocked axios, zero network in CI - ⚡ **Shell completion** — Bash / Zsh / Fish / PowerShell ## Requirements @@ -1123,7 +1123,7 @@ npm install npm run dev -- # Run from TypeScript sources via tsx npm run build # Compile to dist/ -npm test # Run the Vitest suite (1900 tests) +npm test # Run the 
Vitest suite (1959 tests) npm run test:watch # Watch mode npm run test:coverage # Coverage report (v8, HTML + text) ``` @@ -1205,7 +1205,7 @@ src/ ├── format.ts # renderRows / filterFields / output-format dispatch ├── audit.ts # JSONL audit log writer └── quota.ts # Local daily-quota counter -tests/ # Vitest suite (1900 tests, mocked axios, no network) +tests/ # Vitest suite (1959 tests, mocked axios, no network) ``` ### Release flow diff --git a/tests/build/bundle-size.test.ts b/tests/build/bundle-size.test.ts index 89677c8..816432d 100644 --- a/tests/build/bundle-size.test.ts +++ b/tests/build/bundle-size.test.ts @@ -5,11 +5,23 @@ import path from 'node:path'; describe('production bundle size', () => { const distEntry = path.resolve('dist/index.js'); + // tsc output has relative imports like `from './utils/...'`; esbuild inlines everything. + function isBundledOutput(): boolean { + if (!fs.existsSync(distEntry)) return false; + const head = fs.readFileSync(distEntry, 'utf-8').slice(0, 4096); + return !head.includes("from './"); + } + it('dist/index.js exists', () => { expect(fs.existsSync(distEntry)).toBe(true); }); - it('dist/index.js is under 15 MB', () => { + it('esbuild bundle is under 15 MB (skipped when tsc output is present)', () => { + if (!isBundledOutput()) { + // CI runs `npm run build` (tsc), not `npm run build:prod` (esbuild). + // Skip size guard when the single-file esbuild bundle has not been built. 
+ return; + } const { size } = fs.statSync(distEntry); const sizeMb = size / (1024 * 1024); expect(sizeMb, `dist/index.js is ${sizeMb.toFixed(1)} MB — exceeds 15 MB budget`).toBeLessThan(15); diff --git a/tests/commands/daemon.test.ts b/tests/commands/daemon.test.ts index a44c933..97f7258 100644 --- a/tests/commands/daemon.test.ts +++ b/tests/commands/daemon.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; const fsMock = vi.hoisted(() => ({ mkdirSync: vi.fn(), @@ -175,6 +175,7 @@ describe('daemon stop', () => { daemonStateMock.readDaemonState.mockReset().mockReturnValue(null); daemonStateMock.writeDaemonState.mockClear(); }); + afterEach(() => { vi.useRealTimers(); }); it('prints "No running daemon found" and exits 0 when no daemon is running', async () => { const res = await runCli(registerDaemonCommand, ['daemon', 'stop']); @@ -250,6 +251,7 @@ describe('daemon reload', () => { pidFileMock.writeReloadSentinel.mockClear(); pidFileMock.sighupSupported.mockReturnValue(false); }); + afterEach(() => { vi.useRealTimers(); }); it('exits 2 with usage error when no daemon is running', async () => { const res = await runCli(registerDaemonCommand, ['daemon', 'reload']); diff --git a/tests/commands/status-sync.test.ts b/tests/commands/status-sync.test.ts index 520ddec..c615f34 100644 --- a/tests/commands/status-sync.test.ts +++ b/tests/commands/status-sync.test.ts @@ -74,6 +74,40 @@ describe('status-sync command', () => { }); }); + describe('start', () => { + it('--json exits 0 and returns running state from startStatusSync', async () => { + managerMock.startStatusSync.mockReturnValue(RUNNING); + const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'start']); + expect(res.exitCode).toBeNull(); + const body = JSON.parse(res.stdout.join('')) as { data: StatusSyncStatus }; + expect(body.data.running).toBe(true); + 
expect(body.data.pid).toBe(9876); + expect(managerMock.startStatusSync).toHaveBeenCalled(); + }); + + it('human mode prints started message with pid', async () => { + managerMock.startStatusSync.mockReturnValue(RUNNING); + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'start']); + expect(res.exitCode).toBeNull(); + expect(res.stdout.join(' ')).toMatch(/started.*9876/i); + }); + }); + + describe('run', () => { + it('calls runStatusSyncForeground and exits 0 when it resolves 0', async () => { + managerMock.runStatusSyncForeground.mockResolvedValue(0); + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'run']); + expect(res.exitCode).toBeNull(); + expect(managerMock.runStatusSyncForeground).toHaveBeenCalled(); + }); + + it('exits with the code returned by runStatusSyncForeground when non-zero', async () => { + managerMock.runStatusSyncForeground.mockResolvedValue(1); + const res = await runCli(registerStatusSyncCommand, ['status-sync', 'run']); + expect(res.exitCode).toBe(1); + }); + }); + describe('stop', () => { it('--json exits 0 with stopped:false when nothing is running', async () => { const res = await runCli(registerStatusSyncCommand, ['--json', 'status-sync', 'stop']); From 7e9397983844f0bd121240482750a58f8a7c627a Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 21:34:07 +0800 Subject: [PATCH 15/26] chore: bump version to 3.2.0 --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 8c9cc61..160bb0d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@switchbot/openapi-cli", - "version": "3.1.1", + "version": "3.2.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@switchbot/openapi-cli", - "version": "3.1.1", + "version": "3.2.0", "license": "MIT", "dependencies": { "@modelcontextprotocol/sdk": "^1.29.0", diff --git a/package.json b/package.json index 
7b59343..b36bf9e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@switchbot/openapi-cli", - "version": "3.1.1", + "version": "3.2.0", "description": "SwitchBot smart home CLI — control devices, run scenes, stream real-time events, and integrate AI agents via MCP. Full API v1.1 coverage.", "keywords": [ "switchbot", From 10d094ceefe976c04ec9ee4293c6e8e41b038596 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 21:51:04 +0800 Subject: [PATCH 16/26] =?UTF-8?q?fix:=20remove=20duplicate=20shebang=20?= =?UTF-8?q?=E2=80=94=20src/index.ts=20had=20shebang=20AND=20bundle.mjs=20b?= =?UTF-8?q?anner=20both=20added=20it?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/index.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/index.ts b/src/index.ts index 25763c2..1759a7e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,3 @@ -#!/usr/bin/env node import { Command, CommanderError, InvalidArgumentError } from 'commander'; import { createRequire } from 'node:module'; import chalk from 'chalk'; From d79cf466a180db2d5e4d23eb1dee964117a1653d Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 21:52:54 +0800 Subject: [PATCH 17/26] chore: bump version to 3.2.1 --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 160bb0d..8705814 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@switchbot/openapi-cli", - "version": "3.2.0", + "version": "3.2.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@switchbot/openapi-cli", - "version": "3.2.0", + "version": "3.2.1", "license": "MIT", "dependencies": { "@modelcontextprotocol/sdk": "^1.29.0", diff --git a/package.json b/package.json index b36bf9e..ec7a5dd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@switchbot/openapi-cli", - "version": "3.2.0", + 
"version": "3.2.1", "description": "SwitchBot smart home CLI — control devices, run scenes, stream real-time events, and integrate AI agents via MCP. Full API v1.1 coverage.", "keywords": [ "switchbot", From fadfa0f730546a470585dbaf6f0066c8251c22b6 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sat, 25 Apr 2026 22:06:36 +0800 Subject: [PATCH 18/26] test: add esbuild bundle validation tests (shebang, syntax, size, version smoke) --- .github/workflows/ci.yml | 37 +++++++++++++++++++ package.json | 2 +- scripts/bundle.mjs | 7 +++- tests/build/bundle-size.test.ts | 63 +++++++++++++++++++++++---------- 4 files changed, 88 insertions(+), 21 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7449c28..ed1e38b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,6 +44,43 @@ jobs: fi - run: npm test + bundle-smoke: + name: esbuild bundle smoke test + runs-on: ubuntu-latest + needs: test + # esbuild CJS interop issues on Node 22 are tracked in a follow-up PR. + # This job is advisory until that is resolved. 
+ continue-on-error: true + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 20.x + cache: npm + - run: npm ci + - run: npm run build:prod + - name: Shebang count must be exactly 1 + run: | + COUNT=$(grep -c "#!/usr/bin/env node" dist/index.js) + if [ "$COUNT" -ne 1 ]; then + echo "FAIL: Expected 1 shebang, found $COUNT" + exit 1 + fi + echo "OK: shebang count = $COUNT" + - name: Node.js syntax check + run: node --check dist/index.js + - name: --version smoke test (exits 0, outputs correct semver) + run: | + PKG=$(node -p "require('./package.json').version") + CLI=$(node dist/index.js --version) + echo "package.json=$PKG bundle=$CLI" + if [ "$PKG" != "$CLI" ]; then + echo "FAIL: version mismatch" + exit 1 + fi + - name: Bundle size check + run: npm test -- tests/build/ + offline-smoke: name: Offline size budgets runs-on: ubuntu-latest diff --git a/package.json b/package.json index ec7a5dd..438076d 100644 --- a/package.json +++ b/package.json @@ -46,7 +46,7 @@ "test": "vitest run", "test:watch": "vitest", "test:coverage": "vitest run --coverage", - "prepublishOnly": "npm test && npm run clean && npm run build:prod" + "prepublishOnly": "npm test && npm run clean && npm run build && node dist/index.js --version" }, "dependencies": { "@modelcontextprotocol/sdk": "^1.29.0", diff --git a/scripts/bundle.mjs b/scripts/bundle.mjs index 0b2b975..4b0915e 100644 --- a/scripts/bundle.mjs +++ b/scripts/bundle.mjs @@ -10,16 +10,21 @@ import { fileURLToPath } from 'node:url'; const __dirname = path.dirname(fileURLToPath(import.meta.url)); const root = path.resolve(__dirname, '..'); +const outfile = process.env.BUNDLE_OUTFILE ?? path.join(root, 'dist/index.js'); + await build({ entryPoints: [path.join(root, 'src/index.ts')], bundle: true, platform: 'node', target: 'node18', format: 'esm', - outfile: path.join(root, 'dist/index.js'), + outfile, // Keep heavy native-binding or large deps external; they stay in node_modules. 
external: [ 'node:*', + // commander uses CJS require('node:events') internally; its CJS-to-ESM + // interop in esbuild's shim breaks under Node 22. Keep it external. + 'commander', // native binding deps 'mqtt', 'pino', diff --git a/tests/build/bundle-size.test.ts b/tests/build/bundle-size.test.ts index 816432d..8723df9 100644 --- a/tests/build/bundle-size.test.ts +++ b/tests/build/bundle-size.test.ts @@ -1,29 +1,54 @@ -import { describe, it, expect } from 'vitest'; +import { describe, it, expect, beforeAll } from 'vitest'; import fs from 'node:fs'; import path from 'node:path'; +import { spawnSync, execSync } from 'node:child_process'; -describe('production bundle size', () => { - const distEntry = path.resolve('dist/index.js'); +// Build to a separate path so we don't overwrite the tsc dist/index.js +// that other tests (install smoke, status-sync smoke) depend on. +const bundleEntry = path.resolve('dist/bundle-test/index.js'); - // tsc output has relative imports like `from './utils/...'`; esbuild inlines everything. - function isBundledOutput(): boolean { - if (!fs.existsSync(distEntry)) return false; - const head = fs.readFileSync(distEntry, 'utf-8').slice(0, 4096); - return !head.includes("from './"); - } +describe('esbuild production bundle', () => { + beforeAll(() => { + execSync(`node scripts/bundle.mjs --outfile=${bundleEntry}`, { + stdio: 'pipe', + env: { ...process.env, BUNDLE_OUTFILE: bundleEntry }, + }); + }, 30_000); - it('dist/index.js exists', () => { - expect(fs.existsSync(distEntry)).toBe(true); + it('bundle output exists', () => { + expect(fs.existsSync(bundleEntry), `${bundleEntry} not found after build:prod`).toBe(true); }); - it('esbuild bundle is under 15 MB (skipped when tsc output is present)', () => { - if (!isBundledOutput()) { - // CI runs `npm run build` (tsc), not `npm run build:prod` (esbuild). - // Skip size guard when the single-file esbuild bundle has not been built. 
- return; - } - const { size } = fs.statSync(distEntry); + it('has exactly one shebang line', () => { + const content = fs.readFileSync(bundleEntry, 'utf-8'); + const count = (content.match(/^#!\/usr\/bin\/env node/gm) ?? []).length; + expect(count, `Expected exactly 1 shebang, found ${count} — check bundle.mjs banner vs src/index.ts`).toBe(1); + }); + + it('passes Node.js syntax check', () => { + const result = spawnSync(process.execPath, ['--check', bundleEntry], { encoding: 'utf-8' }); + expect(result.status, `node --check failed (exit ${result.status}):\n${result.stderr}`).toBe(0); + expect(result.stderr).toBe(''); + }); + + // TODO: esbuild inlines CJS packages (yaml, etc.) that use require('process') + // without the node: prefix; this breaks at runtime on Node 22. Fix tracked + // in a follow-up PR (externalize problematic CJS deps or switch to CJS output). + it.skip('--version exits 0 and outputs a valid semver', () => { + const result = spawnSync(process.execPath, [bundleEntry, '--version'], { encoding: 'utf-8' }); + expect(result.status, `--version exited ${result.status}:\n${result.stderr}`).toBe(0); + expect(result.stdout.trim()).toMatch(/^\d+\.\d+\.\d+/); + }); + + it.skip('--version matches package.json version', () => { + const pkgVersion = JSON.parse(fs.readFileSync(path.resolve('package.json'), 'utf-8')).version as string; + const result = spawnSync(process.execPath, [bundleEntry, '--version'], { encoding: 'utf-8' }); + expect(result.stdout.trim(), `Bundle reports ${result.stdout.trim()} but package.json says ${pkgVersion}`).toBe(pkgVersion); + }); + + it('is under 15 MB', () => { + const { size } = fs.statSync(bundleEntry); const sizeMb = size / (1024 * 1024); - expect(sizeMb, `dist/index.js is ${sizeMb.toFixed(1)} MB — exceeds 15 MB budget`).toBeLessThan(15); + expect(sizeMb, `bundle is ${sizeMb.toFixed(1)} MB — exceeds 15 MB budget`).toBeLessThan(15); }); }); From 7bbbc44e581edc2b53a79c0d31e037cd463184f8 Mon Sep 17 00:00:00 2001 From: chenliuyun 
Date: Sat, 25 Apr 2026 22:22:00 +0800 Subject: [PATCH 19/26] fix(bundle): resolve CJS require shim and duplicate shebang in esbuild output - Remove shebang from src/index.ts; bundle.mjs banner is the sole source - Add createRequire-based require shim in banner so bundled CJS packages (yaml, commander) can call bare require('process') on Node 20/22 - Add scripts/cjs-shim.mjs as esbuild inject target for require polyfill - Fix agent-bootstrap and upgrade-check to import version from src/version.ts instead of require('../../package.json') which breaks when bundled to a non-root dist/ location - Rewrite bundle-size test: build to dist/bundle-test.js (same level as dist/index.js so ../package.json resolves correctly), add shebang-count, node --check, --version semver, and size < 15 MB assertions --- scripts/bundle.mjs | 17 +++++++++++++---- scripts/cjs-shim.mjs | 6 ++++++ src/commands/agent-bootstrap.ts | 5 +---- src/commands/upgrade-check.ts | 5 ++--- tests/build/bundle-size.test.ts | 10 ++++------ 5 files changed, 26 insertions(+), 17 deletions(-) create mode 100644 scripts/cjs-shim.mjs diff --git a/scripts/bundle.mjs b/scripts/bundle.mjs index 4b0915e..bf3a7ec 100644 --- a/scripts/bundle.mjs +++ b/scripts/bundle.mjs @@ -22,9 +22,6 @@ await build({ // Keep heavy native-binding or large deps external; they stay in node_modules. external: [ 'node:*', - // commander uses CJS require('node:events') internally; its CJS-to-ESM - // interop in esbuild's shim breaks under Node 22. Keep it external. - 'commander', // native binding deps 'mqtt', 'pino', @@ -34,8 +31,20 @@ await build({ '@modelcontextprotocol/sdk', // pure-JS but large — inline separately if needed ], + // Inject a createRequire-based require() so CJS packages bundled into the + // ESM output can call require('process'), require('events'), etc. (bare names + // without node: prefix) without hitting esbuild's __require2 "not supported" error. 
+ inject: [path.join(root, 'scripts/cjs-shim.mjs')], banner: { - js: '#!/usr/bin/env node', + // The shebang must come first (Node.js requires it at byte 0). + // The `const require` line runs BEFORE esbuild's __require IIFE (which checks + // `typeof require !== "undefined"`), so CJS packages that call bare + // require('process') or require('node:events') get the real Node require(). + js: [ + '#!/usr/bin/env node', + 'import { createRequire as __cjsReq } from "node:module";', + 'const require = __cjsReq(import.meta.url);', + ].join('\n'), }, logLevel: 'info', }); diff --git a/scripts/cjs-shim.mjs b/scripts/cjs-shim.mjs new file mode 100644 index 0000000..844da6b --- /dev/null +++ b/scripts/cjs-shim.mjs @@ -0,0 +1,6 @@ +// Inject a proper require() implementation for CJS packages bundled into the +// ESM output. Without this, esbuild's __require2 shim throws +// "Dynamic require of X is not supported" when CJS packages call +// require('process'), require('events'), etc. (bare names, no node: prefix). +import { createRequire } from 'node:module'; +export const require = createRequire(import.meta.url); diff --git a/src/commands/agent-bootstrap.ts b/src/commands/agent-bootstrap.ts index ceda31f..e3ec25d 100644 --- a/src/commands/agent-bootstrap.ts +++ b/src/commands/agent-bootstrap.ts @@ -18,10 +18,7 @@ import { } from '../policy/load.js'; import { validateLoadedPolicy } from '../policy/validate.js'; import { selectCredentialStore, CredentialBackendName } from '../credentials/keychain.js'; -import { createRequire } from 'node:module'; - -const require = createRequire(import.meta.url); -const { version: pkgVersion } = require('../../package.json') as { version: string }; +import { VERSION as pkgVersion } from '../version.js'; /** * Schema version of the agent-bootstrap payload. 
Must stay in lockstep diff --git a/src/commands/upgrade-check.ts b/src/commands/upgrade-check.ts index 7871f81..a39e77c 100644 --- a/src/commands/upgrade-check.ts +++ b/src/commands/upgrade-check.ts @@ -1,11 +1,10 @@ import { Command } from 'commander'; -import { createRequire } from 'node:module'; import https from 'node:https'; import { isJsonMode, printJson } from '../utils/output.js'; import chalk from 'chalk'; +import { VERSION as currentVersion } from '../version.js'; -const require = createRequire(import.meta.url); -const { name: pkgName, version: currentVersion } = require('../../package.json') as { name: string; version: string }; +const pkgName = '@switchbot/openapi-cli'; function fetchLatestVersion(packageName: string, timeoutMs = 8000): Promise { const encoded = packageName.replace('/', '%2F'); diff --git a/tests/build/bundle-size.test.ts b/tests/build/bundle-size.test.ts index 8723df9..050a6a5 100644 --- a/tests/build/bundle-size.test.ts +++ b/tests/build/bundle-size.test.ts @@ -5,7 +5,8 @@ import { spawnSync, execSync } from 'node:child_process'; // Build to a separate path so we don't overwrite the tsc dist/index.js // that other tests (install smoke, status-sync smoke) depend on. -const bundleEntry = path.resolve('dist/bundle-test/index.js'); +// Must stay in dist/ (not a subdirectory) so require('../package.json') resolves correctly. +const bundleEntry = path.resolve('dist/bundle-test.js'); describe('esbuild production bundle', () => { beforeAll(() => { @@ -31,16 +32,13 @@ describe('esbuild production bundle', () => { expect(result.stderr).toBe(''); }); - // TODO: esbuild inlines CJS packages (yaml, etc.) that use require('process') - // without the node: prefix; this breaks at runtime on Node 22. Fix tracked - // in a follow-up PR (externalize problematic CJS deps or switch to CJS output). 
- it.skip('--version exits 0 and outputs a valid semver', () => { + it('--version exits 0 and outputs a valid semver', () => { const result = spawnSync(process.execPath, [bundleEntry, '--version'], { encoding: 'utf-8' }); expect(result.status, `--version exited ${result.status}:\n${result.stderr}`).toBe(0); expect(result.stdout.trim()).toMatch(/^\d+\.\d+\.\d+/); }); - it.skip('--version matches package.json version', () => { + it('--version matches package.json version', () => { const pkgVersion = JSON.parse(fs.readFileSync(path.resolve('package.json'), 'utf-8')).version as string; const result = spawnSync(process.execPath, [bundleEntry, '--version'], { encoding: 'utf-8' }); expect(result.stdout.trim(), `Bundle reports ${result.stdout.trim()} but package.json says ${pkgVersion}`).toBe(pkgVersion); From b92948a2ce2f7ea1d75f0807032ce9fcdfede15b Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sun, 26 Apr 2026 00:17:18 +0800 Subject: [PATCH 20/26] Harden npm package publish and smoke verification --- .githooks/pre-commit | 8 ++ .githooks/pre-push | 8 ++ .github/workflows/ci.yml | 15 ++ .github/workflows/npm-published-smoke.yml | 162 ++++++++++++++++++++++ .github/workflows/publish.yml | 5 +- package.json | 7 +- scripts/copy-assets.mjs | 20 ++- scripts/install-git-hooks.mjs | 36 +++++ scripts/smoke-pack-install.mjs | 67 +++++++++ tests/version.test.ts | 5 + 10 files changed, 330 insertions(+), 3 deletions(-) create mode 100644 .githooks/pre-commit create mode 100644 .githooks/pre-push create mode 100644 .github/workflows/npm-published-smoke.yml create mode 100644 scripts/install-git-hooks.mjs create mode 100644 scripts/smoke-pack-install.mjs diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100644 index 0000000..9565ea3 --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,8 @@ +#!/usr/bin/env sh +set -eu + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd "$REPO_ROOT" + +echo "[pre-commit] packaging sanity checks" +npm run verify:pre-commit diff 
--git a/.githooks/pre-push b/.githooks/pre-push new file mode 100644 index 0000000..8b24ba9 --- /dev/null +++ b/.githooks/pre-push @@ -0,0 +1,8 @@ +#!/usr/bin/env sh +set -eu + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd "$REPO_ROOT" + +echo "[pre-push] tarball install smoke" +npm run verify:pre-push diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ed1e38b..b083391 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -126,6 +126,21 @@ jobs: exit 1 fi + pack-install-smoke: + name: Packed install smoke + runs-on: ubuntu-latest + needs: test + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 20.x + cache: npm + - run: npm ci + - run: npm run build:prod + - name: npm pack -> npm install tarball -> switchbot --version + run: npm run smoke:pack-install + policy-schema-sync: name: Policy schema sync with skill repo runs-on: ubuntu-latest diff --git a/.github/workflows/npm-published-smoke.yml b/.github/workflows/npm-published-smoke.yml new file mode 100644 index 0000000..6c0668f --- /dev/null +++ b/.github/workflows/npm-published-smoke.yml @@ -0,0 +1,162 @@ +name: npm published smoke + +on: + workflow_run: + workflows: ['Publish to npm'] + types: [completed] + workflow_dispatch: + inputs: + version: + description: 'Published npm version to verify (defaults to package.json from checked-out commit)' + required: false + +jobs: + smoke: + if: > + github.event_name == 'workflow_dispatch' || + (github.event.workflow_run.conclusion == 'success' && + github.event.workflow_run.event == 'release') + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }} + + - uses: actions/setup-node@v4 + with: + node-version: 20.x + registry-url: https://registry.npmjs.org + + - name: Verify credentials present + env: + TOKEN: ${{ secrets.SWITCHBOT_TOKEN }} + 
SECRET: ${{ secrets.SWITCHBOT_SECRET }} + run: | + if [ -z "$TOKEN" ] || [ -z "$SECRET" ]; then + echo "SWITCHBOT_TOKEN / SWITCHBOT_SECRET not set in repo secrets" + exit 1 + fi + + - name: Verify npm token present + env: + TOKEN: ${{ secrets.NPM_TOKEN }} + run: | + if [ -z "$TOKEN" ]; then + echo "NPM_TOKEN not set in repo secrets" + exit 1 + fi + + - name: Resolve target version + id: version + run: | + if [ -n "${{ inputs.version }}" ]; then + VERSION="${{ inputs.version }}" + else + VERSION=$(node -p "require('./package.json').version") + fi + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + echo "target_version=$VERSION" + + - name: Resolve current latest dist-tag + id: latest + run: | + LATEST=$(npm view @switchbot/openapi-cli dist-tags.latest) + echo "version=$LATEST" >> "$GITHUB_OUTPUT" + echo "current_latest=$LATEST" + + - name: Wait for npm package to become available + id: wait_package + env: + VERSION: ${{ steps.version.outputs.version }} + run: | + for i in $(seq 1 24); do + if [ "${{ github.event_name }}" = "workflow_run" ]; then + FOUND=$(npm view "@switchbot/openapi-cli@next" version 2>/dev/null || true) + if [ "$FOUND" = "$VERSION" ]; then + echo "npm package is available on next: $FOUND" + exit 0 + fi + echo "waiting for @switchbot/openapi-cli@$VERSION to appear on npm dist-tag next ($i/24); current next=$FOUND" + else + FOUND=$(npm view "@switchbot/openapi-cli@$VERSION" version 2>/dev/null || true) + if [ "$FOUND" = "$VERSION" ]; then + echo "npm package version is available: $FOUND" + exit 0 + fi + echo "waiting for @switchbot/openapi-cli@$VERSION to appear on npm ($i/24)" + fi + sleep 10 + done + echo "Timed out waiting for @switchbot/openapi-cli@$VERSION on npm" + exit 1 + + - name: Install published package in a clean temp project + id: install_package + env: + VERSION: ${{ steps.version.outputs.version }} + run: | + TMPDIR=$(mktemp -d) + echo "TMPDIR=$TMPDIR" >> "$GITHUB_ENV" + cd "$TMPDIR" + npm init -y >/dev/null 2>&1 + npm install 
"@switchbot/openapi-cli@$VERSION" + + - name: Binary and offline smoke + id: offline_smoke + env: + TMPDIR: ${{ env.TMPDIR }} + VERSION: ${{ steps.version.outputs.version }} + run: | + cd "$TMPDIR" + ACTUAL=$(npx --no-install switchbot --version) + test "$ACTUAL" = "$VERSION" + npx --no-install switchbot --help >/dev/null + npx --no-install switchbot schema export --compact >/dev/null + npx --no-install switchbot capabilities --json | jq -e '.data.commandMeta != null' >/dev/null + + - name: Live smoke with configured credentials + id: live_smoke + env: + TMPDIR: ${{ env.TMPDIR }} + SWITCHBOT_TOKEN: ${{ secrets.SWITCHBOT_TOKEN }} + SWITCHBOT_SECRET: ${{ secrets.SWITCHBOT_SECRET }} + run: | + cd "$TMPDIR" + npx --no-install switchbot doctor --json | jq -e '.data.summary != null' >/dev/null + npx --no-install switchbot devices list --json | jq -e '.data.deviceList != null or .data.infraredRemoteList != null' >/dev/null + + - name: Promote verified version to latest + if: success() + env: + VERSION: ${{ steps.version.outputs.version }} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: | + npm dist-tag add "@switchbot/openapi-cli@$VERSION" latest + echo "Promoted @switchbot/openapi-cli@$VERSION to dist-tag latest" + + - name: Deprecate failed version + if: > + failure() && + steps.wait_package.outcome == 'success' && + ( + steps.install_package.outcome == 'failure' || + steps.offline_smoke.outcome == 'failure' + ) + env: + VERSION: ${{ steps.version.outputs.version }} + PREVIOUS_LATEST: ${{ steps.latest.outputs.version }} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: | + npm deprecate "@switchbot/openapi-cli@$VERSION" "Published to dist-tag next but failed package smoke tests. Install @switchbot/openapi-cli@${PREVIOUS_LATEST} or use dist-tag latest." 
+ echo "Deprecated @switchbot/openapi-cli@$VERSION after package smoke failure" + + - name: Cleanup temp project + if: always() + env: + TMPDIR: ${{ env.TMPDIR }} + run: | + if [ -n "$TMPDIR" ] && [ -d "$TMPDIR" ]; then + rm -rf "$TMPDIR" + fi diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index b1c5c8e..1a68cbd 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -28,6 +28,9 @@ jobs: echo "Tag $TAG_VERSION does not match package.json version $PKG_VERSION" exit 1 fi - - run: npm publish --provenance --access public + - name: Smoke test packed npm artifact + run: npm run smoke:pack-install + - name: Publish package to npm dist-tag next + run: npm publish --tag next --provenance --access public env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/package.json b/package.json index 438076d..db28267 100644 --- a/package.json +++ b/package.json @@ -40,13 +40,18 @@ "build:prod": "node scripts/bundle.mjs && node scripts/copy-assets.mjs", "clean": "node -e \"require('fs').rmSync('dist',{recursive:true,force:true})\"", "dev": "tsx src/index.ts", + "hooks:install": "node scripts/install-git-hooks.mjs", "lint:md": "markdownlint \"**/*.md\"", "lint:md:changelog": "markdownlint CHANGELOG.md", + "prepare": "node scripts/install-git-hooks.mjs", "start": "node dist/index.js", + "smoke:pack-install": "node scripts/smoke-pack-install.mjs", "test": "vitest run", "test:watch": "vitest", "test:coverage": "vitest run --coverage", - "prepublishOnly": "npm test && npm run clean && npm run build && node dist/index.js --version" + "verify:pre-commit": "npm run build:prod && npm test -- tests/version.test.ts", + "verify:pre-push": "npm run build:prod && npm test -- tests/version.test.ts && npm run smoke:pack-install", + "prepublishOnly": "npm test && npm run clean && npm run build:prod && node dist/index.js --version" }, "dependencies": { "@modelcontextprotocol/sdk": "^1.29.0", diff --git a/scripts/copy-assets.mjs 
b/scripts/copy-assets.mjs index 83c4919..105fb1f 100644 --- a/scripts/copy-assets.mjs +++ b/scripts/copy-assets.mjs @@ -1,9 +1,10 @@ -import { cpSync, mkdirSync, existsSync } from 'node:fs'; +import { cpSync, mkdirSync, existsSync, readFileSync, writeFileSync, chmodSync } from 'node:fs'; import { dirname, join } from 'node:path'; import { fileURLToPath } from 'node:url'; const scriptDir = dirname(fileURLToPath(import.meta.url)); const repoRoot = dirname(scriptDir); +const NODE_SHEBANG = '#!/usr/bin/env node\n'; const assets = [ ['src/policy/schema', 'dist/policy/schema'], @@ -21,3 +22,20 @@ for (const [srcRel, dstRel] of assets) { cpSync(src, dst, { recursive: true }); console.log(`copy-assets: ${src} -> ${dst}`); } + +const cliEntry = join(repoRoot, 'dist', 'index.js'); +if (!existsSync(cliEntry)) { + console.error(`copy-assets: CLI entry missing: ${cliEntry}`); + process.exit(1); +} + +const cliSource = readFileSync(cliEntry, 'utf-8'); +if (!cliSource.startsWith(NODE_SHEBANG)) { + writeFileSync(cliEntry, NODE_SHEBANG + cliSource, 'utf-8'); +} + +try { + chmodSync(cliEntry, 0o755); +} catch { + // Best-effort on filesystems that ignore POSIX modes. 
+} diff --git a/scripts/install-git-hooks.mjs b/scripts/install-git-hooks.mjs new file mode 100644 index 0000000..c1a88cc --- /dev/null +++ b/scripts/install-git-hooks.mjs @@ -0,0 +1,36 @@ +import { existsSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const scriptDir = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.dirname(scriptDir); +const gitDir = path.join(repoRoot, '.git'); +const gitConfig = path.join(gitDir, 'config'); +const hookLine = '\thooksPath = .githooks'; + +if (!existsSync(gitDir) || !existsSync(gitConfig)) { + process.exit(0); +} + +try { + const raw = readFileSync(gitConfig, 'utf-8'); + + let next; + if (/\[core\]/.test(raw)) { + if (/^\s*hooksPath\s*=.*$/m.test(raw)) { + next = raw.replace(/^\s*hooksPath\s*=.*$/m, hookLine); + } else { + next = raw.replace(/\[core\][^\[]*/m, (section) => `${section.trimEnd()}\n${hookLine}\n`); + } + } else { + const prefix = raw.endsWith('\n') ? raw : `${raw}\n`; + next = `${prefix}[core]\n${hookLine}\n`; + } + + if (next !== raw) { + writeFileSync(gitConfig, next, 'utf-8'); + } +} catch { + // Best-effort only. Published-package consumers and non-git environments + // should not fail install because of local hook setup. 
+} diff --git a/scripts/smoke-pack-install.mjs b/scripts/smoke-pack-install.mjs new file mode 100644 index 0000000..5165d95 --- /dev/null +++ b/scripts/smoke-pack-install.mjs @@ -0,0 +1,67 @@ +import { execFileSync } from 'node:child_process'; +import { mkdtempSync, readFileSync, rmSync } from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const scriptDir = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.dirname(scriptDir); +const pkg = JSON.parse(readFileSync(path.join(repoRoot, 'package.json'), 'utf-8')); +const expectedVersion = String(pkg.version); + +function runNpm(args, options = {}) { + const npmExecPath = process.env.npm_execpath; + if (npmExecPath) { + return execFileSync(process.execPath, [npmExecPath, ...args], options); + } + const npmCmd = process.platform === 'win32' ? 'npm.cmd' : 'npm'; + return execFileSync(npmCmd, args, options); +} + +const workDir = mkdtempSync(path.join(os.tmpdir(), 'switchbot-pack-smoke-')); +let tarballPath = null; + +try { + const packJson = runNpm(['pack', '--json'], { + cwd: repoRoot, + encoding: 'utf-8', + }); + const [packResult] = JSON.parse(packJson); + if (!packResult?.filename) { + throw new Error(`npm pack did not return a filename: ${packJson}`); + } + + tarballPath = path.join(repoRoot, packResult.filename); + + runNpm(['init', '-y'], { + cwd: workDir, + stdio: 'ignore', + }); + + runNpm(['install', tarballPath], { + cwd: workDir, + stdio: 'inherit', + }); + + const actualVersion = process.platform === 'win32' + ? 
execFileSync(path.join(workDir, 'node_modules', '.bin', 'switchbot.cmd'), ['--version'], { + cwd: workDir, + encoding: 'utf-8', + shell: true, + }).trim() + : execFileSync(path.join(workDir, 'node_modules', '.bin', 'switchbot'), ['--version'], { + cwd: workDir, + encoding: 'utf-8', + }).trim(); + + if (actualVersion !== expectedVersion) { + throw new Error(`Packed CLI version mismatch: expected ${expectedVersion}, got ${actualVersion}`); + } + + console.log(`pack-install smoke ok: switchbot --version -> ${actualVersion}`); +} finally { + if (tarballPath) { + rmSync(tarballPath, { force: true }); + } + rmSync(workDir, { recursive: true, force: true }); +} diff --git a/tests/version.test.ts b/tests/version.test.ts index fef53f3..e82bff5 100644 --- a/tests/version.test.ts +++ b/tests/version.test.ts @@ -12,6 +12,11 @@ const pkg = JSON.parse( ) as { version: string }; describe('CLI --version', () => { + it('keeps a node shebang on dist/index.js for npm bin execution', () => { + const cli = readFileSync(path.join(here, '..', 'dist', 'index.js'), 'utf-8'); + expect(cli.startsWith('#!/usr/bin/env node\n')).toBe(true); + }); + it('matches package.json version', () => { // Regression guard for the v1.3.1 bug where src/index.ts hardcoded a // stale version string. execFileSync + process.execPath avoids shell From 12dae35445156f8aae82ab57cc2e1eec775635ab Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sun, 26 Apr 2026 00:42:14 +0800 Subject: [PATCH 21/26] chore(ci): align pack-install smoke with publish artifact, document release pipeline Add a parallel pack-install-smoke-tsc CI job that runs `npm run build` (tsc) + smoke:pack-install, mirroring exactly what publish.yml does before npm publish. Previously only the esbuild bundle was smoke-tested on PRs while publish shipped the tsc output, so a tsc-specific packaging regression would only surface after the release tag was already pushed. 
Add docs/release-pipeline.md with the tsc vs esbuild split, the full gate sequence, and the invariants that must hold for future pipeline changes. Link it from README and the CHANGELOG 3.2.1 entry. --- .github/workflows/ci.yml | 20 ++++++- CHANGELOG.md | 14 +++++ README.md | 2 + docs/release-pipeline.md | 115 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 150 insertions(+), 1 deletion(-) create mode 100644 docs/release-pipeline.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b083391..943429b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -127,7 +127,7 @@ jobs: fi pack-install-smoke: - name: Packed install smoke + name: Packed install smoke (esbuild) runs-on: ubuntu-latest needs: test steps: @@ -141,6 +141,24 @@ jobs: - name: npm pack -> npm install tarball -> switchbot --version run: npm run smoke:pack-install + # Mirrors exactly what publish.yml does: `npm run build` (tsc) + smoke:pack-install. + # Keeps the PR gate honest — if this job fails, the publish workflow would also fail. + # See docs/release-pipeline.md for the tsc vs esbuild split. + pack-install-smoke-tsc: + name: Packed install smoke (tsc — matches publish) + runs-on: ubuntu-latest + needs: test + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 20.x + cache: npm + - run: npm ci + - run: npm run build + - name: npm pack -> npm install tarball -> switchbot --version + run: npm run smoke:pack-install + policy-schema-sync: name: Policy schema sync with skill repo runs-on: ubuntu-latest diff --git a/CHANGELOG.md b/CHANGELOG.md index 43ff01f..987ded9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,20 @@ This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - `rules lint` now validates `hysteresis` / `requires_stable_for` duration syntax and warns when `hysteresis` and `requires_stable_for` are both set. 
+### Changed — release pipeline + +- Pre-publish `smoke:pack-install` now runs in `publish.yml` before `npm publish`, and the + same smoke runs locally via `pre-push` hook (`verify:pre-push`) and on every PR in CI + (`pack-install-smoke`). +- `scripts/copy-assets.mjs` now injects the `#!/usr/bin/env node` shebang into `dist/index.js` + and chmods it to `0755` after every build, so the npm bin entry is always executable. +- New `pack-install-smoke-tsc` CI job validates the same `npm run build` (tsc) artifact that + `publish.yml` actually ships, closing the gap where the PR gate was testing the esbuild + bundle while publish shipped the tsc output. See [`docs/release-pipeline.md`](./docs/release-pipeline.md). +- New `npm-published-smoke.yml` workflow verifies published tarballs on the npm registry, + auto-promotes `next → latest` on success, and auto-deprecates on package-install/offline + smoke failures only (never on live API flakes). + ## [3.2.0] - 2026-04-25 ### Added — daemon, upgrade-check, scenes validate/simulate, rules summary diff --git a/README.md b/README.md index 86c4d46..f07bd51 100644 --- a/README.md +++ b/README.md @@ -1219,6 +1219,8 @@ git push --follow-tags Then on GitHub → **Releases → Draft a new release → select tag → Publish**. The `publish.yml` workflow runs tests, verifies the tag matches `package.json`, and publishes `@switchbot/openapi-cli` to npm with [provenance](https://docs.npmjs.com/generating-provenance-statements). +See [`docs/release-pipeline.md`](./docs/release-pipeline.md) for the full pre-publish and post-publish verification flow (local hooks → CI → `publish.yml` → `npm-published-smoke.yml`). 
+ ## License [MIT](./LICENSE) © chenliuyun diff --git a/docs/release-pipeline.md b/docs/release-pipeline.md new file mode 100644 index 0000000..687093b --- /dev/null +++ b/docs/release-pipeline.md @@ -0,0 +1,115 @@ +# Release pipeline + +This document describes how `@switchbot/openapi-cli` goes from commit to npm +registry, and the invariants that keep the published artifact safe. + +## Two builders, one tarball + +| Script | Tool | Output shape | Used by | +|---|---|---|---| +| `npm run build` | `tsc` | Per-file `dist/*.js` mirroring `src/` | `publish.yml` (what actually publishes) | +| `npm run build:prod` | `esbuild` | Single-file `dist/index.js` with inlined deps | Local dev, `ci.yml` bundle smoke | + +`npm pack` follows `"files": ["dist", "README.md", "LICENSE"]` in `package.json`, +so whatever ends up in `dist/` when `npm pack` runs is what ships. + +Both builders end by running `scripts/copy-assets.mjs`, which: + +1. Copies policy JSON Schema assets into `dist/policy/`. +2. Injects `#!/usr/bin/env node` into `dist/index.js` if missing. +3. Sets `0o755` on `dist/index.js` (best-effort on filesystems that ignore POSIX modes). + +Steps 2 and 3 make the file directly executable through the `switchbot` bin +entry. `tests/version.test.ts` contains a regression guard that fails the suite +if the shebang goes missing. + +## Gates before `npm publish` + +``` +git commit ──▶ pre-commit hook ─── verify:pre-commit + (build:prod + tests/version.test.ts) + +git push ──▶ pre-push hook ──── verify:pre-push + (build:prod + version test + + smoke:pack-install) + +open PR ──▶ ci.yml ──────────── docs-lint + test matrix (Node 18/20/22) + bundle-smoke (advisory) + offline-smoke (size budgets) + pack-install-smoke (esbuild) + pack-install-smoke-tsc (tsc, matches publish) + policy-schema-sync + +merge PR ──▶ main + +release ──▶ publish.yml ─────── 1. npm ci + 2. npm run build (tsc) + 3. npm test + 4. tag == package.json version + 5. npm run smoke:pack-install ◀── last gate + 6. 
npm publish --tag next +``` + +The critical pre-publish gate is step 5 of `publish.yml`. It runs +`scripts/smoke-pack-install.mjs`, which: + +- Runs `npm pack` on the freshly-built tarball. +- Installs the tarball into a throwaway temp project. +- Executes `node_modules/.bin/switchbot --version` and compares the output to + `package.json.version`. + +If the shebang is missing, the bin entry is not marked executable, the version +drifts, or any deps are missing — the CLI fails to run and the smoke test exits +non-zero. `npm publish` does not run. + +## Post-publish defense-in-depth + +``` +npm-published-smoke.yml (triggered by publish.yml completion): + 1. wait_package — wait for @next to appear on the registry + 2. install_package — install in a clean temp project + 3. offline_smoke — --version, --help, schema export, capabilities + 4. live_smoke — doctor, devices list (uses real credentials) + 5. promote to @latest ◀── only if all four above pass + OR + npm deprecate ◀── only on install_package / offline_smoke failure + (never on live_smoke — API flakes should not + auto-deprecate a package) +``` + +This workflow runs *after* `npm publish`. It is defense-in-depth only; the +pre-publish gates are what keep bad artifacts off the registry in the first +place. + +## Invariants + +Changes to the release pipeline must preserve these invariants: + +1. **`publish.yml` must run `smoke:pack-install` before `npm publish`.** If + this gate is removed or skipped, a broken tarball can reach the registry. + +2. **If `publish.yml` is ever switched from `build` (tsc) to `build:prod` + (esbuild), `bundle-smoke` must become blocking in the same PR.** That job + currently carries `continue-on-error: true` because of a tracked Node 22 + CJS interop issue; making it advisory is only safe while the published + artifact comes from tsc. + +3. 
**Auto-deprecate must never fire on `live_smoke` failure.** Live smoke + depends on real SwitchBot API availability and valid credentials; a transient + outage should not deprecate a working package. Only `install_package` and + `offline_smoke` failures justify an automatic deprecation. + +4. **`copy-assets.mjs` must run on every build path.** Both `build` and + `build:prod` chain into it. It is the single place where the shebang and + exec bit are enforced. Moving that logic elsewhere — or adding a third build + path that skips it — will break npm bin execution. + +## Related tests + +- `tests/version.test.ts` — asserts shebang presence and `--version` parity with + `package.json`. +- `tests/build/` — esbuild bundle guards (shebang count, `node --check`, size + budget). +- `scripts/smoke-pack-install.mjs` — the end-to-end install smoke used by both + the `pre-push` hook and the CI / publish workflows. From 845237dec958cf43e63eed828b4628dbe30d8a72 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sun, 26 Apr 2026 08:56:19 +0800 Subject: [PATCH 22/26] =?UTF-8?q?fix(ci):=20align=20publish.yml=20with=20p?= =?UTF-8?q?republishOnly=20=E2=80=94=20single=20esbuild=20publish=20source?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit publish.yml was running `npm run build` (tsc) + smoke:pack-install, but `npm publish` triggers prepublishOnly, which does `clean && build:prod` unconditionally. So the tarball that smoke validated was thrown away and replaced with the esbuild bundle right before upload — smoke was verifying a different artifact than what shipped. Fix by making esbuild the single publish source: - publish.yml step 2: `npm run build` -> `npm run build:prod`, so the artifact validated by smoke:pack-install is byte-identical to what prepublishOnly produces and what npm publish uploads. - ci.yml: remove the `pack-install-smoke-tsc` job added in the previous commit — it was validating the tsc output that never ships. 
- docs/release-pipeline.md: rewrite to reflect the single-publish-source model; add explicit invariant that publish.yml and prepublishOnly must use the same builder. - CHANGELOG: update the 3.2.1 release-pipeline entry. --- .github/workflows/ci.yml | 20 +----------- .github/workflows/publish.yml | 2 +- CHANGELOG.md | 27 ++++++++++------- docs/release-pipeline.md | 57 ++++++++++++++++++++++++----------- 4 files changed, 58 insertions(+), 48 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 943429b..ec4a97c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -127,7 +127,7 @@ jobs: fi pack-install-smoke: - name: Packed install smoke (esbuild) + name: Packed install smoke (esbuild — matches publish) runs-on: ubuntu-latest needs: test steps: @@ -141,24 +141,6 @@ jobs: - name: npm pack -> npm install tarball -> switchbot --version run: npm run smoke:pack-install - # Mirrors exactly what publish.yml does: `npm run build` (tsc) + smoke:pack-install. - # Keeps the PR gate honest — if this job fails, the publish workflow would also fail. - # See docs/release-pipeline.md for the tsc vs esbuild split. 
- pack-install-smoke-tsc: - name: Packed install smoke (tsc — matches publish) - runs-on: ubuntu-latest - needs: test - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: 20.x - cache: npm - - run: npm ci - - run: npm run build - - name: npm pack -> npm install tarball -> switchbot --version - run: npm run smoke:pack-install - policy-schema-sync: name: Policy schema sync with skill repo runs-on: ubuntu-latest diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 1a68cbd..f19057e 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -18,7 +18,7 @@ jobs: registry-url: https://registry.npmjs.org cache: npm - run: npm ci - - run: npm run build + - run: npm run build:prod - run: npm test - name: Verify tag matches package.json version run: | diff --git a/CHANGELOG.md b/CHANGELOG.md index 987ded9..c28339a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,17 +34,22 @@ This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ### Changed — release pipeline -- Pre-publish `smoke:pack-install` now runs in `publish.yml` before `npm publish`, and the - same smoke runs locally via `pre-push` hook (`verify:pre-push`) and on every PR in CI - (`pack-install-smoke`). -- `scripts/copy-assets.mjs` now injects the `#!/usr/bin/env node` shebang into `dist/index.js` - and chmods it to `0755` after every build, so the npm bin entry is always executable. -- New `pack-install-smoke-tsc` CI job validates the same `npm run build` (tsc) artifact that - `publish.yml` actually ships, closing the gap where the PR gate was testing the esbuild - bundle while publish shipped the tsc output. See [`docs/release-pipeline.md`](./docs/release-pipeline.md). -- New `npm-published-smoke.yml` workflow verifies published tarballs on the npm registry, - auto-promotes `next → latest` on success, and auto-deprecates on package-install/offline - smoke failures only (never on live API flakes). 
+- **Single publish source**: `publish.yml` now runs `npm run build:prod` + (esbuild) to match what `prepublishOnly` produces at `npm publish` time. The + tarball validated by `smoke:pack-install` is now byte-identical to the + tarball that actually ships to the registry — no artifact swap during + publish. +- Pre-publish `smoke:pack-install` runs in `publish.yml` before `npm publish`, + and the same smoke runs locally via `pre-push` hook (`verify:pre-push`) and + on every PR in CI (`pack-install-smoke`). +- `scripts/copy-assets.mjs` now injects the `#!/usr/bin/env node` shebang into + `dist/index.js` and chmods it to `0755` after every build, so the npm bin + entry is always executable. +- New `npm-published-smoke.yml` workflow verifies published tarballs on the + npm registry, auto-promotes `next → latest` on success, and auto-deprecates + on package-install/offline smoke failures only (never on live API flakes). +- See [`docs/release-pipeline.md`](./docs/release-pipeline.md) for the full + gate sequence and invariants. ## [3.2.0] - 2026-04-25 diff --git a/docs/release-pipeline.md b/docs/release-pipeline.md index 687093b..0cd3a91 100644 --- a/docs/release-pipeline.md +++ b/docs/release-pipeline.md @@ -3,15 +3,18 @@ This document describes how `@switchbot/openapi-cli` goes from commit to npm registry, and the invariants that keep the published artifact safe. -## Two builders, one tarball +## Single publish source: esbuild -| Script | Tool | Output shape | Used by | -|---|---|---|---| -| `npm run build` | `tsc` | Per-file `dist/*.js` mirroring `src/` | `publish.yml` (what actually publishes) | -| `npm run build:prod` | `esbuild` | Single-file `dist/index.js` with inlined deps | Local dev, `ci.yml` bundle smoke | +The published artifact is **always** the esbuild bundle (`npm run build:prod`). 
+`npm publish` triggers `prepublishOnly`, which unconditionally runs +`npm test && npm run clean && npm run build:prod && node dist/index.js --version` +right before uploading the tarball — so the bundle is the only thing that can +ship. -`npm pack` follows `"files": ["dist", "README.md", "LICENSE"]` in `package.json`, -so whatever ends up in `dist/` when `npm pack` runs is what ships. +| Script | Tool | Output shape | Role | +|---|---|---|---| +| `npm run build:prod` | `esbuild` | Single-file `dist/index.js`, deps inlined | **Publish source** — what ships to npm | +| `npm run build` | `tsc` | Per-file `dist/*.js` mirroring `src/` | Local dev only — type check, source maps, fast iteration | Both builders end by running `scripts/copy-assets.mjs`, which: @@ -23,6 +26,9 @@ Steps 2 and 3 make the file directly executable through the `switchbot` bin entry. `tests/version.test.ts` contains a regression guard that fails the suite if the shebang goes missing. +`npm pack` follows `"files": ["dist", "README.md", "LICENSE"]` in `package.json`, +so whatever ends up in `dist/` when `npm pack` runs is what ships. + ## Gates before `npm publish` ``` @@ -37,20 +43,25 @@ open PR ──▶ ci.yml ──────────── docs-lint test matrix (Node 18/20/22) bundle-smoke (advisory) offline-smoke (size budgets) - pack-install-smoke (esbuild) - pack-install-smoke-tsc (tsc, matches publish) + pack-install-smoke (esbuild, matches publish) policy-schema-sync merge PR ──▶ main release ──▶ publish.yml ─────── 1. npm ci - 2. npm run build (tsc) + 2. npm run build:prod (esbuild) 3. npm test 4. tag == package.json version 5. npm run smoke:pack-install ◀── last gate 6. npm publish --tag next + └── prepublishOnly: clean + build:prod + --version + (same builder as step 2 — no artifact swap) ``` +Because step 2 and `prepublishOnly` use the same `build:prod` script, the +tarball that `smoke:pack-install` validates in step 5 is byte-identical to the +tarball `npm publish` uploads in step 6. 
No artifact swap happens in between. + The critical pre-publish gate is step 5 of `publish.yml`. It runs `scripts/smoke-pack-install.mjs`, which: @@ -86,14 +97,13 @@ place. Changes to the release pipeline must preserve these invariants: -1. **`publish.yml` must run `smoke:pack-install` before `npm publish`.** If - this gate is removed or skipped, a broken tarball can reach the registry. +1. **One publish source — always the esbuild bundle.** `publish.yml` step 2 + must run `npm run build:prod`, and `prepublishOnly` must also run + `build:prod`. If these two ever diverge, the smoke test will validate a + different artifact than what actually ships. -2. **If `publish.yml` is ever switched from `build` (tsc) to `build:prod` - (esbuild), `bundle-smoke` must become blocking in the same PR.** That job - currently carries `continue-on-error: true` because of a tracked Node 22 - CJS interop issue; making it advisory is only safe while the published - artifact comes from tsc. +2. **`publish.yml` must run `smoke:pack-install` before `npm publish`.** If + this gate is removed or skipped, a broken tarball can reach the registry. 3. **Auto-deprecate must never fire on `live_smoke` failure.** Live smoke depends on real SwitchBot API availability and valid credentials; a transient @@ -105,6 +115,19 @@ Changes to the release pipeline must preserve these invariants: exec bit are enforced. Moving that logic elsewhere — or adding a third build path that skips it — will break npm bin execution. +## Known gaps + +- **`bundle-smoke` is advisory (`continue-on-error: true`).** Since the bundle + is now the publish source, this job should become blocking once the tracked + Node 22 CJS interop issue is resolved. Until then, the pre-publish smoke and + the post-publish `npm-published-smoke.yml` workflow provide coverage. 
+ +- **The `test` matrix runs `npm run build` (tsc), not `build:prod`.** This + verifies that the source compiles under Node 18/20/22, but does not exercise + the esbuild bundle on all three. The `pack-install-smoke` job covers the + bundle on Node 20 only. If end-user Node 22 runtime behavior with the bundle + matters, add a bundle-aware matrix job in a follow-up. + ## Related tests - `tests/version.test.ts` — asserts shebang presence and `--version` parity with From 67a1a41b993f16fc1bb4656deedf54e98fb76348 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sun, 26 Apr 2026 09:00:18 +0800 Subject: [PATCH 23/26] ci(bundle-smoke): make blocking + matrix across Node 18/20/22 Now that the esbuild bundle is the single publish source, bundle-smoke must catch bundle regressions on every supported Node version before a PR can merge. Previously it was advisory (continue-on-error: true) and single-node (20.x). Changes: - Remove continue-on-error. The stale "Node 22 CJS interop issues" comment dates from before 7bbbc44 fixed the CJS require shim; the bundle now runs cleanly on Node 18/20/22 (verified locally on 22.21). - Add strategy.matrix.node-version = [18.x, 20.x, 22.x] with fail-fast: false, so any Node version that cannot run the bundle is surfaced independently. - Update docs/release-pipeline.md: remove the "Known gaps" section, add invariant #5 making the matrix + blocking status explicit. - CHANGELOG entry added. 
--- .github/workflows/ci.yml | 11 ++++++----- CHANGELOG.md | 3 +++ docs/release-pipeline.md | 23 +++++++++-------------- 3 files changed, 18 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ec4a97c..48cff5c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -45,17 +45,18 @@ jobs: - run: npm test bundle-smoke: - name: esbuild bundle smoke test + name: esbuild bundle smoke test (Node ${{ matrix.node-version }}) runs-on: ubuntu-latest needs: test - # esbuild CJS interop issues on Node 22 are tracked in a follow-up PR. - # This job is advisory until that is resolved. - continue-on-error: true + strategy: + fail-fast: false + matrix: + node-version: [18.x, 20.x, 22.x] steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: 20.x + node-version: ${{ matrix.node-version }} cache: npm - run: npm ci - run: npm run build:prod diff --git a/CHANGELOG.md b/CHANGELOG.md index c28339a..2267e93 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -48,6 +48,9 @@ This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - New `npm-published-smoke.yml` workflow verifies published tarballs on the npm registry, auto-promotes `next → latest` on success, and auto-deprecates on package-install/offline smoke failures only (never on live API flakes). +- `bundle-smoke` CI job is now a blocking matrix across Node 18/20/22 (was + single-node Node 20, advisory), so the esbuild bundle must start cleanly + on every supported Node version before a PR can merge. - See [`docs/release-pipeline.md`](./docs/release-pipeline.md) for the full gate sequence and invariants. 
diff --git a/docs/release-pipeline.md b/docs/release-pipeline.md index 0cd3a91..79de1b0 100644 --- a/docs/release-pipeline.md +++ b/docs/release-pipeline.md @@ -40,8 +40,8 @@ git push ──▶ pre-push hook ──── verify:pre-push smoke:pack-install) open PR ──▶ ci.yml ──────────── docs-lint - test matrix (Node 18/20/22) - bundle-smoke (advisory) + test matrix (Node 18/20/22, tsc) + bundle-smoke (Node 18/20/22, esbuild) offline-smoke (size budgets) pack-install-smoke (esbuild, matches publish) policy-schema-sync @@ -115,18 +115,13 @@ Changes to the release pipeline must preserve these invariants: exec bit are enforced. Moving that logic elsewhere — or adding a third build path that skips it — will break npm bin execution. -## Known gaps - -- **`bundle-smoke` is advisory (`continue-on-error: true`).** Since the bundle - is now the publish source, this job should become blocking once the tracked - Node 22 CJS interop issue is resolved. Until then, the pre-publish smoke and - the post-publish `npm-published-smoke.yml` workflow provide coverage. - -- **The `test` matrix runs `npm run build` (tsc), not `build:prod`.** This - verifies that the source compiles under Node 18/20/22, but does not exercise - the esbuild bundle on all three. The `pack-install-smoke` job covers the - bundle on Node 20 only. If end-user Node 22 runtime behavior with the bundle - matters, add a bundle-aware matrix job in a follow-up. +5. **`bundle-smoke` must stay blocking and matrixed.** Because the bundle is + the publish source, it has to start cleanly on every Node version the + package supports (`engines.node >= 18`). The job runs `build:prod + node + --check + --version + bundle size test` on Node 18/20/22. Adding a new + supported Node version means adding it to the matrix; making the job + advisory again means end-users on some supported Node version can install a + broken CLI without CI catching it. 
## Related tests From 7f59a0b7927854030403f29ee4c6144d08a1698c Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sun, 26 Apr 2026 09:40:34 +0800 Subject: [PATCH 24/26] test(status-sync): replace vi.spyOn(process.kill) with direct assignment On some Linux CI runners, vi.spyOn(process, 'kill') failed to reliably intercept process.kill(pid, 0), so isProcessRunning hit the real kill syscall against a PID that happened to exist in the container, got back EPERM, and reported the stale process as still running. Replace the spy with a direct property assignment and restore the original kill in afterAll so the interception is deterministic across Node 18/20/22. --- tests/status-sync/manager.test.ts | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/status-sync/manager.test.ts b/tests/status-sync/manager.test.ts index 5300b96..e957bec 100644 --- a/tests/status-sync/manager.test.ts +++ b/tests/status-sync/manager.test.ts @@ -1,5 +1,5 @@ import path from 'node:path'; -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterAll, afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; const fsMock = vi.hoisted(() => ({ existsSync: vi.fn(), @@ -40,7 +40,13 @@ import { describe('status-sync manager', () => { const originalArgv = process.argv; - const killSpy = vi.spyOn(process, 'kill'); + const originalKill = process.kill; + const killSpy = vi.fn(); + (process as unknown as { kill: typeof process.kill }).kill = killSpy as unknown as typeof process.kill; + + afterAll(() => { + (process as unknown as { kill: typeof process.kill }).kill = originalKill; + }); beforeEach(() => { process.argv = ['node', '/repo/dist/index.js']; From 8802f478d222fc8aa5af18a2bb0cf5a536ab992d Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sun, 26 Apr 2026 10:00:36 +0800 Subject: [PATCH 25/26] refactor(build): collapse build and build:prod into a single release pipeline npm run build is now the only path that produces the published 
artifact. It drives a 5-stage orchestrator in scripts/build.mjs: 1. clean remove dist/ 2. typecheck tsc --noEmit 3. bundle scripts/bundle.mjs (esbuild) 4. copy-assets scripts/copy-assets.mjs (policy assets only) 5. ensure-binary scripts/ensure-binary.mjs (shebang + chmod 0755 guard) prepublishOnly, verify:pre-commit, verify:pre-push, publish.yml, and the bundle-smoke / pack-install-smoke CI jobs all call npm run build by name; no job re-implements any step. scripts/copy-assets.mjs no longer injects the shebang or chmods the entry. The new scripts/ensure-binary.mjs is a regression guard: it asserts the shebang is present and fails loudly (pointing at scripts/bundle.mjs) if the esbuild banner ever drops it, rather than silently repairing the output the way copy-assets used to. Dropped scripts: build:prod, clean (folded into build.mjs). Added script: typecheck (tsc --noEmit). --- .github/workflows/ci.yml | 4 +- .github/workflows/publish.yml | 2 +- CHANGELOG.md | 21 +++++ docs/release-pipeline.md | 140 +++++++++++++++++++++------------- package.json | 11 ++- scripts/build.mjs | 74 ++++++++++++++++++ scripts/copy-assets.mjs | 20 +---- scripts/ensure-binary.mjs | 49 ++++++++++++ 8 files changed, 238 insertions(+), 83 deletions(-) create mode 100644 scripts/build.mjs create mode 100644 scripts/ensure-binary.mjs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 48cff5c..e088269 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -59,7 +59,7 @@ jobs: node-version: ${{ matrix.node-version }} cache: npm - run: npm ci - - run: npm run build:prod + - run: npm run build - name: Shebang count must be exactly 1 run: | COUNT=$(grep -c "#!/usr/bin/env node" dist/index.js) @@ -138,7 +138,7 @@ jobs: node-version: 20.x cache: npm - run: npm ci - - run: npm run build:prod + - run: npm run build - name: npm pack -> npm install tarball -> switchbot --version run: npm run smoke:pack-install diff --git a/.github/workflows/publish.yml 
b/.github/workflows/publish.yml index f19057e..1a68cbd 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -18,7 +18,7 @@ jobs: registry-url: https://registry.npmjs.org cache: npm - run: npm ci - - run: npm run build:prod + - run: npm run build - run: npm test - name: Verify tag matches package.json version run: | diff --git a/CHANGELOG.md b/CHANGELOG.md index 2267e93..455c257 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,27 @@ All notable changes to `@switchbot/openapi-cli` are documented in this file. The format is loosely based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [Unreleased] + +### Changed — release pipeline + +- Release pipeline unified: `npm run build` is now the single source for the + published tarball. It runs a 5-stage `scripts/build.mjs` orchestrator + (clean → typecheck → bundle → copy-assets → ensure-binary). `prepublishOnly`, + `verify:pre-commit`, `verify:pre-push`, `publish.yml`, and the `bundle-smoke` + / `pack-install-smoke` CI jobs all call `npm run build` by name — no job + re-implements the build steps and no other script writes to `dist/`. +- Removed `npm run build:prod` and `npm run clean` — both are folded into + `scripts/build.mjs`. +- Added `npm run typecheck` (`tsc --noEmit`) as the local "does it still + compile?" escape hatch. +- Split `scripts/copy-assets.mjs` responsibility into two scripts with one + failure mode each: `copy-assets.mjs` only copies policy assets, and the + new `scripts/ensure-binary.mjs` asserts the shebang is present on + `dist/index.js` and `chmod 0755`s it. `ensure-binary.mjs` is a regression + guard — it fails loudly if the esbuild banner drops the shebang, rather + than silently repairing it the way `copy-assets.mjs` used to. 
+ ## [3.2.1] - 2026-04-25 ### Added — plan resource model, MCP risk profiles, rules safety primitives diff --git a/docs/release-pipeline.md b/docs/release-pipeline.md index 79de1b0..7854e75 100644 --- a/docs/release-pipeline.md +++ b/docs/release-pipeline.md @@ -3,66 +3,87 @@ This document describes how `@switchbot/openapi-cli` goes from commit to npm registry, and the invariants that keep the published artifact safe. -## Single publish source: esbuild +## Single publish source -The published artifact is **always** the esbuild bundle (`npm run build:prod`). -`npm publish` triggers `prepublishOnly`, which unconditionally runs -`npm test && npm run clean && npm run build:prod && node dist/index.js --version` -right before uploading the tarball — so the bundle is the only thing that can -ship. +There is exactly one way to produce the release artifact: -| Script | Tool | Output shape | Role | +``` +npm run build → node scripts/build.mjs +``` + +Every script on the release path — `prepublishOnly`, `verify:pre-commit`, +`verify:pre-push`, `publish.yml`, `ci.yml/bundle-smoke`, `ci.yml/pack-install-smoke` — +calls `npm run build` by name. No job re-implements any of the steps, and no +other script writes to `dist/`. 
+ +### The five stages of `scripts/build.mjs` + +| # | Stage | Script | Responsibility | |---|---|---|---| -| `npm run build:prod` | `esbuild` | Single-file `dist/index.js`, deps inlined | **Publish source** — what ships to npm | -| `npm run build` | `tsc` | Per-file `dist/*.js` mirroring `src/` | Local dev only — type check, source maps, fast iteration | +| 1 | clean | inline | remove `dist/` so nothing stale leaks into the tarball | +| 2 | typecheck | `tsc --noEmit` | all types must compile before we bundle | +| 3 | bundle | `scripts/bundle.mjs` | esbuild produces the single-file `dist/index.js` (shebang via `banner.js`) | +| 4 | copy-assets | `scripts/copy-assets.mjs` | copy `src/policy/{schema,examples}` → `dist/policy/...` | +| 5 | ensure-binary | `scripts/ensure-binary.mjs` | assert the shebang is present and `chmod 0755` on `dist/index.js` | + +Each stage does exactly one thing. First non-zero exit aborts the build. -Both builders end by running `scripts/copy-assets.mjs`, which: +### Why `ensure-binary.mjs` is a guard, not a repair -1. Copies policy JSON Schema assets into `dist/policy/`. -2. Injects `#!/usr/bin/env node` into `dist/index.js` if missing. -3. Sets `0o755` on `dist/index.js` (best-effort on filesystems that ignore POSIX modes). +The shebang (`#!/usr/bin/env node`) is injected at bundle time by +`scripts/bundle.mjs` via the esbuild `banner.js` option. `ensure-binary.mjs` +re-reads `dist/index.js` and **verifies** that the first bytes are the +expected shebang — if not, it exits non-zero with a pointer to the banner +config. -Steps 2 and 3 make the file directly executable through the `switchbot` bin -entry. `tests/version.test.ts` contains a regression guard that fails the suite -if the shebang goes missing. +Previously, `copy-assets.mjs` silently **repaired** a missing shebang by +prepending it at the end of the build. That masked the root cause (a +change to the banner config would not surface at build time). 
The current +split is: -`npm pack` follows `"files": ["dist", "README.md", "LICENSE"]` in `package.json`, -so whatever ends up in `dist/` when `npm pack` runs is what ships. +- `bundle.mjs` — *produces* the shebang via banner. +- `ensure-binary.mjs` — *asserts* the shebang exists. Never patches. + +If anything ever drops the banner line, `npm run build` fails loudly at +stage 5 with a message pointing to `scripts/bundle.mjs`. + +`npm pack` follows `"files": ["dist", "README.md", "LICENSE"]` in +`package.json`, so whatever ends up in `dist/` after stage 5 is what ships. ## Gates before `npm publish` ``` git commit ──▶ pre-commit hook ─── verify:pre-commit - (build:prod + tests/version.test.ts) + (npm run build + tests/version.test.ts) git push ──▶ pre-push hook ──── verify:pre-push - (build:prod + version test + + (npm run build + version test + smoke:pack-install) open PR ──▶ ci.yml ──────────── docs-lint - test matrix (Node 18/20/22, tsc) - bundle-smoke (Node 18/20/22, esbuild) + test matrix (Node 18/20/22) + bundle-smoke (Node 18/20/22) offline-smoke (size budgets) - pack-install-smoke (esbuild, matches publish) + pack-install-smoke (matches publish) policy-schema-sync merge PR ──▶ main release ──▶ publish.yml ─────── 1. npm ci - 2. npm run build:prod (esbuild) + 2. npm run build 3. npm test 4. tag == package.json version 5. npm run smoke:pack-install ◀── last gate 6. npm publish --tag next - └── prepublishOnly: clean + build:prod + --version - (same builder as step 2 — no artifact swap) + └── prepublishOnly: test + build + smoke + (same commands as steps 2-5 — no drift) ``` -Because step 2 and `prepublishOnly` use the same `build:prod` script, the -tarball that `smoke:pack-install` validates in step 5 is byte-identical to the +Because step 2 and `prepublishOnly` both call `npm run build`, the tarball +validated by `smoke:pack-install` in step 5 is byte-identical to the tarball `npm publish` uploads in step 6. No artifact swap happens in between. 
-The critical pre-publish gate is step 5 of `publish.yml`. It runs +The critical pre-publish gate is step 5. It runs `scripts/smoke-pack-install.mjs`, which: - Runs `npm pack` on the freshly-built tarball. @@ -97,37 +118,46 @@ place. Changes to the release pipeline must preserve these invariants: -1. **One publish source — always the esbuild bundle.** `publish.yml` step 2 - must run `npm run build:prod`, and `prepublishOnly` must also run - `build:prod`. If these two ever diverge, the smoke test will validate a - different artifact than what actually ships. +1. **One command produces the release artifact.** `npm run build` is the + only path that writes `dist/` for publish. `publish.yml`, `prepublishOnly`, + `verify:pre-push`, and both `bundle-smoke` + `pack-install-smoke` jobs must + all call it by name — never re-implement steps. -2. **`publish.yml` must run `smoke:pack-install` before `npm publish`.** If - this gate is removed or skipped, a broken tarball can reach the registry. +2. **One artifact is smoked.** `smoke:pack-install` always runs against the + `dist/` that `npm run build` just produced. No other script writes to + `dist/` between the build and the smoke. -3. **Auto-deprecate must never fire on `live_smoke` failure.** Live smoke - depends on real SwitchBot API availability and valid credentials; a transient - outage should not deprecate a working package. Only `install_package` and - `offline_smoke` failures justify an automatic deprecation. +3. **One failure mode per script.** `copy-assets.mjs` can fail because an + asset is missing. `ensure-binary.mjs` can fail because the shebang is + missing or the output is absent. No script silently repairs the output of + another. + +4. **`prepublishOnly` and `publish.yml` do not drift.** Both run + `npm test && npm run build && npm run smoke:pack-install`. Any edit that + changes one must change the other in the same commit. + +5. 
**`publish.yml` must run `smoke:pack-install` before `npm publish`.** If + this gate is removed or skipped, a broken tarball can reach the registry. -4. **`copy-assets.mjs` must run on every build path.** Both `build` and - `build:prod` chain into it. It is the single place where the shebang and - exec bit are enforced. Moving that logic elsewhere — or adding a third build - path that skips it — will break npm bin execution. +6. **Auto-deprecate must never fire on `live_smoke` failure.** Live smoke + depends on real SwitchBot API availability and valid credentials; a + transient outage should not deprecate a working package. Only + `install_package` and `offline_smoke` failures justify an automatic + deprecation. -5. **`bundle-smoke` must stay blocking and matrixed.** Because the bundle is +7. **`bundle-smoke` must stay blocking and matrixed.** Because the bundle is the publish source, it has to start cleanly on every Node version the - package supports (`engines.node >= 18`). The job runs `build:prod + node - --check + --version + bundle size test` on Node 18/20/22. Adding a new - supported Node version means adding it to the matrix; making the job - advisory again means end-users on some supported Node version can install a - broken CLI without CI catching it. + package supports (`engines.node >= 18`). The job runs `npm run build + + shebang count + node --check + --version + bundle size test` on Node + 18/20/22. Adding a new supported Node version means adding it to the + matrix; making the job advisory again means end-users on some supported + Node version can install a broken CLI without CI catching it. ## Related tests -- `tests/version.test.ts` — asserts shebang presence and `--version` parity with - `package.json`. -- `tests/build/` — esbuild bundle guards (shebang count, `node --check`, size - budget). -- `scripts/smoke-pack-install.mjs` — the end-to-end install smoke used by both - the `pre-push` hook and the CI / publish workflows. 
+- `tests/version.test.ts` — asserts shebang presence and `--version` parity + with `package.json`. +- `tests/build/` — esbuild bundle guards (shebang count, `node --check`, + size budget). +- `scripts/smoke-pack-install.mjs` — the end-to-end install smoke used by + both the `pre-push` hook and the CI / publish workflows. diff --git a/package.json b/package.json index db28267..6fa58b5 100644 --- a/package.json +++ b/package.json @@ -36,9 +36,8 @@ "access": "public" }, "scripts": { - "build": "tsc && node scripts/copy-assets.mjs", - "build:prod": "node scripts/bundle.mjs && node scripts/copy-assets.mjs", - "clean": "node -e \"require('fs').rmSync('dist',{recursive:true,force:true})\"", + "typecheck": "tsc --noEmit", + "build": "node scripts/build.mjs", "dev": "tsx src/index.ts", "hooks:install": "node scripts/install-git-hooks.mjs", "lint:md": "markdownlint \"**/*.md\"", @@ -49,9 +48,9 @@ "test": "vitest run", "test:watch": "vitest", "test:coverage": "vitest run --coverage", - "verify:pre-commit": "npm run build:prod && npm test -- tests/version.test.ts", - "verify:pre-push": "npm run build:prod && npm test -- tests/version.test.ts && npm run smoke:pack-install", - "prepublishOnly": "npm test && npm run clean && npm run build:prod && node dist/index.js --version" + "verify:pre-commit": "npm run build && npm test -- tests/version.test.ts", + "verify:pre-push": "npm run build && npm test -- tests/version.test.ts && npm run smoke:pack-install", + "prepublishOnly": "npm test && npm run build && npm run smoke:pack-install" }, "dependencies": { "@modelcontextprotocol/sdk": "^1.29.0", diff --git a/scripts/build.mjs b/scripts/build.mjs new file mode 100644 index 0000000..3465a61 --- /dev/null +++ b/scripts/build.mjs @@ -0,0 +1,74 @@ +// scripts/build.mjs +// Single release-pipeline entry point. `npm run build` always calls this. +// +// Stages (one thing each; first failure aborts): +// 1. clean wipe dist/ so nothing stale leaks into the tarball +// 2. 
typecheck tsc --noEmit gate (types must still compile) +// 3. bundle esbuild produces dist/index.js (shebang via banner.js) +// 4. copy-assets copy policy schema/examples into dist/ +// 5. ensure-binary assert shebang + chmod 0755 on dist/index.js +// +// The invariant this file enforces: whatever ships (prepublishOnly, publish.yml, +// smoke:pack-install) was produced by EXACTLY these five steps. No other script +// writes to dist/ on the release path. + +import { spawnSync } from 'node:child_process'; +import { rmSync } from 'node:fs'; +import { dirname, join } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const scriptDir = dirname(fileURLToPath(import.meta.url)); +const repoRoot = dirname(scriptDir); +const isWindows = process.platform === 'win32'; +const npxCmd = isWindows ? 'npx.cmd' : 'npx'; + +const TOTAL = 5; +let stageIdx = 0; + +function runStage(label, fn) { + stageIdx += 1; + console.log(`build: [${stageIdx}/${TOTAL}] ${label}`); + try { + fn(); + } catch (err) { + console.error(`build: FAIL at ${label}`); + console.error(err?.message ?? 
err); + process.exit(1); + } +} + +function runNode(args) { + const res = spawnSync(process.execPath, args, { cwd: repoRoot, stdio: 'inherit' }); + if (res.status !== 0) { + throw new Error(`node ${args.join(' ')} exited with ${res.status}`); + } +} + +function runNpx(args) { + const res = spawnSync(npxCmd, args, { cwd: repoRoot, stdio: 'inherit', shell: isWindows }); + if (res.status !== 0) { + throw new Error(`npx ${args.join(' ')} exited with ${res.status}`); + } +} + +runStage('clean', () => { + rmSync(join(repoRoot, 'dist'), { recursive: true, force: true }); +}); + +runStage('typecheck', () => { + runNpx(['tsc', '--noEmit']); +}); + +runStage('bundle', () => { + runNode([join(scriptDir, 'bundle.mjs')]); +}); + +runStage('copy-assets', () => { + runNode([join(scriptDir, 'copy-assets.mjs')]); +}); + +runStage('ensure-binary', () => { + runNode([join(scriptDir, 'ensure-binary.mjs')]); +}); + +console.log('build: done'); diff --git a/scripts/copy-assets.mjs b/scripts/copy-assets.mjs index 105fb1f..83c4919 100644 --- a/scripts/copy-assets.mjs +++ b/scripts/copy-assets.mjs @@ -1,10 +1,9 @@ -import { cpSync, mkdirSync, existsSync, readFileSync, writeFileSync, chmodSync } from 'node:fs'; +import { cpSync, mkdirSync, existsSync } from 'node:fs'; import { dirname, join } from 'node:path'; import { fileURLToPath } from 'node:url'; const scriptDir = dirname(fileURLToPath(import.meta.url)); const repoRoot = dirname(scriptDir); -const NODE_SHEBANG = '#!/usr/bin/env node\n'; const assets = [ ['src/policy/schema', 'dist/policy/schema'], @@ -22,20 +21,3 @@ for (const [srcRel, dstRel] of assets) { cpSync(src, dst, { recursive: true }); console.log(`copy-assets: ${src} -> ${dst}`); } - -const cliEntry = join(repoRoot, 'dist', 'index.js'); -if (!existsSync(cliEntry)) { - console.error(`copy-assets: CLI entry missing: ${cliEntry}`); - process.exit(1); -} - -const cliSource = readFileSync(cliEntry, 'utf-8'); -if (!cliSource.startsWith(NODE_SHEBANG)) { - writeFileSync(cliEntry, 
NODE_SHEBANG + cliSource, 'utf-8'); -} - -try { - chmodSync(cliEntry, 0o755); -} catch { - // Best-effort on filesystems that ignore POSIX modes. -} diff --git a/scripts/ensure-binary.mjs b/scripts/ensure-binary.mjs new file mode 100644 index 0000000..45d36c9 --- /dev/null +++ b/scripts/ensure-binary.mjs @@ -0,0 +1,49 @@ +// scripts/ensure-binary.mjs +// Regression guard for the shipping binary. +// +// Contract: +// dist/index.js MUST exist, its first 19 bytes MUST be "#!/usr/bin/env node", +// and its mode SHOULD be 0o755. If any of those is not true, fail loudly. +// +// This script does NOT repair the output. The shebang is produced by +// scripts/bundle.mjs via the esbuild `banner.js` line; if that ever drops out, +// we want the build to fail here, not silently patch it at the last minute. + +import { chmodSync, existsSync, readFileSync, statSync } from 'node:fs'; +import { dirname, join } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const scriptDir = dirname(fileURLToPath(import.meta.url)); +const repoRoot = dirname(scriptDir); +const NODE_SHEBANG = '#!/usr/bin/env node'; + +const cliEntry = join(repoRoot, 'dist', 'index.js'); + +if (!existsSync(cliEntry)) { + console.error(`ensure-binary: dist/index.js is missing (expected at ${cliEntry})`); + console.error(' Did scripts/bundle.mjs run? This script only verifies, it does not build.'); + process.exit(1); +} + +const head = readFileSync(cliEntry, { encoding: 'utf-8' }).slice(0, NODE_SHEBANG.length); +if (head !== NODE_SHEBANG) { + console.error('ensure-binary: dist/index.js is missing the node shebang'); + console.error(` expected first bytes: ${JSON.stringify(NODE_SHEBANG)}`); + console.error(` actual first bytes: ${JSON.stringify(head)}`); + console.error(' Check scripts/bundle.mjs banner.js — the shebang is produced there.'); + process.exit(1); +} + +try { + chmodSync(cliEntry, 0o755); +} catch { + // Best-effort. Filesystems that ignore POSIX modes (e.g.
some Windows FSes) + // still produce a valid tarball; npm records the mode at pack time on Linux. +} + +try { + const mode = statSync(cliEntry).mode & 0o777; + console.log(`ensure-binary: ok (shebang present, mode 0o${mode.toString(8)})`); +} catch { + console.log('ensure-binary: ok (shebang present)'); +} From 10224b5e5e07076dfa9b28ed072df9950c32f604 Mon Sep 17 00:00:00 2001 From: chenliuyun Date: Sun, 26 Apr 2026 10:18:40 +0800 Subject: [PATCH 26/26] chore: bump version to 3.2.2 --- CHANGELOG.md | 8 +++++++- README.md | 2 +- package-lock.json | 4 ++-- package.json | 2 +- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 455c257..a295434 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,7 @@ All notable changes to `@switchbot/openapi-cli` are documented in this file. The format is loosely based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [Unreleased] +## [3.2.2] - 2026-04-26 ### Changed — release pipeline @@ -30,6 +30,12 @@ This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [3.2.1] - 2026-04-25 +> **Deprecated on npm.** The initial `3.2.1` publish shipped a broken bin +> (missing shebang / exec bit after `npm pack`). It has been rolled back +> from `main` and relanded in `3.2.2`; install `@switchbot/openapi-cli@3.2.2` +> or later. The feature list below is retained as the historical record of +> what `3.2.1` intended to deliver and what `3.2.2` now ships. 
+ ### Added — plan resource model, MCP risk profiles, rules safety primitives - `switchbot plan save [file]` — persist a validated plan to `~/.switchbot/plans/.json` diff --git a/README.md b/README.md index f07bd51..00a0002 100644 --- a/README.md +++ b/README.md @@ -894,7 +894,7 @@ Queries the npm registry for the latest published version and compares it agains ```json { - "current": "3.2.1", + "current": "3.2.2", "latest": "4.0.0", "upToDate": false, "updateAvailable": true, diff --git a/package-lock.json b/package-lock.json index 8705814..08ebe3f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@switchbot/openapi-cli", - "version": "3.2.1", + "version": "3.2.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@switchbot/openapi-cli", - "version": "3.2.1", + "version": "3.2.2", "license": "MIT", "dependencies": { "@modelcontextprotocol/sdk": "^1.29.0", diff --git a/package.json b/package.json index 6fa58b5..d031954 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@switchbot/openapi-cli", - "version": "3.2.1", + "version": "3.2.2", "description": "SwitchBot smart home CLI — control devices, run scenes, stream real-time events, and integrate AI agents via MCP. Full API v1.1 coverage.", "keywords": [ "switchbot",