diff --git a/.claude/hooks/check-new-deps/README.md b/.claude/hooks/check-new-deps/README.md
new file mode 100644
index 00000000..5be7f3a6
--- /dev/null
+++ b/.claude/hooks/check-new-deps/README.md
@@ -0,0 +1,102 @@
+# check-new-deps Hook
+
+A Claude Code pre-tool hook that checks new dependencies against [Socket.dev](https://socket.dev) before they're added to the project. It runs automatically every time Claude tries to edit or create a dependency manifest file.
+
+## What it does
+
+When Claude edits a file like `package.json`, `requirements.txt`, `Cargo.toml`, or any of 17+ supported ecosystems, this hook:
+
+1. **Detects the file type** and extracts dependency names from the content
+2. **Diffs against the old content** (for edits) so only *newly added* deps are checked
+3. **Queries the Socket.dev API** to check for malware and critical security alerts
+4. **Blocks the edit** (exit code 2) if malware or critical alerts are found
+5. **Warns** (but allows) if a package has a low quality score
+6. **Allows** (exit code 0) if everything is clean or the file isn't a manifest
+
+## How it works
+
+```
+Claude wants to edit package.json
+ │
+ ▼
+Hook receives the edit via stdin (JSON)
+ │
+ ▼
+Extract new deps from new_string
+Diff against old_string (if Edit)
+ │
+ ▼
+Build Package URLs (PURLs) for each dep
+ │
+ ▼
+Call sdk.checkMalware(components)
+ - ≤5 deps: parallel firewall API (fast, full data)
+ - >5 deps: batch PURL API (efficient)
+ │
+ ├── Malware/critical alert → EXIT 2 (blocked)
+ ├── Low score → warn, EXIT 0 (allowed)
+ └── Clean → EXIT 0 (allowed)
+```
+
+## Supported ecosystems
+
+| File | Ecosystem | Example dep format |
+|------|-----------|-------------------|
+| `package.json` | npm | `"express": "^4.19"` |
+| `package-lock.json`, `pnpm-lock.yaml`, `yarn.lock` | npm | lockfile entries |
+| `requirements.txt`, `pyproject.toml`, `setup.py` | PyPI | `flask>=3.0` |
+| `Cargo.toml`, `Cargo.lock` | Cargo (Rust) | `serde = "1.0"` |
+| `go.mod`, `go.sum` | Go | `github.com/gin-gonic/gin v1.9` |
+| `Gemfile`, `Gemfile.lock` | RubyGems | `gem 'rails'` |
+| `composer.json`, `composer.lock` | Composer (PHP) | `"vendor/package": "^3.0"` |
+| `pom.xml`, `build.gradle` | Maven (Java) | `<artifactId>commons</artifactId>` |
+| `pubspec.yaml`, `pubspec.lock` | Pub (Dart) | `flutter_bloc: ^8.1` |
+| `.csproj` | NuGet (.NET) | `<PackageReference Include="..." />` |
+| `mix.exs` | Hex (Elixir) | `{:phoenix, "~> 1.7"}` |
+| `Package.swift` | Swift PM | `.package(url: "...", from: "4.0")` |
+| `*.tf` | Terraform | `source = "hashicorp/aws"` |
+| `Brewfile` | Homebrew | `brew "git"` |
+| `conanfile.*` | Conan (C/C++) | `boost/1.83.0` |
+| `flake.nix` | Nix | `github:owner/repo` |
+| `.github/workflows/*.yml` | GitHub Actions | `uses: owner/repo@ref` |
+
+## Configuration
+
+The hook is registered in `.claude/settings.json`:
+
+```json
+{
+ "hooks": {
+ "PreToolUse": [
+ {
+ "matcher": "Edit|Write",
+ "hooks": [
+ {
+ "type": "command",
+ "command": "node .claude/hooks/check-new-deps/index.mts"
+ }
+ ]
+ }
+ ]
+ }
+}
+```
+
+## Dependencies
+
+All dependencies use `catalog:` references from the workspace root (`pnpm-workspace.yaml`):
+
+- `@socketsecurity/sdk` — Socket.dev SDK v4 with `checkMalware()` API
+- `@socketsecurity/lib` — shared constants and path utilities
+- `@socketregistry/packageurl-js` — Package URL (PURL) parsing and stringification
+
+## Caching
+
+API responses are cached in-memory for 5 minutes (max 500 entries) to avoid redundant network calls when Claude checks the same dependency multiple times in a session.
+
+## Exit codes
+
+| Code | Meaning | Claude behavior |
+|------|---------|----------------|
+| 0 | Allow | Edit/Write proceeds normally |
+| 2 | Block | Edit/Write is rejected, Claude sees the error message |
diff --git a/.claude/hooks/check-new-deps/index.mts b/.claude/hooks/check-new-deps/index.mts
new file mode 100644
index 00000000..d84d95d4
--- /dev/null
+++ b/.claude/hooks/check-new-deps/index.mts
@@ -0,0 +1,705 @@
+#!/usr/bin/env node
+// Claude Code PreToolUse hook — Socket.dev dependency firewall.
+//
+// Intercepts Edit/Write tool calls to dependency manifest files across
+// 17+ package ecosystems. Extracts newly-added dependencies, builds
+// Package URLs (PURLs), and checks them against the Socket.dev API
+// using the SDK v4 checkMalware() method.
+//
+// Diff-aware: when old_string is present (Edit), only deps that
+// appear in new_string but NOT in old_string are checked.
+//
+// Caching: API responses are cached in-process with a TTL to avoid
+// redundant network calls when the same dep is checked repeatedly.
+// The cache auto-evicts expired entries and caps at MAX_CACHE_SIZE.
+//
+// Exit codes:
+// 0 = allow (no new deps, all clean, or non-dep file)
+// 2 = block (malware or critical alert from Socket.dev)
+
+import {
+ parseNpmSpecifier,
+ stringify,
+} from '@socketregistry/packageurl-js'
+import type { PackageURL } from '@socketregistry/packageurl-js'
+import {
+ SOCKET_PUBLIC_API_TOKEN,
+} from '@socketsecurity/lib/constants/socket'
+import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import {
+ normalizePath,
+} from '@socketsecurity/lib/paths/normalize'
+import { SocketSdk } from '@socketsecurity/sdk'
+import type { MalwareCheckPackage } from '@socketsecurity/sdk'
+
+// Shared logger instance for all warn/error output from this hook.
+const logger = getDefaultLogger()
+
+// Per-request timeout (ms) to avoid blocking the hook on slow responses.
+const API_TIMEOUT = 5_000
+// Deps scoring below this threshold trigger a warning (not a block).
+const LOW_SCORE_THRESHOLD = 0.5
+// Max PURLs per batch request (API limit is 1024).
+const MAX_BATCH_SIZE = 1024
+// How long (ms) to cache a successful API response (5 minutes).
+const CACHE_TTL = 5 * 60 * 1_000
+// Maximum cache entries before forced eviction of oldest.
+const MAX_CACHE_SIZE = 500
+
+// Module-level SDK instance using the public API token (no user
+// config needed). Shares the per-request timeout configured above.
+const sdk = new SocketSdk(SOCKET_PUBLIC_API_TOKEN, {
+  timeout: API_TIMEOUT,
+})
+
+// --- types ---
+
+// Extracted dependency with ecosystem type, name, and optional scope.
+// `type` is the PURL ecosystem string (npm, pypi, cargo, ...).
+interface Dep {
+  type: string
+  name: string
+  // Scope/group/owner segment where the ecosystem has one (e.g. npm
+  // @scope, composer vendor, golang module path prefix).
+  namespace?: string
+  version?: string
+}
+
+// Shape of the JSON blob Claude Code pipes to the hook via stdin.
+// Only the fields this hook reads are declared.
+interface HookInput {
+  tool_name: string
+  tool_input?: {
+    file_path?: string
+    // Edit tool: replacement text.
+    new_string?: string
+    // Edit tool: text being replaced (enables diff-aware checking).
+    old_string?: string
+    // Write tool: full file content.
+    content?: string
+  }
+}
+
+// Result of checking a single dep against the Socket.dev API.
+interface CheckResult {
+  purl: string
+  blocked?: boolean
+  warned?: boolean
+  reason?: string
+  score?: number
+}
+
+
+// A cached API lookup result with expiration timestamp.
+// `result` is undefined for a dep that came back clean.
+interface CacheEntry {
+  result: CheckResult | undefined
+  expiresAt: number
+}
+
+// Function that extracts deps from file content.
+type Extractor = (content: string) => Dep[]
+
+// --- cache ---
+
+// Simple TTL + max-size cache for API responses.
+// Prevents redundant network calls when the same dep is checked
+// multiple times in a session. Evicts expired entries on every
+// get/set, and drops oldest entries if the cache exceeds MAX_CACHE_SIZE.
+// Keyed by stringified PURL; typed explicitly so lookups return
+// CacheEntry rather than an inferred `any`.
+const cache = new Map<string, CacheEntry>()
+
+// Look up a cached result by PURL key.
+// Lazily expires: an entry past its TTL is deleted on read and
+// treated as a miss, so stale data is never returned.
+function cacheGet(key: string): CacheEntry | undefined {
+  const entry = cache.get(key)
+  if (!entry) return
+  if (Date.now() > entry.expiresAt) {
+    cache.delete(key)
+    return
+  }
+  return entry
+}
+
+// Store a check result (undefined = clean dep) under a PURL key with
+// a fresh TTL. Eviction runs only when the cache is at capacity:
+// first expired entries are purged, then — if still full — the oldest
+// entries are dropped (Map iteration order is insertion order, so
+// iterating keys() front-to-back is FIFO).
+function cacheSet(
+  key: string,
+  result: CheckResult | undefined,
+): void {
+  // Evict expired entries before inserting.
+  if (cache.size >= MAX_CACHE_SIZE) {
+    const now = Date.now()
+    for (const [k, v] of cache) {
+      if (now > v.expiresAt) cache.delete(k)
+    }
+  }
+  // If still over capacity, drop the oldest entries (FIFO).
+  if (cache.size >= MAX_CACHE_SIZE) {
+    const excess = cache.size - MAX_CACHE_SIZE + 1
+    let dropped = 0
+    for (const k of cache.keys()) {
+      if (dropped >= excess) break
+      cache.delete(k)
+      dropped++
+    }
+  }
+  cache.set(key, {
+    result,
+    expiresAt: Date.now() + CACHE_TTL,
+  })
+}
+
+// Manifest file suffix → extractor function.
+// __proto__: null prevents prototype-pollution on lookups.
+const extractors: Record<string, Extractor> = {
+ __proto__: null as unknown as Extractor,
+ '.csproj': extract(
+ // .NET:
+ /PackageReference\s+Include="([^"]+)"/g,
+ (m): Dep => ({ type: 'nuget', name: m[1] })
+ ),
+ '.tf': extractTerraform,
+ 'brew': extractBrewfile,
+ 'Brewfile': extractBrewfile,
+ 'build.gradle': extractMaven,
+ 'build.gradle.kts': extractMaven,
+ 'Cargo.lock': extract(
+ // Rust lockfile: [[package]]\nname = "serde"\nversion = "1.0.0"
+ /name\s*=\s*"([\w][\w-]*)"/gm,
+ (m): Dep => ({ type: 'cargo', name: m[1] })
+ ),
+ 'Cargo.toml': extract(
+ // Rust: serde = "1.0" or serde = { version = "1.0", features = [...] }
+ /^(\w[\w-]*)\s*=\s*(?:\{[^}]*version\s*=\s*"[^"]*"|\s*"[^"]*")/gm,
+ (m): Dep => ({ type: 'cargo', name: m[1] })
+ ),
+ 'conanfile.py': extractConan,
+ 'conanfile.txt': extractConan,
+ 'composer.lock': extract(
+ // PHP lockfile: "name": "vendor/package"
+ /"name":\s*"([a-z][\w-]*)\/([a-z][\w-]*)"/g,
+ (m): Dep => ({
+ type: 'composer',
+ namespace: m[1],
+ name: m[2],
+ })
+ ),
+ 'composer.json': extract(
+ // PHP: "vendor/package": "^3.0"
+ /"([a-z][\w-]*)\/([a-z][\w-]*)":\s*"/g,
+ (m): Dep => ({
+ type: 'composer',
+ namespace: m[1],
+ name: m[2],
+ })
+ ),
+ 'flake.nix': extractNixFlake,
+ 'Gemfile.lock': extract(
+ // Ruby lockfile: indented gem names under GEM > specs
+ /^\s{4}(\w[\w-]*)\s+\(/gm,
+ (m): Dep => ({ type: 'gem', name: m[1] })
+ ),
+ 'Gemfile': extract(
+ // Ruby: gem 'rails', '~> 7.0'
+ /gem\s+['"]([^'"]+)['"]/g,
+ (m): Dep => ({ type: 'gem', name: m[1] })
+ ),
+ 'go.sum': extract(
+ // Go checksum file: module/path v1.2.3 h1:hash=
+ /([\w./-]+)\s+v[\d.]+/gm,
+ (m): Dep => {
+ const parts = m[1].split('/')
+ return {
+ type: 'golang',
+ name: parts.pop()!,
+ namespace: parts.join('/') || undefined,
+ }
+ }
+ ),
+ 'go.mod': extract(
+ // Go: github.com/gin-gonic/gin v1.9.1
+ /([\w./-]+)\s+v[\d.]+/gm,
+ (m): Dep => {
+ const parts = m[1].split('/')
+ return {
+ type: 'golang',
+ name: parts.pop()!,
+ namespace: parts.join('/') || undefined,
+ }
+ }
+ ),
+ 'mix.exs': extract(
+ // Elixir: {:phoenix, "~> 1.7"}
+ /\{:(\w+),/g,
+ (m): Dep => ({ type: 'hex', name: m[1] })
+ ),
+ 'package-lock.json': extractNpmLockfile,
+ 'package.json': extractNpm,
+ 'Package.swift': extract(
+ // Swift: .package(url: "https://github.com/vapor/vapor", from: "4.0.0")
+ /\.package\s*\(\s*url:\s*"https:\/\/github\.com\/([^/]+)\/([^"]+)".*?from:\s*"([^"]+)"/gs,
+ (m): Dep => ({
+ type: 'swift',
+ namespace: `github.com/${m[1]}`,
+ name: m[2],
+ version: m[3],
+ })
+ ),
+ 'Pipfile.lock': extractPypi,
+ 'pnpm-lock.yaml': extractNpmLockfile,
+ 'poetry.lock': extract(
+ // Python poetry lockfile: [[package]]\nname = "flask"
+ /name\s*=\s*"([a-zA-Z][\w.-]*)"/gm,
+ (m): Dep => ({ type: 'pypi', name: m[1] })
+ ),
+ 'pom.xml': extractMaven,
+ 'Project.toml': extract(
+ // Julia: JSON3 = "uuid-string"
+ /^(\w[\w.-]*)\s*=\s*"/gm,
+ (m): Dep => ({ type: 'julia', name: m[1] })
+ ),
+ 'pubspec.lock': extract(
+ // Dart lockfile: top-level package names at column 2
+ /^ (\w[\w_-]*):/gm,
+ (m): Dep => ({ type: 'pub', name: m[1] })
+ ),
+ 'pubspec.yaml': extract(
+ // Dart: flutter_bloc: ^8.1.3 (2-space indented under dependencies:)
+ /^\s{2}(\w[\w_-]*):\s/gm,
+ (m): Dep => ({ type: 'pub', name: m[1] })
+ ),
+ 'pyproject.toml': extractPypi,
+ 'requirements.txt': extractPypi,
+ 'setup.py': extractPypi,
+ 'yarn.lock': extractNpmLockfile,
+}
+
+// --- main (only when executed directly, not imported) ---
+
+// NOTE(review): import.meta.filename requires a recent Node version
+// (21+) — confirm against the project's engines range.
+if (import.meta.filename === process.argv[1]) {
+  // Read the full JSON blob from stdin (piped by Claude Code).
+  // Top-level await is valid here because .mts files are ES modules.
+  let input = ''
+  for await (const chunk of process.stdin) input += chunk
+  const hook: HookInput = JSON.parse(input)
+
+  // Only Edit and Write carry file content worth scanning; every
+  // other tool call is allowed through untouched.
+  if (hook.tool_name !== 'Edit' && hook.tool_name !== 'Write') {
+    process.exitCode = 0
+  } else {
+    process.exitCode = await check(hook)
+  }
+}
+
+// --- core ---
+
+// Orchestrates the full check: extract deps, diff against old, query API.
+// Returns the hook exit code: 0 = allow, 2 = block.
+// (Return type restored — the `<number>` type argument was lost to
+// HTML-tag stripping in an earlier revision of this patch.)
+async function check(hook: HookInput): Promise<number> {
+  // Normalize backslashes and collapse segments for cross-platform paths.
+  const filePath = normalizePath(
+    hook.tool_input?.file_path || ''
+  )
+
+  // GitHub Actions workflows live under .github/workflows/*.yml
+  const isWorkflow =
+    /\.github\/workflows\/.*\.ya?ml$/.test(filePath)
+  const extractor = isWorkflow
+    ? extractGitHubActions
+    : findExtractor(filePath)
+  // Not a recognized manifest — nothing to scan, allow.
+  if (!extractor) return 0
+
+  // Edit provides new_string; Write provides content.
+  const newContent =
+    hook.tool_input?.new_string
+    || hook.tool_input?.content
+    || ''
+  const oldContent = hook.tool_input?.old_string || ''
+
+  const newDeps = extractor(newContent)
+  if (newDeps.length === 0) return 0
+
+  // Diff-aware: only check deps added in this edit, not pre-existing.
+  const deps = oldContent
+    ? diffDeps(newDeps, extractor(oldContent))
+    : newDeps
+  if (deps.length === 0) return 0
+
+  // Check all deps via SDK checkMalware().
+  const { blocked, warned } = await checkDepsBatch(deps)
+
+  // Low scores are advisory only — log and allow.
+  if (warned.length > 0) {
+    logger.warn('Socket: low-scoring dependencies (not blocked):')
+    for (const w of warned) {
+      logger.warn(`  ${w.purl}: overall score ${w.score}`)
+    }
+  }
+  // Malware/critical alerts block the edit (exit code 2).
+  if (blocked.length > 0) {
+    logger.error(`Socket: blocked ${blocked.length} dep(s):`)
+    for (const b of blocked) {
+      logger.error(`  ${b.purl}: ${b.reason}`)
+    }
+    return 2
+  }
+  return 0
+}
+
+// Check deps against Socket.dev using SDK v4 checkMalware().
+// The SDK automatically routes small sets (<=5) to parallel firewall
+// requests and larger sets to the batch PURL API.
+// Deps already in cache are skipped; results are cached after lookup.
+// Fail-open: API errors and network failures log a warning and allow
+// the edit rather than blocking Claude on infrastructure problems.
+async function checkDepsBatch(
+  deps: Dep[],
+): Promise<{ blocked: CheckResult[]; warned: CheckResult[] }> {
+  const blocked: CheckResult[] = []
+  const warned: CheckResult[] = []
+
+  // Partition deps into cached vs uncached.
+  const uncached: Array<{ dep: Dep; purl: string }> = []
+  for (const dep of deps) {
+    const purl = stringify(dep as unknown as PackageURL)
+    const cached = cacheGet(purl)
+    if (cached) {
+      // undefined result means the dep was previously found clean.
+      if (cached.result?.blocked) blocked.push(cached.result)
+      else if (cached.result?.warned) warned.push(cached.result)
+      continue
+    }
+    uncached.push({ dep, purl })
+  }
+
+  if (!uncached.length) return { blocked, warned }
+
+  try {
+    // Process in chunks to respect API batch size limit.
+    for (let i = 0; i < uncached.length; i += MAX_BATCH_SIZE) {
+      const batch = uncached.slice(i, i + MAX_BATCH_SIZE)
+      const components = batch.map(({ purl }) => ({ purl }))
+
+      const result = await sdk.checkMalware(components)
+
+      if (!result.success) {
+        // Fail open on API-level errors (rate limit, 5xx, ...).
+        logger.warn(
+          `Socket: API returned ${result.status}, allowing all`
+        )
+        return { blocked, warned }
+      }
+
+      // Build lookup keyed by "type:namespace/name" — NOT the full
+      // PURL; the key deliberately omits the version because the API
+      // response is matched back by package identity alone.
+      const purlByKey = new Map<string, string>()
+      for (const { dep, purl } of batch) {
+        const ns = dep.namespace ? `${dep.namespace}/` : ''
+        purlByKey.set(`${dep.type}:${ns}${dep.name}`, purl)
+      }
+
+      for (const pkg of result.data as MalwareCheckPackage[]) {
+        const ns = pkg.namespace ? `${pkg.namespace}/` : ''
+        const key = `${pkg.type}:${ns}${pkg.name}`
+        const purl = purlByKey.get(key)
+        // Response entry we didn't ask about — ignore.
+        if (!purl) continue
+
+        // Check for malware or critical-severity alerts.
+        // Optional-chained defensively in case a response package
+        // arrives without an alerts array.
+        const critical = pkg.alerts?.find(
+          a => a.severity === 'critical' || a.type === 'malware'
+        )
+        if (critical) {
+          const cr: CheckResult = {
+            purl,
+            blocked: true,
+            reason: `${critical.type} — ${critical.severity ?? 'critical'}`,
+          }
+          cacheSet(purl, cr)
+          blocked.push(cr)
+          continue
+        }
+
+        // Warn on low quality score.
+        if (
+          pkg.score?.overall !== undefined
+          && pkg.score.overall < LOW_SCORE_THRESHOLD
+        ) {
+          const wr: CheckResult = {
+            purl,
+            warned: true,
+            score: pkg.score.overall,
+          }
+          cacheSet(purl, wr)
+          warned.push(wr)
+          continue
+        }
+
+        // No blocking alerts — clean dep (cached as undefined).
+        cacheSet(purl, undefined)
+      }
+    }
+  } catch (e) {
+    // Network failure — log and allow all deps through.
+    logger.warn(
+      `Socket: network error`
+      + ` (${(e as Error).message}), allowing all`
+    )
+  }
+
+  return { blocked, warned }
+}
+
+// Return deps in `newDeps` that don't appear in `oldDeps` (by PURL).
+// Identity is the stringified Package URL, so namespace and version
+// both participate in the comparison.
+function diffDeps(newDeps: Dep[], oldDeps: Dep[]): Dep[] {
+  const toPurl = (d: Dep): string =>
+    stringify(d as unknown as PackageURL)
+  const known = new Set(oldDeps.map(toPurl))
+  const added: Dep[] = []
+  for (const dep of newDeps) {
+    if (!known.has(toPurl(dep))) added.push(dep)
+  }
+  return added
+}
+
+// Match file path suffix against the extractors map.
+function findExtractor(
+ filePath: string,
+): Extractor | undefined {
+ for (const [suffix, fn] of Object.entries(extractors)) {
+ if (filePath.endsWith(suffix)) return fn
+ }
+}
+
+// --- extractor factory ---
+
+// Higher-order function: takes a regex and a match→Dep transform,
+// returns an Extractor that applies matchAll and collects results.
+// Transforms may return undefined to skip a match. Regexes must carry
+// the /g flag (matchAll requires it).
+function extract(
+  re: RegExp,
+  transform: (m: RegExpExecArray) => Dep | undefined,
+): Extractor {
+  return (content: string): Dep[] => {
+    const deps: Dep[] = []
+    for (const m of content.matchAll(re)) {
+      const dep = transform(m as RegExpExecArray)
+      if (dep) deps.push(dep)
+    }
+    return deps
+  }
+}
+
+// --- ecosystem extractors (alphabetic) ---
+
+// Homebrew (Brewfile): brew "package" or cask "package" entries.
+// Note: `tap "owner/repo"` lines are NOT captured — the regex below
+// only matches brew and cask declarations.
+function extractBrewfile(content: string): Dep[] {
+  const deps: Dep[] = []
+  // brew "git", cask "firefox" (tap lines are intentionally ignored)
+  for (const m of content.matchAll(
+    /(?:brew|cask)\s+['"]([^'"]+)['"]/g
+  )) {
+    deps.push({ type: 'brew', name: m[1] })
+  }
+  return deps
+}
+
+// Conan (C/C++): "boost/1.83.0" in conanfile.txt,
+// or requires = "zlib/1.3.0" in conanfile.py.
+// The pattern is intentionally loose (any name/version pair), so it
+// may also match name/version-shaped text outside requires sections.
+function extractConan(content: string): Dep[] {
+  const deps: Dep[] = []
+  for (const m of content.matchAll(
+    /([a-z][\w.-]+)\/[\d.]+/gm
+  )) {
+    deps.push({ type: 'conan', name: m[1] })
+  }
+  return deps
+}
+
+// GitHub Actions: "uses: owner/repo@ref" in workflow YAML.
+// Handles subpaths like "org/repo/subpath@v1" (subpath folds into the
+// name). The @ref is captured but deliberately discarded — the Dep
+// carries no version. Local actions ("./path") produce a single-part
+// split and are filtered by the parts.length check.
+function extractGitHubActions(content: string): Dep[] {
+  const deps: Dep[] = []
+  for (const m of content.matchAll(
+    /uses:\s*['"]?([^@\s'"]+)@([^\s'"]+)/g
+  )) {
+    const parts = m[1].split('/')
+    if (parts.length >= 2) {
+      deps.push({
+        type: 'github',
+        namespace: parts[0],
+        name: parts.slice(1).join('/'),
+      })
+    }
+  }
+  return deps
+}
+
+// Maven/Gradle (Java/Kotlin):
+//   pom.xml: <groupId>org.apache</groupId><artifactId>commons</artifactId>
+//   build.gradle(.kts): implementation 'group:artifact:version'
+// (The opening <groupId>/<artifactId> tags in the POM regex were lost
+// to HTML-tag stripping in an earlier revision; restored here so the
+// captures anchor on the actual XML elements.)
+function extractMaven(content: string): Dep[] {
+  const deps: Dep[] = []
+  // XML-style Maven POM declarations.
+  for (const m of content.matchAll(
+    /<groupId>([^<]+)<\/groupId>\s*<artifactId>([^<]+)<\/artifactId>/g
+  )) {
+    deps.push({
+      type: 'maven',
+      namespace: m[1],
+      name: m[2],
+    })
+  }
+  // Gradle shorthand: implementation/api/compile 'group:artifact:ver'
+  for (const m of content.matchAll(
+    /(?:implementation|api|compile)\s+['"]([^:'"]+):([^:'"]+)(?::[^'"]*)?['"]/g
+  )) {
+    deps.push({
+      type: 'maven',
+      namespace: m[1],
+      name: m[2],
+    })
+  }
+  return deps
+}
+
+// Convenience entry point for testing: route any file path
+// through the correct extractor and return all deps found.
+// Returns [] when the path is not a recognized manifest.
+function extractNewDeps(
+  rawFilePath: string,
+  content: string,
+): Dep[] {
+  // Normalize backslashes and collapse segments for cross-platform.
+  const filePath = normalizePath(rawFilePath)
+  // Workflow files are dispatched by directory, not suffix, so they
+  // are special-cased ahead of the suffix map (mirrors check()).
+  const isWorkflow =
+    /\.github\/workflows\/.*\.ya?ml$/.test(filePath)
+  const extractor = isWorkflow
+    ? extractGitHubActions
+    : findExtractor(filePath)
+  return extractor ? extractor(content) : []
+}
+
+// Nix flakes (flake.nix): inputs.name.url = "github:owner/repo"
+// or inputs.name = { url = "github:owner/repo"; };
+function extractNixFlake(content: string): Dep[] {
+  const deps: Dep[] = []
+  // Match github:owner/repo patterns in flake inputs.
+  for (const m of content.matchAll(
+    /github:([^/\s"]+)\/([^/\s"]+)/g
+  )) {
+    deps.push({
+      type: 'github',
+      namespace: m[1],
+      // Defensive trim of any trailing /ref segment; note the capture
+      // class already excludes '/', so this is a no-op today.
+      name: m[2].replace(/\/.*$/, ''),
+    })
+  }
+  return deps
+}
+
+// npm lockfiles (package-lock.json, pnpm-lock.yaml, yarn.lock):
+// Each format references packages differently:
+//   package-lock.json: "node_modules/@scope/name" or "node_modules/name"
+//   pnpm-lock.yaml: /@scope/name@version or /name@version
+//   yarn.lock: "@scope/name@version" or name@version
+// Dedup across both passes is handled by the shared `seen` set inside
+// addNpmDep. The second regex is intentionally broad and may match
+// name@ver-shaped text beyond lockfile entries.
+function extractNpmLockfile(content: string): Dep[] {
+  const deps: Dep[] = []
+  const seen = new Set()
+
+  // package-lock.json: "node_modules/name" or "node_modules/@scope/name"
+  for (const m of content.matchAll(
+    /node_modules\/((?:@[\w.-]+\/)?[\w][\w.-]*)/g
+  )) {
+    addNpmDep(m[1], deps, seen)
+  }
+  // pnpm-lock.yaml: '/name@ver' or '/@scope/name@ver'
+  // yarn.lock: "name@ver" or "@scope/name@ver"
+  for (const m of content.matchAll(
+    /['"/]((?:@[\w.-]+\/)?[\w][\w.-]*)@/gm
+  )) {
+    addNpmDep(m[1], deps, seen)
+  }
+  return deps
+}
+
+// Deduplicated npm dep insertion using parseNpmSpecifier.
+// Mutates `deps` and `seen` in place. Skips relative/absolute paths
+// and anything not starting with '@' or a lowercase letter.
+// (The `Set<string>` type argument was lost to HTML-tag stripping in
+// an earlier revision — bare `Set` is a TS compile error.)
+function addNpmDep(
+  raw: string,
+  deps: Dep[],
+  seen: Set<string>,
+): void {
+  if (seen.has(raw)) return
+  seen.add(raw)
+  if (raw.startsWith('.') || raw.startsWith('/')) return
+  if (raw.startsWith('@') || /^[a-z]/.test(raw)) {
+    const { namespace, name } = parseNpmSpecifier(raw)
+    if (name) deps.push({ type: 'npm', namespace, name })
+  }
+}
+
+// npm (package.json): "name": "version" or "@scope/name": "ver".
+// Only matches entries where the value looks like a version/range/specifier,
+// not arbitrary string values like scripts or config.
+// Regex-based, not a JSON parse — any key/value pair whose value looks
+// version-shaped is captured, regardless of which section it sits in.
+function extractNpm(content: string): Dep[] {
+  const deps: Dep[] = []
+  for (const m of content.matchAll(
+    /"(@?[^"]+)":\s*"([^"]*)"/g
+  )) {
+    const raw = m[1]
+    const val = m[2]
+    // Skip builtins, relative, and absolute paths.
+    if (
+      raw.startsWith('node:')
+      || raw.startsWith('.')
+      || raw.startsWith('/')
+    ) continue
+    // Value must look like a version specifier: semver, range, workspace:,
+    // catalog:, npm:, *, latest, or starts with ^~><=.
+    if (!/^[\^~><=*]|^\d|^workspace:|^catalog:|^npm:|^latest$/.test(val)) continue
+    // Only lowercase or scoped names are real deps.
+    if (raw.startsWith('@') || /^[a-z]/.test(raw)) {
+      const { namespace, name } = parseNpmSpecifier(raw)
+      if (name) deps.push({ type: 'npm', namespace, name })
+    }
+  }
+  return deps
+}
+
+// PyPI (requirements.txt, pyproject.toml, setup.py, Pipfile.lock):
+// requirements.txt: package>=1.0 or package==1.0 at line start
+// pyproject.toml: "package>=1.0" in dependencies arrays
+// setup.py: "package>=1.0" in install_requires lists
+function extractPypi(content: string): Dep[] {
+ const deps: Dep[] = []
+ const seen = new Set()
+ // requirements.txt style: package name at line start, followed by
+ // version specifier, extras bracket, or end of line.
+ for (const m of content.matchAll(
+ /^([a-zA-Z][\w.-]+)\s*(?:[>===18.20.4",
+ "pnpm": ">=10.25.0"
+ }
+ },
+ "../../../node_modules/.pnpm/@socketsecurity+lib@5.15.0_typescript@5.9.3/node_modules/@socketsecurity/lib": {
+ "version": "5.15.0",
+ "license": "MIT",
+ "devDependencies": {
+ "@anthropic-ai/claude-code": "2.1.92",
+ "@babel/core": "7.28.4",
+ "@babel/parser": "7.28.4",
+ "@babel/traverse": "7.28.4",
+ "@babel/types": "7.28.4",
+ "@dotenvx/dotenvx": "1.49.0",
+ "@inquirer/checkbox": "4.3.1",
+ "@inquirer/confirm": "5.1.16",
+ "@inquirer/input": "4.2.2",
+ "@inquirer/password": "4.0.18",
+ "@inquirer/search": "3.1.1",
+ "@inquirer/select": "4.3.2",
+ "@npmcli/arborist": "9.1.4",
+ "@npmcli/package-json": "7.0.0",
+ "@npmcli/promise-spawn": "8.0.3",
+ "@socketregistry/is-unicode-supported": "1.0.5",
+ "@socketregistry/packageurl-js": "1.4.1",
+ "@socketregistry/yocto-spinner": "1.0.25",
+ "@socketsecurity/lib-stable": "npm:@socketsecurity/lib@5.14.0",
+ "@types/node": "24.9.2",
+ "@typescript/native-preview": "7.0.0-dev.20250920.1",
+ "@vitest/coverage-v8": "4.0.3",
+ "@vitest/ui": "4.0.3",
+ "@yarnpkg/core": "4.5.0",
+ "@yarnpkg/extensions": "2.0.6",
+ "adm-zip": "0.5.16",
+ "cacache": "20.0.1",
+ "debug": "4.4.3",
+ "del": "8.0.1",
+ "del-cli": "6.0.0",
+ "esbuild": "0.25.11",
+ "eslint-plugin-sort-destructure-keys": "2.0.0",
+ "fast-glob": "3.3.3",
+ "fast-sort": "3.4.1",
+ "get-east-asian-width": "1.3.0",
+ "globals": "16.4.0",
+ "has-flag": "5.0.1",
+ "husky": "9.1.7",
+ "libnpmexec": "10.2.3",
+ "libnpmpack": "9.0.9",
+ "lint-staged": "15.2.11",
+ "magic-string": "0.30.17",
+ "make-fetch-happen": "15.0.2",
+ "nock": "14.0.10",
+ "normalize-package-data": "8.0.0",
+ "npm-package-arg": "13.0.0",
+ "oxfmt": "^0.37.0",
+ "oxlint": "1.53.0",
+ "p-map": "7.0.4",
+ "pacote": "21.0.1",
+ "picomatch": "4.0.4",
+ "pony-cause": "2.1.11",
+ "semver": "7.7.2",
+ "signal-exit": "4.1.0",
+ "spdx-correct": "3.2.0",
+ "spdx-expression-parse": "4.0.0",
+ "streaming-iterables": "8.0.1",
+ "supports-color": "10.2.2",
+ "tar-fs": "3.1.2",
+ "tar-stream": "3.1.8",
+ "taze": "19.9.2",
+ "trash": "10.0.0",
+ "type-coverage": "2.29.7",
+ "typescript": "5.9.2",
+ "validate-npm-package-name": "6.0.2",
+ "vite-tsconfig-paths": "5.1.4",
+ "vitest": "4.0.3",
+ "which": "5.0.0",
+ "yargs-parser": "22.0.0",
+ "yoctocolors-cjs": "2.1.3",
+ "zod": "4.1.12"
+ },
+ "engines": {
+ "node": ">=22",
+ "pnpm": ">=10.25.0"
+ },
+ "peerDependencies": {
+ "typescript": ">=5.0.0"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "../../../node_modules/.pnpm/@socketsecurity+sdk@4.0.0_typescript@5.9.3/node_modules/@socketsecurity/sdk": {
+ "version": "4.0.0",
+ "license": "MIT",
+ "dependencies": {
+ "@socketsecurity/lib": "5.15.0",
+ "form-data": "4.0.5"
+ },
+ "devDependencies": {
+ "@anthropic-ai/claude-code": "2.1.92",
+ "@babel/generator": "7.28.5",
+ "@babel/parser": "7.26.3",
+ "@babel/traverse": "7.26.4",
+ "@babel/types": "7.26.3",
+ "@dotenvx/dotenvx": "1.54.1",
+ "@oxlint/migrate": "1.52.0",
+ "@sveltejs/acorn-typescript": "1.0.8",
+ "@types/babel__traverse": "7.28.0",
+ "@types/node": "24.9.2",
+ "@typescript/native-preview": "7.0.0-dev.20250926.1",
+ "@vitest/coverage-v8": "4.0.3",
+ "acorn": "8.15.0",
+ "del": "8.0.1",
+ "dev-null-cli": "2.0.0",
+ "ecc-agentshield": "1.4.0",
+ "esbuild": "0.25.11",
+ "fast-glob": "3.3.3",
+ "husky": "9.1.7",
+ "magic-string": "0.30.14",
+ "nock": "14.0.10",
+ "openapi-typescript": "6.7.6",
+ "oxfmt": "0.37.0",
+ "oxlint": "1.52.0",
+ "semver": "7.7.2",
+ "taze": "19.9.2",
+ "type-coverage": "2.29.7",
+ "vitest": "4.0.3"
+ },
+ "engines": {
+ "node": ">=18.20.8",
+ "pnpm": ">=10.33.0"
+ }
+ },
+ "../../../node_modules/.pnpm/@types+node@24.9.2/node_modules/@types/node": {
+ "version": "24.9.2",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "undici-types": "~7.16.0"
+ }
+ },
+ "node_modules/@socketregistry/packageurl-js": {
+ "resolved": "../../../node_modules/.pnpm/@socketregistry+packageurl-js@1.4.1/node_modules/@socketregistry/packageurl-js",
+ "link": true
+ },
+ "node_modules/@socketsecurity/lib": {
+ "resolved": "../../../node_modules/.pnpm/@socketsecurity+lib@5.15.0_typescript@5.9.3/node_modules/@socketsecurity/lib",
+ "link": true
+ },
+ "node_modules/@socketsecurity/sdk": {
+ "resolved": "../../../node_modules/.pnpm/@socketsecurity+sdk@4.0.0_typescript@5.9.3/node_modules/@socketsecurity/sdk",
+ "link": true
+ },
+ "node_modules/@types/node": {
+ "resolved": "../../../node_modules/.pnpm/@types+node@24.9.2/node_modules/@types/node",
+ "link": true
+ }
+ }
+}
diff --git a/.claude/hooks/check-new-deps/package.json b/.claude/hooks/check-new-deps/package.json
new file mode 100644
index 00000000..cd736d1b
--- /dev/null
+++ b/.claude/hooks/check-new-deps/package.json
@@ -0,0 +1,20 @@
+{
+ "name": "@socketsecurity/hook-check-new-deps",
+ "private": true,
+ "type": "module",
+ "main": "./index.mts",
+ "exports": {
+ ".": "./index.mts"
+ },
+ "scripts": {
+ "test": "node --test test/*.test.mts"
+ },
+ "dependencies": {
+ "@socketregistry/packageurl-js": "1.4.1",
+ "@socketsecurity/lib": "5.15.0",
+ "@socketsecurity/sdk": "4.0.0"
+ },
+ "devDependencies": {
+ "@types/node": "24.9.2"
+ }
+}
diff --git a/.claude/hooks/check-new-deps/test/extract-deps.test.mts b/.claude/hooks/check-new-deps/test/extract-deps.test.mts
new file mode 100644
index 00000000..b4fb35db
--- /dev/null
+++ b/.claude/hooks/check-new-deps/test/extract-deps.test.mts
@@ -0,0 +1,750 @@
+import { describe, it } from 'node:test'
+import assert from 'node:assert/strict'
+
+import { whichSync } from '@socketsecurity/lib/bin'
+import { spawnSync } from '@socketsecurity/lib/spawn'
+
+import {
+ cache,
+ cacheGet,
+ cacheSet,
+ extractBrewfile,
+ extractNewDeps,
+ extractNixFlake,
+ extractNpmLockfile,
+ extractTerraform,
+ diffDeps,
+} from '../index.mts'
+
+const hookScript = new URL('../index.mts', import.meta.url).pathname
+const nodeBin = whichSync('node')
+if (!nodeBin) {
+ throw new Error('"node" not found on PATH')
+}
+
+// Helper: run the full hook as a subprocess.
+// Uses spawnSync because we need to pipe stdin content (the hook reads JSON from stdin).
+function runHook(
+    toolInput: Record<string, unknown>,
+ toolName = 'Edit',
+): { code: number | null; stdout: string; stderr: string } {
+ const input = JSON.stringify({
+ tool_name: toolName,
+ tool_input: toolInput,
+ })
+ const result = spawnSync(nodeBin, [hookScript], {
+ input,
+ timeout: 15_000,
+ stdio: ['pipe', 'pipe', 'pipe'],
+ })
+ return {
+ code: result.status ?? 1,
+ stdout: typeof result.stdout === 'string' ? result.stdout : result.stdout.toString(),
+ stderr: typeof result.stderr === 'string' ? result.stderr : result.stderr.toString(),
+ }
+}
+
+
+// ============================================================================
+// Unit tests: extractNewDeps per ecosystem
+// ============================================================================
+
+describe('extractNewDeps', () => {
+ // npm
+ describe('npm', () => {
+ it('unscoped', () => {
+ const d = extractNewDeps(
+ 'package.json',
+ '"lodash": "^4.17.21"',
+ )
+ assert.equal(d.length, 1)
+ assert.equal(d[0].type, 'npm')
+ assert.equal(d[0].name, 'lodash')
+ assert.equal(d[0].namespace, undefined)
+ })
+ it('scoped', () => {
+ const d = extractNewDeps(
+ 'package.json',
+ '"@types/node": "^20.0.0"',
+ )
+ assert.equal(d[0].namespace, '@types')
+ assert.equal(d[0].name, 'node')
+ })
+ it('multiple', () => {
+ const d = extractNewDeps(
+ 'package.json',
+ '"a": "1", "@b/c": "2", "d": "3"',
+ )
+ assert.equal(d.length, 3)
+ })
+ it('ignores node: builtins', () => {
+ assert.equal(
+ extractNewDeps('package.json', '"node:fs": "1"').length,
+ 0,
+ )
+ })
+ it('ignores relative', () => {
+ assert.equal(
+ extractNewDeps('package.json', '"./foo": "1"').length,
+ 0,
+ )
+ })
+ it('ignores absolute', () => {
+ assert.equal(
+ extractNewDeps('package.json', '"/foo": "1"').length,
+ 0,
+ )
+ })
+ it('ignores capitalized keys', () => {
+ assert.equal(
+ extractNewDeps('package.json', '"Name": "my-project"').length,
+ 0,
+ )
+ })
+ it('handles workspace protocol', () => {
+ const d = extractNewDeps(
+ 'package.json',
+ '"my-lib": "workspace:*"',
+ )
+ assert.equal(d.length, 1)
+ })
+ })
+
+ // cargo
+ describe('cargo', () => {
+ it('inline version', () => {
+ const d = extractNewDeps('Cargo.toml', 'serde = "1.0"')
+ assert.deepEqual(d[0], { type: 'cargo', name: 'serde' })
+ })
+ it('table version', () => {
+ const d = extractNewDeps(
+ 'Cargo.toml',
+ 'serde = { version = "1.0", features = ["derive"] }',
+ )
+ assert.equal(d[0].name, 'serde')
+ })
+ it('hyphenated name', () => {
+ assert.equal(
+ extractNewDeps('Cargo.toml', 'simd-json = "0.17"')[0].name,
+ 'simd-json',
+ )
+ })
+ it('multiple', () => {
+ assert.equal(
+ extractNewDeps('Cargo.toml', 'a = "1"\nb = { version = "2" }').length,
+ 2,
+ )
+ })
+ })
+
+ // golang
+ describe('golang', () => {
+ it('with namespace', () => {
+ const d = extractNewDeps(
+ 'go.mod',
+ 'github.com/gin-gonic/gin v1.9.1',
+ )
+ assert.equal(d[0].namespace, 'github.com/gin-gonic')
+ assert.equal(d[0].name, 'gin')
+ })
+ it('stdlib extension', () => {
+ const d = extractNewDeps(
+ 'go.mod',
+ 'golang.org/x/sync v0.7.0',
+ )
+ assert.equal(d[0].namespace, 'golang.org/x')
+ assert.equal(d[0].name, 'sync')
+ })
+ })
+
+ // pypi
+ describe('pypi', () => {
+ it('requirements.txt', () => {
+ const d = extractNewDeps(
+ 'requirements.txt',
+ 'flask>=2.0\nrequests==2.31',
+ )
+ assert.ok(d.some(x => x.name === 'flask'))
+ assert.ok(d.some(x => x.name === 'requests'))
+ })
+ it('pyproject.toml', () => {
+ assert.ok(
+ extractNewDeps('pyproject.toml', '"django>=4.2"')
+ .some(x => x.name === 'django'),
+ )
+ })
+ it('setup.py', () => {
+ assert.ok(
+ extractNewDeps('setup.py', '"numpy>=1.24"')
+ .some(x => x.name === 'numpy'),
+ )
+ })
+ })
+
+ // gem
+ describe('gem', () => {
+ it('single-quoted', () => {
+ assert.equal(
+ extractNewDeps('Gemfile', "gem 'rails'")[0].name,
+ 'rails',
+ )
+ })
+ it('double-quoted with version', () => {
+ assert.equal(
+ extractNewDeps('Gemfile', 'gem "sinatra", "~> 3.0"')[0].name,
+ 'sinatra',
+ )
+ })
+ })
+
+ // maven
+ describe('maven', () => {
+ it('pom.xml', () => {
+ const d = extractNewDeps(
+ 'pom.xml',
+        '<dependency><groupId>org.apache</groupId><artifactId>commons-lang3</artifactId></dependency>',
+ )
+ assert.equal(d[0].namespace, 'org.apache')
+ assert.equal(d[0].name, 'commons-lang3')
+ })
+ it('build.gradle', () => {
+ const d = extractNewDeps(
+ 'build.gradle',
+ "implementation 'com.google.guava:guava:32.1'",
+ )
+ assert.equal(d[0].namespace, 'com.google.guava')
+ assert.equal(d[0].name, 'guava')
+ })
+ it('build.gradle.kts', () => {
+ const d = extractNewDeps(
+ 'build.gradle.kts',
+ "implementation 'org.jetbrains:annotations:24.0'",
+ )
+ assert.equal(d[0].name, 'annotations')
+ })
+ })
+
+ // swift
+ describe('swift', () => {
+ it('github package', () => {
+ const d = extractNewDeps(
+ 'Package.swift',
+ '.package(url: "https://github.com/vapor/vapor", from: "4.0.0")',
+ )
+ assert.equal(d[0].type, 'swift')
+ assert.equal(d[0].name, 'vapor')
+ })
+ })
+
+ // pub
+ describe('pub', () => {
+ it('dart package', () => {
+ assert.equal(
+ extractNewDeps('pubspec.yaml', ' flutter_bloc: ^8.1')[0].name,
+ 'flutter_bloc',
+ )
+ })
+ })
+
+ // hex
+ describe('hex', () => {
+ it('elixir dep', () => {
+ assert.equal(
+ extractNewDeps('mix.exs', '{:phoenix, "~> 1.7"}')[0].name,
+ 'phoenix',
+ )
+ })
+ })
+
+ // composer
+ describe('composer', () => {
+ it('vendor/package', () => {
+ const d = extractNewDeps(
+ 'composer.json',
+ '"monolog/monolog": "^3.0"',
+ )
+ assert.equal(d[0].namespace, 'monolog')
+ assert.equal(d[0].name, 'monolog')
+ })
+ })
+
+ // nuget
+ describe('nuget', () => {
+ it('.csproj PackageReference', () => {
+ assert.equal(
+ extractNewDeps(
+ 'test.csproj',
+          '<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />',
+ )[0].name,
+ 'Newtonsoft.Json',
+ )
+ })
+ })
+
+ // julia
+ describe('julia', () => {
+ it('Project.toml', () => {
+ assert.equal(
+ extractNewDeps('Project.toml', 'JSON3 = "0a1fb500"')[0].name,
+ 'JSON3',
+ )
+ })
+ })
+
+ // conan
+ describe('conan', () => {
+ it('conanfile.txt', () => {
+ assert.equal(
+ extractNewDeps('conanfile.txt', 'boost/1.83.0')[0].name,
+ 'boost',
+ )
+ })
+ it('conanfile.py', () => {
+ assert.equal(
+ extractNewDeps('conanfile.py', 'requires = "zlib/1.3.0"')[0].name,
+ 'zlib',
+ )
+ })
+ })
+
+ // github actions
+ describe('github actions', () => {
+ it('extracts action with version', () => {
+ const d = extractNewDeps(
+ '.github/workflows/ci.yml',
+ 'uses: actions/checkout@v4',
+ )
+ assert.equal(d[0].type, 'github')
+ assert.equal(d[0].namespace, 'actions')
+ assert.equal(d[0].name, 'checkout')
+ })
+ it('extracts action with SHA', () => {
+ const d = extractNewDeps(
+ '.github/workflows/ci.yml',
+ 'uses: actions/setup-node@abc123def',
+ )
+ assert.equal(d[0].name, 'setup-node')
+ })
+ it('extracts action with subpath', () => {
+ const d = extractNewDeps(
+ '.github/workflows/ci.yml',
+ 'uses: org/repo/subpath@v1',
+ )
+ assert.equal(d[0].namespace, 'org')
+ assert.equal(d[0].name, 'repo/subpath')
+ })
+ it('multiple actions', () => {
+ const d = extractNewDeps(
+ '.github/workflows/ci.yml',
+ 'uses: a/b@v1\n uses: c/d@v2',
+ )
+ assert.equal(d.length, 2)
+ })
+ })
+
+ // terraform
+ describe('terraform', () => {
+ it('registry module source', () => {
+ const d = extractTerraform(
+ 'source = "hashicorp/consul/aws"',
+ )
+ assert.equal(d[0].type, 'terraform')
+ assert.equal(d[0].namespace, 'hashicorp')
+ assert.equal(d[0].name, 'consul')
+ })
+ it('via extractNewDeps', () => {
+ const d = extractNewDeps(
+ 'main.tf',
+ 'source = "cloudflare/dns/cloudflare"',
+ )
+ assert.equal(d.length, 1)
+ assert.equal(d[0].namespace, 'cloudflare')
+ })
+ })
+
+ // nix flakes
+ describe('nix flakes', () => {
+ it('github input', () => {
+ const d = extractNixFlake(
+ 'inputs.nixpkgs.url = "github:NixOS/nixpkgs"',
+ )
+ assert.equal(d[0].type, 'github')
+ assert.equal(d[0].namespace, 'NixOS')
+ assert.equal(d[0].name, 'nixpkgs')
+ })
+ it('via extractNewDeps', () => {
+ const d = extractNewDeps(
+ 'flake.nix',
+ 'url = "github:nix-community/home-manager"',
+ )
+ assert.equal(d.length, 1)
+ assert.equal(d[0].name, 'home-manager')
+ })
+ })
+
+ // homebrew
+ describe('homebrew', () => {
+ it('brew formula', () => {
+ const d = extractBrewfile('brew "git"')
+ assert.equal(d[0].type, 'brew')
+ assert.equal(d[0].name, 'git')
+ })
+ it('cask', () => {
+ const d = extractBrewfile('cask "firefox"')
+ assert.equal(d[0].name, 'firefox')
+ })
+ it('via extractNewDeps', () => {
+ const d = extractNewDeps(
+ 'Brewfile',
+ 'brew "wget"\ncask "iterm2"',
+ )
+ assert.equal(d.length, 2)
+ })
+ })
+
+ // lockfiles
+ describe('lockfiles', () => {
+ it('package-lock.json', () => {
+ const d = extractNpmLockfile(
+ '"node_modules/lodash": { "version": "4.17.21" }',
+ )
+ assert.ok(d.some(x => x.name === 'lodash'))
+ })
+ it('pnpm-lock.yaml', () => {
+ const d = extractNewDeps(
+ 'pnpm-lock.yaml',
+ "'/lodash@4.17.21':\n resolution:",
+ )
+ assert.ok(d.some(x => x.name === 'lodash'))
+ })
+ it('yarn.lock', () => {
+ const d = extractNewDeps(
+ 'yarn.lock',
+ '"lodash@^4.17.21":\n version:',
+ )
+ assert.ok(d.some(x => x.name === 'lodash'))
+ })
+ it('Cargo.lock', () => {
+ const d = extractNewDeps(
+ 'Cargo.lock',
+ 'name = "serde"\nversion = "1.0.210"',
+ )
+ assert.equal(d[0].type, 'cargo')
+ assert.equal(d[0].name, 'serde')
+ })
+ it('go.sum', () => {
+ const d = extractNewDeps(
+ 'go.sum',
+ 'github.com/gin-gonic/gin v1.9.1 h1:abc=',
+ )
+ assert.equal(d[0].type, 'golang')
+ assert.equal(d[0].name, 'gin')
+ })
+ it('Gemfile.lock', () => {
+ const d = extractNewDeps(
+ 'Gemfile.lock',
+ ' rails (7.1.0)\n activerecord (7.1.0)',
+ )
+ assert.ok(d.some(x => x.name === 'rails'))
+ })
+ it('composer.lock', () => {
+ const d = extractNewDeps(
+ 'composer.lock',
+ '"name": "monolog/monolog"',
+ )
+ assert.equal(d[0].namespace, 'monolog')
+ assert.equal(d[0].name, 'monolog')
+ })
+ it('poetry.lock', () => {
+ const d = extractNewDeps(
+ 'poetry.lock',
+ 'name = "flask"\nversion = "3.0.0"',
+ )
+ assert.ok(d.some(x => x.name === 'flask'))
+ })
+ it('pubspec.lock', () => {
+ const d = extractNewDeps(
+ 'pubspec.lock',
+ ' flutter_bloc:\n dependency: direct',
+ )
+ assert.ok(d.some(x => x.name === 'flutter_bloc'))
+ })
+ })
+
+ // windows paths
+ describe('windows paths', () => {
+ it('handles backslash in package.json path', () => {
+ const d = extractNewDeps(
+ 'C:\\Users\\foo\\project\\package.json',
+ '"lodash": "^4"',
+ )
+ assert.equal(d.length, 1)
+ assert.equal(d[0].name, 'lodash')
+ })
+ it('handles backslash in workflow path', () => {
+ const d = extractNewDeps(
+ '.github\\workflows\\ci.yml',
+ 'uses: actions/checkout@v4',
+ )
+ assert.equal(d.length, 1)
+ assert.equal(d[0].name, 'checkout')
+ })
+ it('handles backslash in Cargo.toml path', () => {
+ const d = extractNewDeps(
+ 'src\\parser\\Cargo.toml',
+ 'serde = "1.0"',
+ )
+ assert.equal(d.length, 1)
+ })
+ })
+
+ // pass-through
+ describe('unsupported files', () => {
+ it('returns empty for .rs', () => {
+ assert.equal(
+ extractNewDeps('main.rs', 'fn main(){}').length,
+ 0,
+ )
+ })
+ it('returns empty for .js', () => {
+ assert.equal(
+ extractNewDeps('index.js', 'x').length,
+ 0,
+ )
+ })
+ it('returns empty for .md', () => {
+ assert.equal(
+ extractNewDeps('README.md', '# hi').length,
+ 0,
+ )
+ })
+ })
+})
+
+// ============================================================================
+// Unit tests: diffDeps
+// ============================================================================
+
+describe('diffDeps', () => {
+ it('returns only new deps', () => {
+ const newDeps = [
+ { type: 'npm', name: 'a' },
+ { type: 'npm', name: 'b' },
+ ]
+ const oldDeps = [{ type: 'npm', name: 'a' }]
+ const result = diffDeps(newDeps, oldDeps)
+ assert.equal(result.length, 1)
+ assert.equal(result[0].name, 'b')
+ })
+ it('returns empty when no new deps', () => {
+ const deps = [{ type: 'npm', name: 'a' }]
+ assert.equal(diffDeps(deps, deps).length, 0)
+ })
+ it('returns all when old is empty', () => {
+ const deps = [
+ { type: 'npm', name: 'a' },
+ { type: 'npm', name: 'b' },
+ ]
+ assert.equal(diffDeps(deps, []).length, 2)
+ })
+})
+
+// ============================================================================
+// Unit tests: cache
+// ============================================================================
+
+describe('cache', () => {
+ it('stores and retrieves entries', () => {
+ cache.clear()
+ cacheSet('pkg:npm/test', { purl: 'pkg:npm/test', blocked: true })
+ const entry = cacheGet('pkg:npm/test')
+ assert.ok(entry)
+ assert.equal(entry!.result?.blocked, true)
+ })
+ it('returns undefined for missing keys', () => {
+ cache.clear()
+ assert.equal(cacheGet('pkg:npm/missing'), undefined)
+ })
+ it('evicts expired entries on get', () => {
+ cache.clear()
+ // Manually insert an expired entry.
+ cache.set('pkg:npm/expired', {
+ result: undefined,
+ expiresAt: Date.now() - 1000,
+ })
+ assert.equal(cacheGet('pkg:npm/expired'), undefined)
+ assert.equal(cache.has('pkg:npm/expired'), false)
+ })
+ it('caches undefined for clean deps', () => {
+ cache.clear()
+ cacheSet('pkg:npm/clean', undefined)
+ const entry = cacheGet('pkg:npm/clean')
+ assert.ok(entry)
+ assert.equal(entry!.result, undefined)
+ })
+})
+
+// ============================================================================
+// Integration tests: full hook subprocess
+// ============================================================================
+
+describe('hook integration', () => {
+ // Blocking
+ it('blocks malware (npm)', async () => {
+ const r = await runHook({
+ file_path: '/tmp/package.json',
+ new_string: '"bradleymeck": "^1.0.0"',
+ })
+ assert.equal(r.code, 2)
+ assert.ok(r.stderr.includes('blocked'))
+ })
+
+ // Allowing
+ it('allows clean npm package', async () => {
+ const r = await runHook({
+ file_path: '/tmp/package.json',
+ new_string: '"lodash": "^4.17.21"',
+ })
+ assert.equal(r.code, 0)
+ })
+ it('allows scoped npm package', async () => {
+ const r = await runHook({
+ file_path: '/tmp/package.json',
+ new_string: '"@types/node": "^20"',
+ })
+ assert.equal(r.code, 0)
+ })
+ it('allows cargo crate', async () => {
+ const r = await runHook({
+ file_path: '/tmp/Cargo.toml',
+ new_string: 'serde = "1.0"',
+ })
+ assert.equal(r.code, 0)
+ })
+ it('allows go module', async () => {
+ const r = await runHook({
+ file_path: '/tmp/go.mod',
+ new_string: 'golang.org/x/sync v0.7.0',
+ })
+ assert.equal(r.code, 0)
+ })
+ it('allows pypi package', async () => {
+ const r = await runHook({
+ file_path: '/tmp/requirements.txt',
+ new_string: 'flask>=2.0',
+ })
+ assert.equal(r.code, 0)
+ })
+ it('allows ruby gem', async () => {
+ const r = await runHook({
+ file_path: '/tmp/Gemfile',
+ new_string: "gem 'rails'",
+ })
+ assert.equal(r.code, 0)
+ })
+ it('allows maven dep', async () => {
+ const r = await runHook({
+ file_path: '/tmp/build.gradle',
+ new_string: "implementation 'com.google.guava:guava:32.1'",
+ })
+ assert.equal(r.code, 0)
+ })
+ it('allows nuget package', async () => {
+ const r = await runHook({
+ file_path: '/tmp/test.csproj',
+      new_string: '<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />',
+ })
+ assert.equal(r.code, 0)
+ })
+ it('allows github action', async () => {
+ const r = await runHook({
+ file_path: '/tmp/.github/workflows/ci.yml',
+ new_string: 'uses: actions/checkout@v4',
+ })
+ assert.equal(r.code, 0)
+ })
+
+ // Pass-through
+ it('passes non-dep files', async () => {
+ const r = await runHook({
+ file_path: '/tmp/main.rs',
+ new_string: 'fn main(){}',
+ })
+ assert.equal(r.code, 0)
+ })
+ it('passes non-Edit tools', async () => {
+ const r = await runHook(
+ { file_path: '/tmp/package.json' },
+ 'Read',
+ )
+ assert.equal(r.code, 0)
+ })
+
+ // Diff-aware
+ it('skips pre-existing deps in old_string', async () => {
+ const r = await runHook({
+ file_path: '/tmp/package.json',
+ old_string: '"lodash": "^4.17.21"',
+ new_string: '"lodash": "^4.17.21"',
+ })
+ assert.equal(r.code, 0)
+ })
+ it('checks only NEW deps when old_string present', async () => {
+ const r = await runHook({
+ file_path: '/tmp/package.json',
+ old_string: '"lodash": "^4.17.21"',
+ new_string: '"lodash": "^4.17.21", "bradleymeck": "^1.0.0"',
+ })
+ assert.equal(r.code, 2)
+ })
+
+ // Batch (multiple deps in one request)
+ it('checks multiple deps in batch (fast)', async () => {
+ const start = Date.now()
+ const r = await runHook({
+ file_path: '/tmp/package.json',
+ new_string: '"express": "^4", "lodash": "^4", "debug": "^4"',
+ })
+ assert.equal(r.code, 0)
+ assert.ok(
+ Date.now() - start < 5000,
+ 'batch should be fast',
+ )
+ })
+
+ // Write tool
+ it('works with Write tool', async () => {
+ const r = await runHook(
+ { file_path: '/tmp/package.json', content: '"lodash": "^4"' },
+ 'Write',
+ )
+ assert.equal(r.code, 0)
+ })
+
+ // Empty content
+ it('handles empty content', async () => {
+ const r = await runHook({
+ file_path: '/tmp/package.json',
+ new_string: '',
+ })
+ assert.equal(r.code, 0)
+ })
+
+ // Lockfile monitoring
+ it('checks lockfile deps (Cargo.lock)', async () => {
+ const r = await runHook({
+ file_path: '/tmp/Cargo.lock',
+ new_string: 'name = "serde"\nversion = "1.0.210"',
+ })
+ assert.equal(r.code, 0)
+ })
+
+ // Terraform
+ it('checks terraform module', async () => {
+ const r = await runHook({
+ file_path: '/tmp/main.tf',
+ new_string: 'source = "hashicorp/consul/aws"',
+ })
+ assert.equal(r.code, 0)
+ })
+})
diff --git a/.claude/hooks/check-new-deps/tsconfig.json b/.claude/hooks/check-new-deps/tsconfig.json
new file mode 100644
index 00000000..748e9587
--- /dev/null
+++ b/.claude/hooks/check-new-deps/tsconfig.json
@@ -0,0 +1,13 @@
+{
+ "compilerOptions": {
+ "noEmit": true,
+ "target": "esnext",
+ "module": "nodenext",
+ "moduleResolution": "nodenext",
+ "rewriteRelativeImportExtensions": true,
+ "erasableSyntaxOnly": true,
+ "verbatimModuleSyntax": true,
+ "strict": true,
+ "skipLibCheck": true
+ }
+}
diff --git a/.claude/settings.json b/.claude/settings.json
new file mode 100644
index 00000000..ac130fc1
--- /dev/null
+++ b/.claude/settings.json
@@ -0,0 +1,15 @@
+{
+ "hooks": {
+ "PreToolUse": [
+ {
+ "matcher": "Edit|Write",
+ "hooks": [
+ {
+ "type": "command",
+ "command": "node .claude/hooks/check-new-deps/index.mts"
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/.gitignore b/.gitignore
index 3f298222..14ec50ac 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,7 +21,9 @@ WIP
/.claude/*
!/.claude/agents/
!/.claude/commands/
+!/.claude/hooks/
!/.claude/ops/
+!/.claude/settings.json
!/.claude/skills/
# Environment files