diff --git a/.claude/agents/security-reviewer.md b/.claude/agents/security-reviewer.md index 1d35eabd..04d4b138 100644 --- a/.claude/agents/security-reviewer.md +++ b/.claude/agents/security-reviewer.md @@ -1,6 +1,6 @@ --- name: security-reviewer -description: Reviews findings from AgentShield + zizmor against socket-sdk-js's CLAUDE.md security rules and grades the result A-F. Spawned by the security-scan skill after the static scans run. +description: Reviews findings from AgentShield + zizmor against the project's CLAUDE.md security rules and grades the result A-F. Spawned by the security-scan skill after the static scans run. tools: Read, Grep, Glob, Bash(git:*), Bash(rg:*), Bash(grep:*), Bash(find:*), Bash(ls:*), Bash(pnpm exec agentshield:*), Bash(zizmor:*), Bash(command -v:*), Bash(cat:*), Bash(head:*), Bash(tail:*) --- @@ -18,7 +18,7 @@ Apply these rules from CLAUDE.md exactly: 1. **Secrets**: Hardcoded API keys, passwords, tokens, private keys in code or config 2. **Injection**: Command injection via shell: true or string interpolation in spawn/exec. Path traversal in file operations. -3. **Dependencies**: npx/dlx usage. Unpinned versions (^ or ~). Missing minimumReleaseAge bypass justification. # zizmor: documentation-checklist +3. **Dependencies**: npx/dlx usage. Unpinned versions (^ or ~). Missing soak-window bypass justification (pnpm-workspace.yaml `minimumReleaseAgeExclude`). # zizmor: documentation-checklist 4. **File operations**: fs.rm without safeDelete. process.chdir usage. fetch() usage (must use lib's httpRequest). 5. **GitHub Actions**: Unpinned action versions (must use full SHA). Secrets outside env blocks. Template injection from untrusted inputs. 6. **Error handling**: Sensitive data in error messages. Stack traces exposed to users. 
diff --git a/.claude/hooks/auth-rotation-reminder/README.md b/.claude/hooks/auth-rotation-reminder/README.md new file mode 100644 index 00000000..fb278842 --- /dev/null +++ b/.claude/hooks/auth-rotation-reminder/README.md @@ -0,0 +1,131 @@ +# auth-rotation-reminder + +Claude Code `Stop` hook that periodically logs you out of authenticated +CLIs (npm, pnpm, gcloud, vault, aws sso, docker, socket, …) so stale +long-lived tokens don't sit in your dotfiles or keychain for days. + +## Why + +Long-lived auth tokens live in well-known locations: `~/.npmrc`, +`~/.config/gh/hosts.yml`, `~/.config/gcloud/`, `~/.docker/config.json`. +A compromised dev workstation has a wide blast radius on those files. +Periodic auto-revocation tightens the window and forces explicit +re-authentication, which is itself a small phishing-defense moment +("did I really mean to publish?"). + +## Defaults + +- **Interval**: 1 hour. Set `SOCKET_AUTH_ROTATION_INTERVAL_HOURS=4` to + loosen, `=0` to run on every Stop event. +- **Mode**: auto-logout (the hook *acts*, not just warns). +- **Default skip-list**: `gh` is skipped because Claude Code itself + uses `gh` for `gh pr edit` etc. — auto-revoking it would break the + agent. +- **CI**: hook short-circuits when `CI` env var is set. + +## What's swept + +| id | display name | detect | logout | +| --------- | ----------------- | ----------------- | ------------------------------ | +| npm | npm | `npm whoami` | `npm logout` | +| pnpm | pnpm | `pnpm whoami` | `pnpm logout` | +| yarn | yarn | `yarn --version` | `yarn npm logout` | +| gcloud | gcloud | `gcloud auth list ... 
ACTIVE` | `gcloud auth revoke --all --quiet` | +| aws-sso | aws (sso) | `aws sts get-caller-identity` | `aws sso logout` | +| gh | gh (GitHub CLI) | `gh auth status` | `gh auth logout --hostname github.com` | +| vault | vault | `vault token lookup` | `vault token revoke -self` | +| docker | docker | `docker info \| grep Username:` | `docker logout` | +| socket | socket | `socket whoami` | `socket logout` | + +The hook never reads, prints, or compares any token value. Detection +is exit-code only; logout commands' output is suppressed except for +non-zero exit codes which surface as "logout failed" lines. + +## Snoozing + +Need to keep your auth alive for the next few hours (e.g. mid-publish)? +Drop a `.snooze` file with an ISO 8601 expiry on line 1. + +```bash +# Snooze for 4 hours, project-local +date -ud "+4 hours" +"%Y-%m-%dT%H:%M:%SZ" > .claude/auth-rotation.snooze + +# Snooze globally for 8 hours (applies to every repo) +mkdir -p ~/.claude/hooks/auth-rotation +date -ud "+8 hours" +"%Y-%m-%dT%H:%M:%SZ" > ~/.claude/hooks/auth-rotation/snooze +``` + +The hook **automatically deletes the file** once the timestamp is +reached. No manual cleanup needed. + +Snoozes that are malformed, empty, or unreadable are also auto-deleted +on the next run — fail-safe so a corrupted file can't permanently +disable rotation. + +`.claude/*.snooze` is gitignored; project-local snoozes never leak into +commits. + +## Skip-list + +Permanently skip a service: + +```bash +# Per-user: applies to every repo +mkdir -p ~/.claude/hooks/auth-rotation +echo gcloud >> ~/.claude/hooks/auth-rotation/services-skip + +# Per-repo: applies just to this checkout +echo vault >> .claude/auth-rotation.services-skip +``` + +One id per line. Lines starting with `#` are comments. Service ids +are stable — see the table above. + +## Disable temporarily + +```bash +SOCKET_AUTH_ROTATION_DISABLED=1 # any non-empty value +``` + +For pairing sessions, demos, etc. The hook short-circuits before +doing any work. 
+ +## Wiring + +In `.claude/settings.json`: + +```json +{ + "hooks": { + "Stop": [ + { + "hooks": [ + { + "type": "command", + "command": "node .claude/hooks/auth-rotation-reminder/index.mts" + } + ] + } + ] + } +} +``` + +## Tests + +```bash +cd .claude/hooks/auth-rotation-reminder +node --test test/*.test.mts +``` + +## Reusing the snooze convention + +Other hooks can adopt the same `.snooze` pattern. The convention is: + +- Filename: `.claude/.snooze` (project) or + `~/.claude/hooks//snooze` (global). +- Format: ISO 8601 expiry on line 1. Optional further lines ignored. +- `.gitignore`: `.claude/*.snooze`. +- Cleanup: hook auto-deletes expired files via `safeDelete`. +- The `checkSnoozes` / `tryUnlink` helpers in `index.mts` are easy to + copy into a sibling hook. diff --git a/.claude/hooks/auth-rotation-reminder/index.mts b/.claude/hooks/auth-rotation-reminder/index.mts new file mode 100644 index 00000000..06e05356 --- /dev/null +++ b/.claude/hooks/auth-rotation-reminder/index.mts @@ -0,0 +1,353 @@ +#!/usr/bin/env node +// Claude Code Stop hook — auth-rotation-reminder. +// +// Periodically logs you out of authenticated CLIs (npm, pnpm, gcloud, +// vault, aws sso, docker, socket, …) so stale long-lived tokens don't +// sit in dotfiles or keychains for days. +// +// Behavior on each Stop event: +// +// 1. Drain stdin (Stop hook delivers a JSON payload we don't need). +// 2. Skip if running in CI (CI auth has its own lifecycle). +// 3. Read both global + project-local `.snooze` files. Each carries +// an ISO 8601 expiry on line 1; if past, the file is auto-cleaned +// and the hook proceeds. If unexpired, the hook honors the snooze +// and exits silently. +// 4. Throttle via a state file: if the last successful run was within +// the configured interval (default 1h), exit silently. +// 5. For each service in services.mts: +// a. Skip if the binary is missing and `optional: true`. +// b. Run detectCmd. Skip if not authenticated. +// c. Run logoutCmd. 
Log to stderr via lib's logger. +// 6. Update the state file's mtime. +// +// The hook NEVER reads, prints, or compares any token value. Detection +// is exit-code only; logout commands' output is suppressed except for +// non-zero exit codes which surface as "logout failed" lines. +// +// Snooze file format (ISO 8601 timestamp on line 1): +// +// $ date -ud '+4 hours' +"%Y-%m-%dT%H:%M:%SZ" > .claude/auth-rotation.snooze +// +// Removed automatically once the timestamp is reached. +// +// Configuration env vars (all optional): +// +// SOCKET_AUTH_ROTATION_INTERVAL_HOURS default: 1 +// How long between actual auth-rotation runs (state-file throttle). +// Set to 0 to run on every Stop event (verbose). +// +// SOCKET_AUTH_ROTATION_DISABLED default: unset +// If set to a truthy value, skip the hook entirely. + +import { spawnSync } from 'node:child_process' +import { + existsSync, + mkdirSync, + readFileSync, + statSync, + utimesSync, + writeFileSync, +} from 'node:fs' +import { homedir } from 'node:os' +import path from 'node:path' +import process from 'node:process' + +import { safeDelete } from '@socketsecurity/lib/fs' +import { getDefaultLogger } from '@socketsecurity/lib/logger' + +import { DEFAULT_SKIP_IDS, SERVICES } from './services.mts' +import type { Service } from './services.mts' + +const logger = getDefaultLogger() +const PREFIX = '[auth-rotation-reminder]' + +// ── Paths ─────────────────────────────────────────────────────────── + +const STATE_DIR = path.join(homedir(), '.claude', 'hooks', 'auth-rotation') +const STATE_FILE = path.join(STATE_DIR, 'last-run') +const GLOBAL_SNOOZE = path.join(STATE_DIR, 'snooze') +const GLOBAL_SKIP_LIST = path.join(STATE_DIR, 'services-skip') + +// Project-local files live at the repo root next to .claude/. Claude +// Code spawns Stop hooks with the working directory set to the repo +// root so process.cwd() is reliable here. 
+const PROJECT_SNOOZE = path.join( + process.cwd(), + '.claude', + 'auth-rotation.snooze', +) +const PROJECT_SKIP_LIST = path.join( + process.cwd(), + '.claude', + 'auth-rotation.services-skip', +) + +// ── Snooze handling ───────────────────────────────────────────────── + +interface SnoozeStatus { + active: boolean + cleaned: string[] +} + +async function checkSnoozes(): Promise { + const status: SnoozeStatus = { active: false, cleaned: [] } + const cleanFile = async (file: string, reason: string): Promise => { + try { + await safeDelete(file) + status.cleaned.push(file) + } catch (e) { + logger.error( + `${PREFIX} safeDelete(${path.basename(file)}) failed (${reason}): ${(e as Error).message}`, + ) + } + } + for (const file of [GLOBAL_SNOOZE, PROJECT_SNOOZE]) { + if (!existsSync(file)) { + continue + } + let content = '' + try { + content = readFileSync(file, 'utf8').trim() + } catch { + await cleanFile(file, 'unreadable') + continue + } + // Empty content = legacy form, no expiry. Treat as expired now. + if (content.length === 0) { + await cleanFile(file, 'legacy (no expiry)') + continue + } + const firstLine = content.split('\n')[0]!.trim() + const expiry = Date.parse(firstLine) + if (Number.isNaN(expiry)) { + await cleanFile(file, 'malformed expiry') + continue + } + if (Date.now() >= expiry) { + await cleanFile(file, 'expired') + continue + } + // Unexpired snooze. Honor it. 
+ status.active = true + return status + } + return status +} + +// ── Skip-list ─────────────────────────────────────────────────────── + +function loadSkipIds(): Set { + const skipIds = new Set(DEFAULT_SKIP_IDS) + for (const file of [GLOBAL_SKIP_LIST, PROJECT_SKIP_LIST]) { + if (!existsSync(file)) { + continue + } + try { + const content = readFileSync(file, 'utf8') + for (const raw of content.split('\n')) { + const trimmed = raw.trim() + if (trimmed && !trimmed.startsWith('#')) { + skipIds.add(trimmed) + } + } + } catch { + // Ignore unreadable skip-list — better to over-rotate than fail closed. + } + } + return skipIds +} + +// ── Throttle ──────────────────────────────────────────────────────── + +function intervalMs(): number { + const raw = process.env['SOCKET_AUTH_ROTATION_INTERVAL_HOURS'] + const hours = raw === undefined ? 1 : Number.parseFloat(raw) + if (!Number.isFinite(hours) || hours < 0) { + return 60 * 60 * 1000 + } + return Math.round(hours * 60 * 60 * 1000) +} + +function withinThrottle(): boolean { + const interval = intervalMs() + if (interval === 0) { + return false + } + if (!existsSync(STATE_FILE)) { + return false + } + try { + const { mtimeMs } = statSync(STATE_FILE) + return Date.now() - mtimeMs < interval + } catch { + return false + } +} + +function touchStateFile(): void { + try { + mkdirSync(STATE_DIR, { recursive: true }) + if (!existsSync(STATE_FILE)) { + writeFileSync(STATE_FILE, '') + } + const now = new Date() + utimesSync(STATE_FILE, now, now) + } catch { + // Throttle is best-effort. Loss = hook runs more often than configured; + // not worth surfacing. + } +} + +// ── Service detection + logout ────────────────────────────────────── + +interface RotationResult { + loggedOut: string[] + failed: Array<{ service: string; reason: string }> + skippedMissing: string[] +} + +function isOnPath(binary: string): boolean { + // `command -v` is portable across sh/bash/zsh and exits 0 if found. 
+ const r = spawnSync('sh', ['-c', `command -v ${binary} >/dev/null 2>&1`], { + stdio: 'ignore', + }) + return r.status === 0 +} + +function isAuthenticated(s: Service): boolean { + const r = spawnSync(s.detectCmd[0]!, s.detectCmd.slice(1) as string[], { + stdio: 'ignore', + timeout: 5000, + }) + return r.status === 0 +} + +function runLogout(s: Service): { ok: boolean; reason?: string } { + const r = spawnSync(s.logoutCmd[0]!, s.logoutCmd.slice(1) as string[], { + stdio: 'ignore', + timeout: 10_000, + }) + if (r.status === 0) { + return { ok: true } + } + if (r.error) { + return { ok: false, reason: r.error.message } + } + return { ok: false, reason: `exit code ${r.status}` } +} + +function rotateAll(skipIds: Set): RotationResult { + const result: RotationResult = { + loggedOut: [], + failed: [], + skippedMissing: [], + } + for (const service of SERVICES) { + if (skipIds.has(service.id)) { + continue + } + if (!isOnPath(service.detectCmd[0]!)) { + if (!service.optional) { + result.skippedMissing.push(service.name) + } + continue + } + if (!isAuthenticated(service)) { + continue + } + const out = runLogout(service) + if (out.ok) { + result.loggedOut.push(service.name) + } else { + result.failed.push({ + service: service.name, + reason: out.reason ?? 
'unknown', + }) + } + } + return result +} + +// ── Output ────────────────────────────────────────────────────────── + +function reportSnoozeCleaned(cleaned: string[]): void { + for (const file of cleaned) { + logger.error(`${PREFIX} cleared expired snooze: ${file}`) + } +} + +function reportRotation(result: RotationResult): void { + const parts: string[] = [] + if (result.loggedOut.length > 0) { + parts.push( + `logged out of ${result.loggedOut.length} CLI(s): ${result.loggedOut.join(', ')}`, + ) + } + if (result.failed.length > 0) { + const failed = result.failed + .map(f => `${f.service} (${f.reason})`) + .join(', ') + parts.push(`logout failed: ${failed}`) + } + if (result.skippedMissing.length > 0) { + parts.push(`expected-but-missing: ${result.skippedMissing.join(', ')}`) + } + if (parts.length === 0) { + return + } + logger.error(`${PREFIX} ${parts.join('; ')}`) + logger.error( + ` Snooze for next 4h: date -ud "+4 hours" +"%Y-%m-%dT%H:%M:%SZ" > .claude/auth-rotation.snooze`, + ) +} + +// ── Main ──────────────────────────────────────────────────────────── + +async function run(): Promise { + if (process.env['CI']) { + return + } + if (process.env['SOCKET_AUTH_ROTATION_DISABLED']) { + return + } + const snooze = await checkSnoozes() + reportSnoozeCleaned(snooze.cleaned) + if (snooze.active) { + return + } + if (withinThrottle()) { + return + } + const skipIds = loadSkipIds() + const result = rotateAll(skipIds) + reportRotation(result) + touchStateFile() +} + +function main(): void { + // Drain stdin so Node doesn't keep us alive waiting on the Stop hook's + // JSON payload (we don't read its contents). 
+ process.stdin.resume() + process.stdin.on('data', () => {}) + process.stdin.on('end', () => { + run() + .catch(e => { + logger.error(`${PREFIX} unexpected error: ${(e as Error).message}`) + }) + .finally(() => { + process.exit(0) + }) + }) + if (process.stdin.readable === false) { + run() + .catch(e => { + logger.error(`${PREFIX} unexpected error: ${(e as Error).message}`) + }) + .finally(() => { + process.exit(0) + }) + } +} + +main() diff --git a/.claude/hooks/auth-rotation-reminder/package.json b/.claude/hooks/auth-rotation-reminder/package.json new file mode 100644 index 00000000..38fa6a49 --- /dev/null +++ b/.claude/hooks/auth-rotation-reminder/package.json @@ -0,0 +1,18 @@ +{ + "name": "hook-auth-rotation-reminder", + "private": true, + "type": "module", + "main": "./index.mts", + "exports": { + ".": "./index.mts" + }, + "dependencies": { + "@socketsecurity/lib": "catalog:" + }, + "devDependencies": { + "@types/node": "catalog:" + }, + "scripts": { + "test": "node --test test/*.test.mts" + } +} diff --git a/.claude/hooks/auth-rotation-reminder/services.mts b/.claude/hooks/auth-rotation-reminder/services.mts new file mode 100644 index 00000000..f0168d09 --- /dev/null +++ b/.claude/hooks/auth-rotation-reminder/services.mts @@ -0,0 +1,142 @@ +// Service catalog for auth-rotation-reminder. +// +// Each entry tells the hook how to detect whether a CLI is currently +// authenticated and how to log it out. `optional: true` means the hook +// silently skips the service if the binary isn't on PATH (most are +// optional — most devs have a subset of these installed). +// +// Detection commands MUST exit 0 when authenticated and non-zero when +// not. Output goes to /dev/null; the hook reads only the exit code. +// +// Logout commands run unconditionally when the hook is in auto-logout +// mode. They should be idempotent — re-running them on an already +// logged-out CLI is fine. + +export interface Service { + // Stable id used in skip-list files and error messages. 
Never rename + // without a deprecation cycle — devs encode these in their personal + // `.skip` lists. + id: string + // Display name for output. + name: string + // Command + args that exit 0 if logged in, non-zero otherwise. + detectCmd: readonly string[] + // Command + args that performs the logout. Must be idempotent. + logoutCmd: readonly string[] + // Skip silently when the binary isn't on PATH. False means the + // hook reports "binary missing" as a finding (rare — only for + // first-class fleet CLIs we expect every dev to have). + optional: boolean + // Optional human-readable doc URL surfaced when the hook reports the + // logout. Empty when no canonical doc page exists. + docUrl?: string +} + +// Default skip-list seeds. Devs can extend via the per-user +// `~/.claude/hooks/auth-rotation/services-skip` (one id per line) +// or per-repo `.claude/auth-rotation.services-skip` files. +// +// `gh` is seeded because Claude Code itself uses `gh` for `gh pr edit` +// etc. — auto-revoking it mid-session would break the agent. +export const DEFAULT_SKIP_IDS = ['gh'] as const + +export const SERVICES: readonly Service[] = [ + { + id: 'npm', + name: 'npm', + detectCmd: ['npm', 'whoami'], + logoutCmd: ['npm', 'logout'], + optional: true, + docUrl: 'https://docs.npmjs.com/cli/v11/commands/npm-logout', + }, + { + id: 'pnpm', + name: 'pnpm', + detectCmd: ['pnpm', 'whoami'], + logoutCmd: ['pnpm', 'logout'], + optional: false, + docUrl: 'https://pnpm.io/id/11.x/cli/logout', + }, + { + id: 'yarn', + name: 'yarn', + // Yarn Berry's logout lives under `npm` namespace; Yarn Classic's + // is bare. We try Berry first (the modern default), fall back to + // Classic. Detection is the same: `npm whoami` from inside a + // yarn-managed registry. Yarn doesn't expose a portable whoami, + // so we approximate by checking for a yarn auth token in + // `~/.yarnrc.yml` via grep — too fragile to ship; use logout-only + // (idempotent: clears nothing if nothing's there). 
+ detectCmd: ['yarn', '--version'], + logoutCmd: ['yarn', 'npm', 'logout'], + optional: true, + }, + { + id: 'gcloud', + name: 'gcloud', + // `gcloud auth list` exits 0 always; we check whether any non-empty + // active account is reported. Wrap with sh -c to chain. + detectCmd: [ + 'sh', + '-c', + 'gcloud auth list --filter=status:ACTIVE --format="value(account)" 2>/dev/null | grep -q .', + ], + logoutCmd: ['gcloud', 'auth', 'revoke', '--all', '--quiet'], + optional: true, + docUrl: 'https://cloud.google.com/sdk/gcloud/reference/auth/revoke', + }, + { + id: 'aws-sso', + name: 'aws (sso)', + // `aws sts get-caller-identity` succeeds when authenticated. + // sts is the universal probe across all AWS auth flavors. + detectCmd: ['aws', 'sts', 'get-caller-identity'], + // `aws sso logout` only clears SSO cache. For non-SSO creds, the + // dev would have to remove `~/.aws/credentials` themselves; we + // don't touch that file because it might hold long-lived keys + // intentionally. SSO-only is the conservative default. + logoutCmd: ['aws', 'sso', 'logout'], + optional: true, + }, + { + id: 'gh', + name: 'gh (GitHub CLI)', + detectCmd: ['gh', 'auth', 'status'], + logoutCmd: ['gh', 'auth', 'logout', '--hostname', 'github.com'], + optional: true, + docUrl: 'https://cli.github.com/manual/gh_auth_logout', + }, + { + id: 'vault', + name: 'vault', + detectCmd: ['vault', 'token', 'lookup'], + // `token revoke -self` revokes the active token; survives the + // logout safely (re-auth via `vault login` next session). + logoutCmd: ['vault', 'token', 'revoke', '-self'], + optional: true, + }, + { + id: 'docker', + name: 'docker', + // No portable "am I logged in" — `docker info` returns mixed data. + // Approximate via `docker system info` filter. + detectCmd: [ + 'sh', + '-c', + 'docker info 2>/dev/null | grep -q "^ Username:"', + ], + // Without a registry arg, `docker logout` clears the default index. 
+ logoutCmd: ['docker', 'logout'], + optional: true, + }, + { + id: 'socket', + name: 'socket', + // `socket whoami` (when present in the cli) is the canonical probe. + // The cli emits exit 0 when authenticated. + detectCmd: ['socket', 'whoami'], + // `socket logout` clears the local API token from settings. + logoutCmd: ['socket', 'logout'], + optional: true, + }, +] as const diff --git a/.claude/hooks/auth-rotation-reminder/test/auth-rotation-reminder.test.mts b/.claude/hooks/auth-rotation-reminder/test/auth-rotation-reminder.test.mts new file mode 100644 index 00000000..4582a1f9 --- /dev/null +++ b/.claude/hooks/auth-rotation-reminder/test/auth-rotation-reminder.test.mts @@ -0,0 +1,162 @@ +import { spawn } from 'node:child_process' +import { existsSync, mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs' +import { tmpdir } from 'node:os' +import path from 'node:path' +import { fileURLToPath } from 'node:url' +import { test } from 'node:test' +import assert from 'node:assert/strict' + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) +const HOOK = path.resolve(__dirname, '..', 'index.mts') + +interface Env { + [key: string]: string +} + +function runHook(opts: { + cwd?: string + env?: Env +} = {}): Promise<{ code: number; stderr: string }> { + return new Promise((resolve, reject) => { + const child = spawn(process.execPath, [HOOK], { + cwd: opts.cwd ?? process.cwd(), + stdio: ['pipe', 'ignore', 'pipe'], + env: { + // Default to a sentinel CI value the hook short-circuits on, + // unless the caller overrides. Most tests want the early-exit + // path so they don't actually run logout commands. + ...process.env, + ...opts.env, + }, + }) + let stderr = '' + child.stderr.on('data', d => { + stderr += d.toString() + }) + child.on('error', reject) + child.on('exit', code => { + resolve({ code: code ?? 
-1, stderr }) + }) + child.stdin.end('{}\n') + }) +} + +function makeRepo(): string { + const dir = mkdtempSync(path.join(tmpdir(), 'auth-rotation-test-')) + mkdirSync(path.join(dir, '.claude'), { recursive: true }) + return dir +} + +test('exits 0 silently when CI env var is set', async () => { + const repo = makeRepo() + try { + const { code, stderr } = await runHook({ + cwd: repo, + env: { CI: '1' }, + }) + assert.equal(code, 0) + assert.equal(stderr, '', `expected no output in CI; got: ${stderr}`) + } finally { + rmSync(repo, { recursive: true, force: true }) + } +}) + +test('exits 0 silently when SOCKET_AUTH_ROTATION_DISABLED is set', async () => { + const repo = makeRepo() + try { + const { code, stderr } = await runHook({ + cwd: repo, + env: { + CI: '', + SOCKET_AUTH_ROTATION_DISABLED: '1', + }, + }) + assert.equal(code, 0) + assert.equal(stderr, '') + } finally { + rmSync(repo, { recursive: true, force: true }) + } +}) + +test('honors a project-local snooze with future expiry', async () => { + const repo = makeRepo() + try { + const expiry = new Date(Date.now() + 60 * 60 * 1000).toISOString() + writeFileSync(path.join(repo, '.claude', 'auth-rotation.snooze'), expiry) + const { code, stderr } = await runHook({ + cwd: repo, + env: { CI: '' }, + }) + assert.equal(code, 0) + // Hook should NOT report cleanup of an unexpired snooze. + assert.ok( + !stderr.includes('cleared expired snooze'), + `hook cleared a fresh snooze: ${stderr}`, + ) + } finally { + rmSync(repo, { recursive: true, force: true }) + } +}) + +test('auto-cleans expired project-local snooze and proceeds', async () => { + const repo = makeRepo() + const snoozeFile = path.join(repo, '.claude', 'auth-rotation.snooze') + try { + const expiry = new Date(Date.now() - 60 * 60 * 1000).toISOString() + writeFileSync(snoozeFile, expiry) + const { code, stderr } = await runHook({ + cwd: repo, + // Force CI so the hook short-circuits AFTER snooze handling + // (which is what we're testing). 
+ env: { CI: '' }, + }) + assert.equal(code, 0) + // We can't easily assert on snooze cleanup messaging without + // also forcing the hook to do real auth detection. The strong + // assertion is that the file is gone afterward. + assert.ok( + !existsSync(snoozeFile), + 'expired snooze file should have been deleted', + ) + } finally { + rmSync(repo, { recursive: true, force: true }) + } +}) + +test('auto-cleans malformed snooze content', async () => { + const repo = makeRepo() + const snoozeFile = path.join(repo, '.claude', 'auth-rotation.snooze') + try { + writeFileSync(snoozeFile, 'not-an-iso-timestamp\n') + const { code } = await runHook({ + cwd: repo, + env: { CI: '' }, + }) + assert.equal(code, 0) + assert.ok( + !existsSync(snoozeFile), + 'malformed snooze file should have been deleted', + ) + } finally { + rmSync(repo, { recursive: true, force: true }) + } +}) + +test('auto-cleans empty (legacy) snooze file', async () => { + const repo = makeRepo() + const snoozeFile = path.join(repo, '.claude', 'auth-rotation.snooze') + try { + writeFileSync(snoozeFile, '') + const { code } = await runHook({ + cwd: repo, + env: { CI: '' }, + }) + assert.equal(code, 0) + assert.ok( + !existsSync(snoozeFile), + 'empty (legacy) snooze file should have been deleted', + ) + } finally { + rmSync(repo, { recursive: true, force: true }) + } +}) diff --git a/.claude/hooks/auth-rotation-reminder/tsconfig.json b/.claude/hooks/auth-rotation-reminder/tsconfig.json new file mode 100644 index 00000000..53c5c847 --- /dev/null +++ b/.claude/hooks/auth-rotation-reminder/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "declarationMap": false, + "erasableSyntaxOnly": true, + "module": "nodenext", + "moduleResolution": "nodenext", + "noEmit": true, + "rewriteRelativeImportExtensions": true, + "skipLibCheck": true, + "sourceMap": false, + "strict": true, + "target": "esnext", + "verbatimModuleSyntax": true + } +} diff --git a/.claude/hooks/logger-guard/README.md 
b/.claude/hooks/logger-guard/README.md new file mode 100644 index 00000000..55ba3ece --- /dev/null +++ b/.claude/hooks/logger-guard/README.md @@ -0,0 +1,54 @@ +# logger-guard + +Claude Code `PreToolUse` hook that blocks `Edit`/`Write` tool calls +introducing direct stream writes (`process.stderr.write`, +`process.stdout.write`, `console.log/error/warn/info/debug`) into +source files. + +## Why + +Source code uses `getDefaultLogger()` from `@socketsecurity/lib/logger` +for all output. Direct stream writes bypass: + +- Color/theme handling +- Indentation tracking +- Stream redirection in tests +- Counter increments used by spinners and progress bars + +so they produce inconsistent output that breaks layout-sensitive +workflows (spinner clears, footer rendering). + +## Scope + +- Only fires on `Edit` / `Write` tools. +- Only inspects files matching `*.{ts,mts,tsx,cts}` under repo + source. Hooks (`.claude/hooks/`), git-hooks (`.git-hooks/`), build + scripts (`scripts/`), tests, fixtures, and external/vendored code + are exempt. +- Lines containing `# socket-hook: allow logger` are exempt + (canonical opt-out). The bare `# socket-hook: allow` form also + works. +- Lines that look like documentation (`*` / `//` / `#` comments, + JSDoc tags, fully-backticked code spans) are exempt. + +## Suggested replacements + +| Direct call | Logger equivalent | +| --- | --- | +| `process.stderr.write(s)` | `logger.error(s)` | +| `process.stdout.write(s)` | `logger.info(s)` | +| `console.error(...)` | `logger.error(...)` | +| `console.warn(...)` | `logger.warn(...)` | +| `console.info(...)` | `logger.info(...)` | +| `console.debug(...)` | `logger.debug(...)` | +| `console.log(...)` | `logger.info(...)` | + +The hook surfaces the rewrite as a `Fix:` line per hit so the agent +can apply it directly. 
+ +## Tests + +```bash +cd .claude/hooks/logger-guard +node --test test/*.test.mts +``` diff --git a/.claude/hooks/logger-guard/index.mts b/.claude/hooks/logger-guard/index.mts new file mode 100644 index 00000000..f7ad0f85 --- /dev/null +++ b/.claude/hooks/logger-guard/index.mts @@ -0,0 +1,261 @@ +#!/usr/bin/env node +// Claude Code PreToolUse hook — logger-guard. +// +// Blocks Edit/Write tool calls that would introduce direct calls to +// `process.stderr.write`, `process.stdout.write`, `console.log`, +// `console.error`, `console.warn`, `console.info`, or `console.debug` +// in source files. Exit code 2 makes Claude Code refuse the tool call +// so the diff never lands. The model sees the rejection reason on +// stderr and retries using the lib's logger. +// +// Why this rule: +// +// The fleet's source code uses `getDefaultLogger()` from +// `@socketsecurity/lib/logger` for every output. Direct stream writes +// bypass: +// - Color/theme handling +// - Indentation tracking +// - Stream redirection in tests +// - Counter increments used by spinners +// so they produce inconsistent output that breaks layout-sensitive +// workflows (spinner clears, footer rendering). +// +// Scope: +// +// - Fires only on `Edit` and `Write` tool calls. +// - Only inspects files under `src/` with .ts/.mts/.tsx/.cts +// extensions. Hooks (.claude/hooks/), git-hooks (.git-hooks/), +// scripts (scripts/), tests, fixtures, and external/ vendored code +// are exempt — see EXEMPT_PATH_PATTERNS. +// - Lines marked `# socket-hook: allow logger` are exempt (canonical +// opt-out marker, same as path-guard / token-guard / npx-guard). +// - Lines that look like documentation (comment lines, JSDoc tags, +// fully backticked code spans) are exempt — handled by the shared +// `looksLikeDocumentation` heuristic in `_helpers.mts`. +// +// The hook fails OPEN on its own bugs (exit 0 + stderr log) so a bad +// hook deploy can't brick the session. 
+ +import process from 'node:process' + +// Files exempt from the rule. Comments explain why each is excluded. +const EXEMPT_PATH_PATTERNS: RegExp[] = [ + // Hook code itself runs early in the lifecycle and may need to log + // to stderr before the lib is fully resolvable. Treat hooks as + // "system code" with their own conventions. + /\.claude\/hooks\//, + // Git hooks (.git-hooks/_helpers.mts, pre-commit, etc.) run before + // workspace deps are guaranteed to be installed. + /\.git-hooks\//, + // Build scripts often produce direct stdout for human-readable + // build output (progress, summary). Migrate these case-by-case + // outside of this hook's scope. + /(^|\/)scripts\//, + // Test files commonly use console.* to capture / assert output. + /\.(test|spec)\.(m?[jt]s|tsx?|cts|mts)$/, + /(^|\/)tests?\//, + /(^|\/)fixtures\//, + // Vendored upstream sources — never modified for local conventions. + /(^|\/)external\//, + /(^|\/)vendor\//, + /(^|\/)upstream\//, + // The hook itself. + /\.claude\/hooks\/logger-guard\//, +] + +const LOGGER_LEAK_RE = + /\b(process\.std(?:err|out)\.write|console\.(?:log|error|warn|info|debug))\s*\(/ + +const COMMENT_LINE_RE = /^\s*(\*|\/\/|#)/ +const JSDOC_TAG_RE = /@(example|param|returns?|see|link)\b/ +const SOCKET_HOOK_MARKER_RE = /#\s*socket-hook:\s*allow(?:\s+([\w-]+))?/ + +function isMarkerSuppressed(line: string): boolean { + const m = line.match(SOCKET_HOOK_MARKER_RE) + if (!m) { + return false + } + // No specific rule named → blanket allow. Targeted form must name + // 'logger' to suppress this scanner. + return !m[1] || m[1] === 'logger' +} + +function isInsideBackticks(line: string): boolean { + // Find every backtick-delimited span on the line and test if every + // logger-leak match sits within one. Conservative: any match outside + // a backtick span fails the check. 
+ const spans: Array<[number, number]> = [] + for (let i = 0; i < line.length; i += 1) { + if (line[i] === '`') { + const end = line.indexOf('`', i + 1) + if (end < 0) { + break + } + spans.push([i, end]) + i = end + } + } + if (spans.length === 0) { + return false + } + const re = new RegExp(LOGGER_LEAK_RE.source, 'g') + let m: RegExpExecArray | null + while ((m = re.exec(line)) !== null) { + const start = m.index + const end = start + m[0].length + const inside = spans.some(([s, e]) => start > s && end <= e) + if (!inside) { + return false + } + } + return true +} + +function looksLikeDocumentation(line: string): boolean { + if (isMarkerSuppressed(line)) { + return true + } + if (COMMENT_LINE_RE.test(line)) { + return true + } + if (JSDOC_TAG_RE.test(line)) { + return true + } + if (isInsideBackticks(line)) { + return true + } + return false +} + +function suggestReplacement(line: string): string { + return line + .replace(/\bprocess\.stderr\.write\s*\(/g, 'logger.error(') + .replace(/\bprocess\.stdout\.write\s*\(/g, 'logger.info(') + .replace(/\bconsole\.error\s*\(/g, 'logger.error(') + .replace(/\bconsole\.warn\s*\(/g, 'logger.warn(') + .replace(/\bconsole\.info\s*\(/g, 'logger.info(') + .replace(/\bconsole\.debug\s*\(/g, 'logger.debug(') + .replace(/\bconsole\.log\s*\(/g, 'logger.info(') +} + +interface Hit { + lineNumber: number + line: string + suggested: string +} + +function scan(source: string): Hit[] { + const hits: Hit[] = [] + const lines = source.split('\n') + for (let i = 0; i < lines.length; i += 1) { + const line = lines[i]! 
+    if (!LOGGER_LEAK_RE.test(line)) {
+      continue
+    }
+    if (looksLikeDocumentation(line)) {
+      continue
+    }
+    hits.push({
+      lineNumber: i + 1,
+      line,
+      suggested: suggestReplacement(line),
+    })
+  }
+  return hits
+}
+
+function isInScope(filePath: string): boolean {
+  if (!filePath) {
+    return false
+  }
+  if (!/\.(m?ts|tsx|cts)$/.test(filePath)) {
+    return false
+  }
+  for (const re of EXEMPT_PATH_PATTERNS) {
+    if (re.test(filePath)) {
+      return false
+    }
+  }
+  return true
+}
+
+function readStdin(): Promise<string> {
+  return new Promise(resolve => {
+    let buf = ''
+    process.stdin.setEncoding('utf8')
+    process.stdin.on('data', chunk => (buf += chunk))
+    process.stdin.on('end', () => resolve(buf))
+  })
+}
+
+interface ToolInput {
+  tool_name?: string
+  tool_input?: {
+    file_path?: string
+    new_string?: string
+    content?: string
+  }
+}
+
+function emitBlock(filePath: string, hits: Hit[]): void {
+  // Hook itself logs to stderr (no lib import at module load — keep
+  // hooks self-contained for fast startup). The rule only applies to
+  // source code; this output is informational for the agent.
+  const out: string[] = []
+  out.push('')
+  out.push('[logger-guard] Blocked: direct stream write found')
+  out.push(
+    '  Use `getDefaultLogger()` from `@socketsecurity/lib/logger` instead.',
+  )
+  out.push(`  File: ${filePath}`)
+  for (const h of hits.slice(0, 3)) {
+    out.push(`  Line ${h.lineNumber}: ${h.line.trim()}`)
+    out.push(`    Fix: ${h.suggested.trim()}`)
+  }
+  if (hits.length > 3) {
+    out.push(`  …and ${hits.length - 3} more.`)
+  }
+  out.push(
+    '  Opt-out for one line (rare): append `// # socket-hook: allow logger`.',
+  )
+  out.push('')
+  process.stderr.write(out.join('\n'))
+}
+
+async function main(): Promise<void> {
+  const raw = await readStdin()
+  if (!raw) {
+    return
+  }
+  let payload: ToolInput
+  try {
+    payload = JSON.parse(raw) as ToolInput
+  } catch {
+    return
+  }
+  if (payload.tool_name !== 'Edit' && payload.tool_name !== 'Write') {
+    return
+  }
+  const filePath = payload.tool_input?.file_path ?? ''
+  if (!isInScope(filePath)) {
+    return
+  }
+  const source =
+    payload.tool_input?.new_string ?? payload.tool_input?.content ?? ''
+  if (!source) {
+    return
+  }
+  const hits = scan(source)
+  if (hits.length === 0) {
+    return
+  }
+  emitBlock(filePath, hits)
+  process.exitCode = 2
+}
+
+main().catch(e => {
+  // Fail open on hook bugs.
+ process.stderr.write( + `[logger-guard] hook error (continuing): ${(e as Error).message}\n`, + ) +}) diff --git a/.claude/hooks/logger-guard/package.json b/.claude/hooks/logger-guard/package.json new file mode 100644 index 00000000..7bc46780 --- /dev/null +++ b/.claude/hooks/logger-guard/package.json @@ -0,0 +1,15 @@ +{ + "name": "hook-logger-guard", + "private": true, + "type": "module", + "main": "./index.mts", + "exports": { + ".": "./index.mts" + }, + "devDependencies": { + "@types/node": "catalog:" + }, + "scripts": { + "test": "node --test test/*.test.mts" + } +} diff --git a/.claude/hooks/logger-guard/test/logger-guard.test.mts b/.claude/hooks/logger-guard/test/logger-guard.test.mts new file mode 100644 index 00000000..a509b365 --- /dev/null +++ b/.claude/hooks/logger-guard/test/logger-guard.test.mts @@ -0,0 +1,171 @@ +import { spawn } from 'node:child_process' +import path from 'node:path' +import { fileURLToPath } from 'node:url' +import { test } from 'node:test' +import assert from 'node:assert/strict' + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) +const HOOK = path.resolve(__dirname, '..', 'index.mts') + +interface Payload { + tool_name: 'Edit' | 'Write' | string + tool_input: { + file_path?: string + new_string?: string + content?: string + } +} + +function runHook(payload: Payload): Promise<{ code: number; stderr: string }> { + return new Promise((resolve, reject) => { + const child = spawn(process.execPath, [HOOK], { + stdio: ['pipe', 'ignore', 'pipe'], + }) + let stderr = '' + child.stderr.on('data', d => { + stderr += d.toString() + }) + child.on('error', reject) + child.on('exit', code => { + resolve({ code: code ?? 
-1, stderr }) + }) + child.stdin.end(JSON.stringify(payload)) + }) +} + +test('blocks console.log in src/ .ts files', async () => { + const { code, stderr } = await runHook({ + tool_name: 'Write', + tool_input: { + file_path: 'src/foo.ts', + content: 'export function foo() { console.log("hi") }', + }, + }) + assert.equal(code, 2, `expected exit 2; got ${code}; stderr=${stderr}`) + assert.ok(stderr.includes('logger-guard')) + assert.ok(stderr.includes('Fix:')) + assert.ok(stderr.includes('logger.info')) +}) + +test('blocks process.stderr.write in src/ .mts files', async () => { + const { code, stderr } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: 'src/utils/output.mts', + new_string: 'process.stderr.write("oops\\n")', + }, + }) + assert.equal(code, 2) + assert.ok(stderr.includes('logger.error(')) +}) + +test('allows hooks themselves to use process.stderr.write', async () => { + const { code, stderr } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: '.claude/hooks/some-hook/index.mts', + new_string: 'process.stderr.write("ok\\n")', + }, + }) + assert.equal(code, 0, `expected exit 0; got ${code}; stderr=${stderr}`) +}) + +test('allows scripts/ to use console.log', async () => { + const { code } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: 'scripts/build.mts', + new_string: 'console.log("build complete")', + }, + }) + assert.equal(code, 0) +}) + +test('allows tests to use console.log', async () => { + const { code } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: 'src/utils/foo.test.mts', + new_string: 'console.log("debug")', + }, + }) + assert.equal(code, 0) +}) + +test('respects # socket-hook: allow logger marker', async () => { + const { code } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: 'src/foo.ts', + new_string: + 'const x = 1; console.error("legacy") // # socket-hook: allow logger', + }, + }) + assert.equal(code, 0) +}) + +test('respects bare # 
socket-hook: allow marker', async () => { + const { code } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: 'src/foo.ts', + new_string: 'console.warn("a") // # socket-hook: allow', + }, + }) + assert.equal(code, 0) +}) + +test('does not flag JSDoc examples', async () => { + const { code } = await runHook({ + tool_name: 'Write', + tool_input: { + file_path: 'src/foo.ts', + content: + '/**\n * @example\n * console.log("usage")\n */\nexport const foo = 1', + }, + }) + assert.equal(code, 0) +}) + +test('does not flag comment lines', async () => { + const { code } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: 'src/foo.ts', + new_string: '// previously: console.log("debug")', + }, + }) + assert.equal(code, 0) +}) + +test('does not flag content fully inside a single backtick span', async () => { + const { code } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: 'src/foo.ts', + // Single-line markdown-style backtick span — the inner content + // is documentation, not real code. 
+ new_string: 'const note = `use logger.info() not console.log()`', + }, + }) + assert.equal(code, 0) +}) + +test('does not run on non-Edit/Write tools', async () => { + const { code } = await runHook({ + tool_name: 'Bash', + tool_input: { content: 'console.log("nope")' }, + }) + assert.equal(code, 0) +}) + +test('does not run on .js files (out of scope)', async () => { + const { code } = await runHook({ + tool_name: 'Edit', + tool_input: { + file_path: 'src/foo.js', + new_string: 'console.log("legacy")', + }, + }) + assert.equal(code, 0) +}) diff --git a/.claude/hooks/logger-guard/tsconfig.json b/.claude/hooks/logger-guard/tsconfig.json new file mode 100644 index 00000000..53c5c847 --- /dev/null +++ b/.claude/hooks/logger-guard/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "declarationMap": false, + "erasableSyntaxOnly": true, + "module": "nodenext", + "moduleResolution": "nodenext", + "noEmit": true, + "rewriteRelativeImportExtensions": true, + "skipLibCheck": true, + "sourceMap": false, + "strict": true, + "target": "esnext", + "verbatimModuleSyntax": true + } +} diff --git a/.claude/hooks/path-guard/README.md b/.claude/hooks/path-guard/README.md index 523a31b4..2dee9c19 100644 --- a/.claude/hooks/path-guard/README.md +++ b/.claude/hooks/path-guard/README.md @@ -1,6 +1,6 @@ # path-guard -Claude Code `PreToolUse` hook that refuses `Edit`/`Write` tool calls that would *construct* a multi-segment build/output path inline in a `.mts` or `.cts` file. Mandatory across the Socket fleet — every repo ships this file byte-for-byte via `scripts/sync-scaffolding.mjs`. +Claude Code `PreToolUse` hook that refuses `Edit`/`Write` tool calls that would *construct* a multi-segment build/output path inline in a `.mts` or `.cts` file. Mandatory across the Socket fleet — every repo ships this file byte-for-byte via `scripts/sync-scaffolding.mts`. 
**Mantra: 1 path, 1 reference.** @@ -57,10 +57,10 @@ Adding a new detection pattern: update `STAGE_SEGMENTS` (or `KNOWN_SIBLING_PACKA ## Updating across the fleet -This file is in `IDENTICAL_FILES` in `scripts/sync-scaffolding.mjs` (in `socket-repo-template`). After editing, run from `socket-repo-template`: +This file is in `IDENTICAL_FILES` in `scripts/sync-scaffolding.mts` (in `socket-repo-template`). After editing, run from `socket-repo-template`: ```bash -node scripts/sync-scaffolding.mjs --all --fix +node scripts/sync-scaffolding.mts --all --fix ``` to propagate the change to every fleet repo. diff --git a/.claude/hooks/path-guard/segments.mts b/.claude/hooks/path-guard/segments.mts index 891d0b8b..e2e4f4b8 100644 --- a/.claude/hooks/path-guard/segments.mts +++ b/.claude/hooks/path-guard/segments.mts @@ -6,7 +6,7 @@ // consumers import from here so they can never drift apart. // // Synced byte-identically across the Socket fleet via -// socket-repo-template/scripts/sync-scaffolding.mjs (IDENTICAL_FILES). +// socket-repo-template/scripts/sync-scaffolding.mts (IDENTICAL_FILES). // When adding a new stage/build-root/mode/sibling, edit this file in // the template and re-sync. @@ -41,7 +41,7 @@ export const MODE_SEGMENTS = new Set(['dev', 'prod', 'shared']) // Socket fleet — the gate is byte-identical via sync-scaffolding, so // listing every fleet package keeps Rule B firing in any repo. When a // new package joins the workspace, add it here and propagate via -// `node scripts/sync-scaffolding.mjs --all --fix` from +// `node scripts/sync-scaffolding.mts --all --fix` from // socket-repo-template. 
export const KNOWN_SIBLING_PACKAGES = new Set([ // socket-btm diff --git a/.claude/hooks/stale-process-sweeper/README.md b/.claude/hooks/stale-process-sweeper/README.md new file mode 100644 index 00000000..38d96674 --- /dev/null +++ b/.claude/hooks/stale-process-sweeper/README.md @@ -0,0 +1,74 @@ +# stale-process-sweeper + +Claude Code `Stop` hook that sweeps stale Node test/build worker +processes at turn-end, before they pile up across turns and exhaust +system memory. + +## Why + +Vitest's `forks` pool spawns one Node worker per CPU. When the parent +runner exits abnormally — `Bash` timeout, `SIGINT` from the user, +pre-commit hook crash — the workers stay alive holding 80–100 MB +each. After a few interrupted runs the host has gigabytes of +abandoned processes. + +The sweeper finds those processes (matched by command-line pattern) +that have lost their parent, and sends them `SIGTERM`. A still-living +parent means the worker is part of a real, in-progress run, and the +sweeper leaves it alone. + +## What's swept + +| Pattern | Source | +| --- | --- | +| `vitest/dist/workers/(forks\|threads)` | Vitest worker pool | +| `vitest/dist/(cli\|node).[mc]?js` | Orphaned Vitest parent runners | +| `\btsgo\b` | TypeScript Go-based type checker | +| `type-coverage/bin/type-coverage` | Type coverage tool | +| `esbuild/(bin\|lib)/.*\bservice\b` | esbuild's daemon service | + +## What's not swept + +- Anything spawned by a still-living shell (PPID alive) +- The Claude Code process itself or its parent terminal +- Anything outside the pattern list + +## Wiring + +In `.claude/settings.json`: + +```json +{ + "hooks": { + "Stop": [ + { + "hooks": [ + { + "type": "command", + "command": "node .claude/hooks/stale-process-sweeper/index.mts" + } + ] + } + ] + } +} +``` + +## Output + +Silent on the happy path (no orphans found). 
When something is reaped: + +``` +[stale-process-sweeper] reaped 14 stale worker(s), ~1120MB freed: +vitest-worker=29240(95MB), vitest-worker=33278(93MB), … +``` + +The line goes to stderr. Stop-hook output is shown to the user, not +the model — useful diagnostic, doesn't pollute Claude's context. + +## Tests + +```bash +cd .claude/hooks/stale-process-sweeper +node --test test/*.test.mts +``` diff --git a/.claude/hooks/stale-process-sweeper/index.mts b/.claude/hooks/stale-process-sweeper/index.mts new file mode 100644 index 00000000..4e9923e5 --- /dev/null +++ b/.claude/hooks/stale-process-sweeper/index.mts @@ -0,0 +1,214 @@ +#!/usr/bin/env node +// Claude Code Stop hook — stale-process-sweeper. +// +// Fires at turn-end. Finds Node test/build worker processes that the +// session left behind (test runner crashed mid-run, hook timed out, +// user interrupted `Bash`, etc.) and kills them so they don't pile up +// across turns and exhaust system memory. +// +// What's swept: +// - vitest workers (`vitest/dist/workers/forks` and the threads pool) +// - vitest itself (orphan parent runners that survived a SIGINT) +// - tsgo / tsc type-check daemons +// - type-coverage workers +// - esbuild service processes +// +// What's NOT swept: +// - Anything spawned by a still-living shell (PPID alive) +// - Anything matching the user's editors / IDEs / terminals +// - The Claude Code process itself +// +// The hook is fast (one `ps` call + a few regex matches + a couple of +// `kill -0` probes) and silent on the happy path. It only writes to +// stderr when it actually killed something — that's a useful signal. +// +// Stop hooks receive JSON on stdin (we don't read it; the body +// shape is irrelevant to our work) and exit code is advisory. + +import { spawnSync } from 'node:child_process' +import process from 'node:process' + +// Process-name patterns that indicate a stale test/build worker. 
+// Must be specific enough that real user processes (a normal `node` +// invocation, an editor's language server) don't match. +const STALE_PATTERNS: Array<{ name: string; rx: RegExp }> = [ + // Vitest worker pools — both `forks` (process-per-worker) and the + // path the threads pool uses when isolation is requested. The + // canonical leak: Vitest spawns N workers, parent crashes/SIGINTs, + // workers stay alive holding 80–100MB each. + { + name: 'vitest-worker', + rx: /vitest\/dist\/workers\/(forks|threads)/, + }, + // Vitest parent runner that survived its own children's exit. + // Matches `node ... vitest/dist/cli ... run` etc. + { + name: 'vitest-runner', + rx: /vitest\/dist\/(cli|node)\.[mc]?js/, + }, + // tsgo / tsc daemons. `tsgo` is the new Go-based type checker; + // `tsc --watch` daemons can also linger. + { + name: 'tsgo', + rx: /\btsgo\b/, + }, + // type-coverage runs as a separate process and sometimes outlives + // its CI step. + { + name: 'type-coverage', + rx: /type-coverage\/bin\/type-coverage/, + }, + // esbuild's daemon service helper. + { + name: 'esbuild-service', + rx: /esbuild\/(bin|lib)\/.*\bservice\b/, + }, +] + +interface ProcRow { + pid: number + ppid: number + rss: number + command: string +} + +function listProcesses(): ProcRow[] { + // -A: all processes, -o: custom format, no truncation. macOS + Linux + // both support this exact form. Windows isn't supported (Stop hook + // is unix-only in practice for socket-* repos). + const result = spawnSync( + 'ps', + ['-A', '-o', 'pid=,ppid=,rss=,command='], + { encoding: 'utf8' }, + ) + if (result.status !== 0 || !result.stdout) { + return [] + } + const rows: ProcRow[] = [] + for (const line of result.stdout.split('\n')) { + if (!line.trim()) { + continue + } + // Split into [pid, ppid, rss, ...command]. `command` may contain + // arbitrary spaces, so re-join after the first three fields. 
+ const parts = line.trim().split(/\s+/) + if (parts.length < 4) { + continue + } + const pid = Number.parseInt(parts[0]!, 10) + const ppid = Number.parseInt(parts[1]!, 10) + const rss = Number.parseInt(parts[2]!, 10) + if (!Number.isFinite(pid) || !Number.isFinite(ppid)) { + continue + } + const command = parts.slice(3).join(' ') + rows.push({ pid, ppid, rss, command }) + } + return rows +} + +function isAlive(pid: number): boolean { + if (pid <= 1) { + // PID 0 / 1 are the kernel / init — if our parent is one of those, + // we're definitely an orphan, but `kill -0 1` would mislead. + return false + } + try { + process.kill(pid, 0) + return true + } catch { + return false + } +} + +function classify(row: ProcRow): string | undefined { + for (const { name, rx } of STALE_PATTERNS) { + if (rx.test(row.command)) { + return name + } + } + return undefined +} + +function sweep(): { killed: Array<{ pid: number; name: string; rssMb: number }>; skipped: number } { + const rows = listProcesses() + const myPid = process.pid + const myPpid = process.ppid + const killed: Array<{ pid: number; name: string; rssMb: number }> = [] + let skipped = 0 + + for (const row of rows) { + // Never touch ourselves or our parent (Claude Code). + if (row.pid === myPid || row.pid === myPpid) { + continue + } + const name = classify(row) + if (!name) { + continue + } + // Only sweep if the parent is gone (true orphan) or is PID 1 + // (re-parented to init after the original parent exited). A live + // parent means the worker is part of a real, in-progress run we + // should not interrupt. + const orphan = row.ppid === 1 || !isAlive(row.ppid) + if (!orphan) { + skipped += 1 + continue + } + try { + // SIGTERM first — give the worker a chance to flush. We don't + // wait for it; the next sweep (next turn) will SIGKILL anything + // that ignored SIGTERM. Keeping the hook fast matters more than + // squeezing every last byte. 
+ process.kill(row.pid, 'SIGTERM') + killed.push({ + pid: row.pid, + name, + rssMb: Math.round(row.rss / 1024), + }) + } catch { + // Already gone, or we lack permission — nothing to do. + } + } + return { killed, skipped } +} + +function main() { + // Drain stdin (Stop hook delivers a JSON payload). We don't need + // the body, but Node will keep the event loop alive if we don't + // consume it. + process.stdin.resume() + process.stdin.on('data', () => {}) + process.stdin.on('end', runSweep) + // If stdin is already closed (some hook runners don't pipe input), + // run immediately. + if (process.stdin.readable === false) { + runSweep() + } +} + +function runSweep() { + let result: { killed: Array<{ pid: number; name: string; rssMb: number }>; skipped: number } + try { + result = sweep() + } catch (e) { + // Hooks must never crash a Claude turn. Log and exit clean. + process.stderr.write( + `[stale-process-sweeper] unexpected error: ${(e as Error).message}\n`, + ) + process.exit(0) + } + if (result.killed.length > 0) { + const totalMb = result.killed.reduce((sum, k) => sum + k.rssMb, 0) + const breakdown = result.killed + .map(k => `${k.name}=${k.pid}(${k.rssMb}MB)`) + .join(', ') + process.stderr.write( + `[stale-process-sweeper] reaped ${result.killed.length} stale ` + + `worker(s), ~${totalMb}MB freed: ${breakdown}\n`, + ) + } + process.exit(0) +} + +main() diff --git a/.claude/hooks/stale-process-sweeper/package.json b/.claude/hooks/stale-process-sweeper/package.json new file mode 100644 index 00000000..1a0f6de1 --- /dev/null +++ b/.claude/hooks/stale-process-sweeper/package.json @@ -0,0 +1,12 @@ +{ + "name": "hook-stale-process-sweeper", + "private": true, + "type": "module", + "main": "./index.mts", + "exports": { + ".": "./index.mts" + }, + "scripts": { + "test": "node --test test/*.test.mts" + } +} diff --git a/.claude/hooks/stale-process-sweeper/test/stale-process-sweeper.test.mts b/.claude/hooks/stale-process-sweeper/test/stale-process-sweeper.test.mts 
new file mode 100644 index 00000000..56ac3572 --- /dev/null +++ b/.claude/hooks/stale-process-sweeper/test/stale-process-sweeper.test.mts @@ -0,0 +1,84 @@ +import { spawn } from 'node:child_process' +import { fileURLToPath } from 'node:url' +import path from 'node:path' +import { test } from 'node:test' +import assert from 'node:assert/strict' + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) +const HOOK = path.resolve(__dirname, '..', 'index.mts') + +// Run the hook with an empty stdin payload (Stop hook delivers JSON, +// but the body is unused). Captures stderr + exit code. +function runHook(): Promise<{ code: number; stderr: string }> { + return new Promise((resolve, reject) => { + const child = spawn(process.execPath, [HOOK], { + stdio: ['pipe', 'ignore', 'pipe'], + }) + let stderr = '' + child.stderr.on('data', d => { + stderr += d.toString() + }) + child.on('error', reject) + child.on('exit', code => { + resolve({ code: code ?? -1, stderr }) + }) + // Stop hooks receive a JSON payload on stdin. Send an empty object + // so the hook's drain logic completes. + child.stdin.end('{}\n') + }) +} + +test('stale-process-sweeper: exits 0 when nothing to sweep', async () => { + const { code, stderr } = await runHook() + assert.equal(code, 0, `hook should exit 0; stderr=${stderr}`) + // On a clean host the hook should be silent. + assert.equal( + stderr, + '', + `hook should be silent when no orphans exist; got: ${stderr}`, + ) +}) + +test('stale-process-sweeper: ignores live-parent test workers', async () => { + // Spawn a fake "vitest worker" whose parent is still alive. The + // sweeper must not touch it. We use a script path that matches the + // worker regex; the actual command runs `node -e 'setTimeout(...)'` + // long enough to outlive the hook invocation. + // + // Note: matching the regex `vitest/dist/workers/forks` requires a + // command line that contains that substring. 
We can't easily forge + // a real vitest binary, so we approximate by passing the path as an + // argv string — `ps -o command=` reflects argv, and the regex sees + // it. + const fakeWorker = spawn( + process.execPath, + [ + '-e', + 'setTimeout(() => {}, 5000)', + // This dummy arg is what `ps` will report; the sweeper's regex + // picks it up. The worker still has a live parent (this test + // process), so the sweeper should NOT kill it. + '/fake/vitest/dist/workers/forks.js', + ], + { stdio: 'ignore', detached: false }, + ) + // Give the OS a moment to register the child. + await new Promise(r => setTimeout(r, 100)) + try { + const { code, stderr } = await runHook() + assert.equal(code, 0) + // Should NOT have reaped the fake worker — its parent (us) is + // alive. If the hook killed it, the message would mention it. + assert.ok( + !stderr.includes('reaped'), + `hook reaped a live-parent worker: ${stderr}`, + ) + // Verify the worker is still alive. + assert.ok( + !fakeWorker.killed && fakeWorker.exitCode === null, + 'fake worker should still be running', + ) + } finally { + fakeWorker.kill('SIGKILL') + } +}) diff --git a/.claude/hooks/stale-process-sweeper/tsconfig.json b/.claude/hooks/stale-process-sweeper/tsconfig.json new file mode 100644 index 00000000..53c5c847 --- /dev/null +++ b/.claude/hooks/stale-process-sweeper/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "declarationMap": false, + "erasableSyntaxOnly": true, + "module": "nodenext", + "moduleResolution": "nodenext", + "noEmit": true, + "rewriteRelativeImportExtensions": true, + "skipLibCheck": true, + "sourceMap": false, + "strict": true, + "target": "esnext", + "verbatimModuleSyntax": true + } +} diff --git a/.claude/hooks/token-guard/README.md b/.claude/hooks/token-guard/README.md index 9cba28a5..3b1ae32a 100644 --- a/.claude/hooks/token-guard/README.md +++ b/.claude/hooks/token-guard/README.md @@ -1,6 +1,6 @@ # token-guard -Claude Code `PreToolUse` hook that refuses Bash tool calls 
that would leak secrets to tool output. Mandatory across the Socket fleet — every repo ships this file byte-for-byte via `scripts/sync-scaffolding.mjs`. +Claude Code `PreToolUse` hook that refuses Bash tool calls that would leak secrets to tool output. Mandatory across the Socket fleet — every repo ships this file byte-for-byte via `scripts/sync-scaffolding.mts`. ## What it blocks @@ -48,10 +48,10 @@ Adding new token-shape detections: update `LITERAL_TOKEN_PATTERNS` in `index.mts ## Updating across the fleet -This file is in `IDENTICAL_FILES` in `scripts/sync-scaffolding.mjs`. After editing, run from `socket-repo-template`: +This file is in `IDENTICAL_FILES` in `scripts/sync-scaffolding.mts`. After editing, run from `socket-repo-template`: ```bash -node scripts/sync-scaffolding.mjs --all --fix +node scripts/sync-scaffolding.mts --all --fix ``` to propagate the change to every fleet repo. diff --git a/.claude/settings.json b/.claude/settings.json index 894dcf15..cca69875 100644 --- a/.claude/settings.json +++ b/.claude/settings.json @@ -8,6 +8,10 @@ "type": "command", "command": "node .claude/hooks/check-new-deps/index.mts" }, + { + "type": "command", + "command": "node .claude/hooks/logger-guard/index.mts" + }, { "type": "command", "command": "node .claude/hooks/path-guard/index.mts" @@ -35,6 +39,20 @@ } ] } + ], + "Stop": [ + { + "hooks": [ + { + "type": "command", + "command": "node .claude/hooks/auth-rotation-reminder/index.mts" + }, + { + "type": "command", + "command": "node .claude/hooks/stale-process-sweeper/index.mts" + } + ] + } ] }, "permissions": { diff --git a/.claude/skills/_shared/path-guard-rule.md b/.claude/skills/_shared/path-guard-rule.md index fa42a32e..2447f8b7 100644 --- a/.claude/skills/_shared/path-guard-rule.md +++ b/.claude/skills/_shared/path-guard-rule.md @@ -1,7 +1,7 @@ diff --git a/.claude/skills/path-guard/SKILL.md b/.claude/skills/path-guard/SKILL.md index 747ad02b..8ff21c2b 100644 --- a/.claude/skills/path-guard/SKILL.md +++ 
b/.claude/skills/path-guard/SKILL.md @@ -2,7 +2,7 @@ name: path-guard description: Audit and fix path duplication in this Socket repo. Apply the strict "1 path, 1 reference" rule — every build/test/runtime/config path is constructed exactly once; everywhere else references the constructed value. Default mode finds and fixes; `check` mode reports only; `install` mode drops the gate + hook + rule into a fresh repo. user-invocable: true -allowed-tools: Task, Bash, Read, Edit, Write, Grep, Glob, AskUserQuestion +allowed-tools: Task, Read, Edit, Write, Grep, Glob, AskUserQuestion, Bash(pnpm run check:*), Bash(node scripts/check-paths:*), Bash(rg:*), Bash(grep:*), Bash(find:*), Bash(git:*) --- # path-guard diff --git a/.claude/skills/programmatic-claude-lockdown/SKILL.md b/.claude/skills/programmatic-claude-lockdown/SKILL.md new file mode 100644 index 00000000..f2561013 --- /dev/null +++ b/.claude/skills/programmatic-claude-lockdown/SKILL.md @@ -0,0 +1,84 @@ +--- +name: programmatic-claude-lockdown +description: Reference for locking down programmatic Claude invocations (the `claude` CLI in workflows/scripts, the `@anthropic-ai/claude-agent-sdk` `query()` in code). Loads on demand when writing or reviewing any callsite that runs Claude programmatically. Source: https://code.claude.com/docs/en/agent-sdk/permissions. +user-invocable: false +allowed-tools: Read, Grep, Glob +--- + +# Programmatic Claude lockdown + +**Rule:** every programmatic Claude callsite sets four flags. Skip any one and a future edit silently widens the surface. + +## The four flags + +| Layer | SDK option | CLI flag | What it does | +|---|---|---|---| +| Definition | `tools` | `--tools` | Base set the model is told about. Tools not listed are invisible — no `tool_use` block possible. | +| Auto-approve | `allowedTools` | `--allowedTools` | Step 4. Listed tools run without invoking `canUseTool`. | +| Deny | `disallowedTools` | `--disallowedTools` | Step 2. Wins even against `bypassPermissions`. 
Defense-in-depth. | +| Mode | `permissionMode: 'dontAsk'` | `--permission-mode dontAsk` | Step 3. Unmatched tools denied without falling through to a missing `canUseTool`. | + +The official permission flow (1) hooks → (2) deny rules → (3) permission mode → (4) allow rules → (5) `canUseTool`. In `dontAsk` mode step 5 is skipped — denied. The doc states verbatim: *"`allowedTools` and `disallowedTools` ... control whether a tool call is approved, not whether the tool is available."* Availability is `tools`. + +## Recipe — read-only agent (audit, classify, summarize) + +```ts +import { query } from '@anthropic-ai/claude-agent-sdk' + +query({ + prompt: '...', + options: { + tools: ['Read', 'Grep', 'Glob'], + allowedTools: ['Read', 'Grep', 'Glob'], + disallowedTools: ['Agent', 'Bash', 'Edit', 'NotebookEdit', 'Task', 'WebFetch', 'WebSearch', 'Write'], + permissionMode: 'dontAsk', + }, +}) +``` + +CLI form for workflow YAML / shell scripts: + +```yaml +claude --print \ + --tools "Read" "Grep" "Glob" \ + --allowedTools "Read" "Grep" "Glob" \ + --disallowedTools "Agent" "Bash" "Edit" "NotebookEdit" "Task" "WebFetch" "WebSearch" "Write" \ + --permission-mode dontAsk \ + --model "$MODEL" \ + --max-turns 25 \ + "" +``` + +## Recipe — agent that needs Bash (e.g. `/updating`: pnpm + git + jq) + +Narrow `Bash(...)` patterns surgically. Block dangerous Bash patterns explicitly. Fleet rules: no `npx`/`pnpm dlx`/`yarn dlx`; no `curl`/`wget` exfil; no destructive `rm -rf`; no `sudo`. 
Build the deny list as shell vars so the npx/dlx denials can carry the `# zizmor:` exemption marker (the pre-commit `scanNpxDlx` hook treats those literal strings as the prohibited tools, not as exemptions, unless the line is tagged): + +```yaml +DISALLOW_BASE='Agent Task NotebookEdit WebFetch WebSearch Bash(curl:*) Bash(wget:*) Bash(rm -rf*) Bash(sudo:*)' +DISALLOW_PKG_EXEC='Bash(npx:*) Bash(pnpm dlx:*) Bash(yarn dlx:*)' # zizmor: documentation-prohibition +claude --print \ + --tools "Bash" "Read" "Write" "Edit" "Glob" "Grep" \ + --allowedTools "Bash(pnpm:*)" "Bash(git:*)" "Bash(jq:*)" "Read" "Write" "Edit" "Glob" "Grep" \ + --disallowedTools $DISALLOW_BASE $DISALLOW_PKG_EXEC \ + --permission-mode dontAsk \ + --model "$MODEL" --max-turns 25 \ + "" +``` + +## Never + +- ❌ `permissionMode: 'default'` in headless contexts — falls through to a missing `canUseTool`. Behavior undefined. +- ❌ `permissionMode: 'bypassPermissions'` / `allowDangerouslySkipPermissions: true`. +- ❌ Omitting `tools` — SDK default is the full claude_code preset. +- ❌ `Agent` / `Task` permitted — sub-agents inherit modes and can escape per-subagent restrictions when the parent is `bypassPermissions`/`acceptEdits`/`auto`. + +## Reference implementation + +`socket-lib/tools/prim/src/disambiguate.mts` — canonical SDK-form callsite. The file header documents each flag against the eval-flow step it enforces. + +`socket-lib/tools/prim/test/disambiguate.test.mts` — source-text guards that fail the build if `BASE_TOOLS` widens, if `tools: BASE_TOOLS` is unwired, if `permissionMode` drifts from `'dontAsk'`, or if `bypassPermissions` / `allowDangerouslySkipPermissions: true` ever appears. Mirror this pattern in any new callsite. + +## Existing fleet callsites + +- `socket-registry/.github/workflows/weekly-update.yml` — two `claude --print` invocations (run `/updating` skill, fix test failures). Bash recipe above. 
+- `socket-lib/tools/prim/src/disambiguate.mts` — read-only recipe above (`query()` SDK form). diff --git a/.claude/skills/promise-race-pitfall/SKILL.md b/.claude/skills/promise-race-pitfall/SKILL.md new file mode 100644 index 00000000..d38f3c2a --- /dev/null +++ b/.claude/skills/promise-race-pitfall/SKILL.md @@ -0,0 +1,57 @@ +--- +name: promise-race-pitfall +description: Reference for the `Promise.race` cross-iteration handler-leak bug. Loads on demand when writing or reviewing concurrency code that uses `Promise.race`, `Promise.any`, or hand-rolled concurrency limiters. +--- + +# Promise.race in loops — the handler-leak pitfall + +**Never re-race the same pool of promises across loop iterations.** Each call to `Promise.race([A, B, …])` attaches fresh `.then` handlers to every arm. A promise that survives N iterations accumulates N handler sets. See [nodejs/node#17469](https://github.com/nodejs/node/issues/17469) and [`@watchable/unpromise`](https://github.com/watchable/unpromise). + +## Patterns + +- **Safe** — both arms created per call: + + ```ts + const value = await Promise.race([ + fetchSomething(), + new Promise((_, r) => setTimeout(() => r(new Error('timeout')), 5000)), + ]) + ``` + +- **Leaky** — `pool` survives across iterations, accumulating handlers: + + ```ts + while (queue.length) { + const winner = await Promise.race(pool) // ← N handlers per arm by iteration N + pool = pool.filter(p => p !== winner) + } + ``` + + Same hazard for `Promise.any` and any long-lived arm such as an interrupt signal. + +## The fix + +Use a single-waiter "slot available" signal. Each task's `.then` resolves a one-shot `promiseWithResolvers` that the loop awaits, then replaces. No persistent pool, nothing to stack. 
+ +```ts +let signal = Promise.withResolvers() +function startTask(task: Task) { + task.run().then(() => { + const prev = signal + signal = Promise.withResolvers() + prev.resolve(task) + }) +} +while (queue.length) { + // launch up to N tasks + while (running < N && queue.length) { startTask(queue.shift()!); running += 1 } + const finished = await signal.promise + running -= 1 +} +``` + +The arm being awaited is *always fresh*; nothing accumulates handlers. + +## Quick check + +Before merging concurrency code, ask: *does any arm of a `Promise.race`/`Promise.any` outlive the call?* If yes, refactor to the single-waiter signal. diff --git a/.git-hooks/_helpers.mts b/.git-hooks/_helpers.mts old mode 100644 new mode 100755 index b8a29978..1181e37e --- a/.git-hooks/_helpers.mts +++ b/.git-hooks/_helpers.mts @@ -96,10 +96,123 @@ const PERSONAL_PATH_RE = const PERSONAL_PATH_PLACEHOLDER_RE = /(\/Users\/<[^>]*>\/|\/home\/<[^>]*>\/|C:\\Users\\<[^>]*>\\|\/Users\/\$\{?[A-Z_]+\}?\/|\/home\/\$\{?[A-Z_]+\}?\/)/ -export type LineHit = { lineNumber: number; line: string } +// Per-line opt-out marker for our pre-commit / pre-push scanners. +// +// Canonical form: # socket-hook: allow +// Targeted form: # socket-hook: allow <rule> +// +// The targeted form names a specific rule (`personal-path`, `npx`, +// `aws-key`, etc.) and is recommended for reviewers; the bare `allow` +// form blanket-suppresses every scanner on that line. eslint-style +// precedent. +// +// Legacy `# zizmor: ...` markers are still recognized for one cycle so +// existing files don't have to be rewritten in the same change that +// renames the marker. +const SOCKET_HOOK_MARKER_RE = /#\s*socket-hook:\s*allow(?:\s+([\w-]+))?/ +const LEGACY_ZIZMOR_MARKER_RE = /#\s*zizmor:\s*[\w-]+/ + +function lineIsSuppressed(line: string, rule?: string): boolean { + if (LEGACY_ZIZMOR_MARKER_RE.test(line)) { + return true + } + const m = line.match(SOCKET_HOOK_MARKER_RE) + if (!m) { + return false + } + // No rule named on the marker → blanket allow.
+ if (!m[1]) { + return true + } + // Marker named a specific rule → only suppress that rule. + return rule === undefined || m[1] === rule +} + +// Heuristic context flags: lines that look like "this is a doc example" +// rather than a real call leaked into runtime code. +// - Comment lines (start with `*`, `//`, `#`). +// - Lines that contain a JSDoc tag like @example / @param / @returns +// (multi-line JSDoc bodies use leading ` * ` which we already match). +// - Lines whose entire interesting content sits inside a backtick span +// (markdown / template-literal example). +const COMMENT_LINE_RE = /^\s*(\*|\/\/|#)/ +const JSDOC_TAG_RE = /@(example|param|returns?|see|link)\b/ + +function isInsideBackticks(line: string, needleRe: RegExp): boolean { + // Find every backtick-delimited span on the line and test if the + // pattern only appears within those spans. Conservative: if any + // hit is *outside* a span, treat the line as runtime code. + const spans: Array<[number, number]> = [] + for (let i = 0; i < line.length; i++) { + if (line[i] === '`') { + const end = line.indexOf('`', i + 1) + if (end < 0) { + break + } + spans.push([i, end]) + i = end + } + } + if (spans.length === 0) { + return false + } + let m: RegExpExecArray | null + const re = new RegExp(needleRe.source, needleRe.flags.replace('g', '') + 'g') + while ((m = re.exec(line)) !== null) { + const start = m.index + const end = start + m[0].length + const inside = spans.some(([s, e]) => start > s && end <= e) + if (!inside) { + return false + } + } + return true +} + +function looksLikeDocumentation( + line: string, + needleRe: RegExp, + rule?: string, +): boolean { + if (lineIsSuppressed(line, rule)) { + return true + } + if (COMMENT_LINE_RE.test(line)) { + return true + } + if (JSDOC_TAG_RE.test(line)) { + return true + } + if (isInsideBackticks(line, needleRe)) { + return true + } + return false +} + +export type LineHit = { + lineNumber: number + line: string + // Suggested rewrite when this flagged 
line is documentation-style and +  // the scanner can offer a concrete fix. Undefined for runtime-code +  // paths where the right answer depends on the surrounding code. +  suggested?: string +} + +// Build a suggested rewrite for a documentation-style personal path. +// Replaces the matched real-path username segment with the canonical +// placeholder form: `/Users/<user>/` / `/home/<user>/` (matching the platform +// convention of the surrounding path). +function suggestPlaceholder(line: string): string { + return line + .replace(/\/Users\/[^/\s]+\//g, '/Users/<user>/') + .replace(/\/home\/[^/\s]+\//g, '/home/<user>/') + .replace(/C:\\Users\\[^\\]+\\/g, 'C:\\Users\\<user>\\') +} -// Returns lines that contain a real personal path (excludes lines -// that are pure placeholders). Caller decides what to do with hits. +// Returns lines that contain a real personal path (excludes lines that +// are pure placeholders or look like documentation examples). Each hit +// carries a `suggested` rewrite when the scanner can offer one — the +// caller surfaces it to the user as the fix recipe. export const scanPersonalPaths = (text: string): LineHit[] => { const hits: LineHit[] = [] const lines = text.split('\n') @@ -109,8 +222,6 @@ export const scanPersonalPaths = (text: string): LineHit[] => { continue } if (PERSONAL_PATH_PLACEHOLDER_RE.test(line)) { - // Has placeholder — but might also have a real path on the - // same line. Strip placeholder forms and re-test.
const stripped = line.replace( new RegExp(PERSONAL_PATH_PLACEHOLDER_RE, 'g'), '', @@ -119,7 +230,14 @@ export const scanPersonalPaths = (text: string): LineHit[] => { continue } } - hits.push({ lineNumber: i + 1, line }) + if (looksLikeDocumentation(line, PERSONAL_PATH_RE, 'personal-path')) { + continue + } + hits.push({ + lineNumber: i + 1, + line, + suggested: suggestPlaceholder(line), + }) } return hits } @@ -186,14 +304,80 @@ export const scanPrivateKeys = (text: string): LineHit[] => { const NPX_DLX_RE = /\b(npx|pnpm dlx|yarn dlx)\b/ +// Suggest the canonical replacement for a runtime npx/dlx call. +// Documentation contexts (comments, JSDoc) are exempt via +// looksLikeDocumentation(); we only ever land here for code lines, where +// the right swap is `pnpm exec` (since `pnpm` is the fleet's package +// manager) or `pnpm run` for script entries. +function suggestNpxReplacement(line: string): string { + return line + .replace(/\bpnpm dlx\b/g, 'pnpm exec') + .replace(/\byarn dlx\b/g, 'pnpm exec') + .replace(/\bnpx\b/g, 'pnpm exec') +} + export const scanNpxDlx = (text: string): LineHit[] => { const hits: LineHit[] = [] const lines = text.split('\n') for (let i = 0; i < lines.length; i++) { const line = lines[i]! - if (NPX_DLX_RE.test(line) && !line.includes('# zizmor:')) { - hits.push({ lineNumber: i + 1, line }) + if (!NPX_DLX_RE.test(line)) { + continue + } + if (looksLikeDocumentation(line, NPX_DLX_RE, 'npx')) { + continue + } + hits.push({ + lineNumber: i + 1, + line, + suggested: suggestNpxReplacement(line), + }) + } + return hits +} + +// ── Logger leak scanner ──────────────────────────────────────────── +// +// The fleet rule: source code uses `getDefaultLogger()` from +// `@socketsecurity/lib/logger`. Direct calls to `process.stderr.write`, +// `process.stdout.write`, `console.log`, `console.error`, `console.warn`, +// `console.info`, `console.debug` are blocked. 
Doc-context lines are +// exempt; lines carrying `# socket-hook: allow logger` are exempt too. + +const LOGGER_LEAK_RE = + /\b(process\.std(?:err|out)\.write|console\.(?:log|error|warn|info|debug))\s*\(/ + +// Map each direct call to its lib-logger equivalent. process.stdout is +// closer to logger.info; process.stderr / console.error → logger.error; +// console.warn → logger.warn; console.info / console.log → logger.info; +// console.debug → logger.debug. +function suggestLoggerReplacement(line: string): string { + return line + .replace(/\bprocess\.stderr\.write\s*\(/g, 'logger.error(') + .replace(/\bprocess\.stdout\.write\s*\(/g, 'logger.info(') + .replace(/\bconsole\.error\s*\(/g, 'logger.error(') + .replace(/\bconsole\.warn\s*\(/g, 'logger.warn(') + .replace(/\bconsole\.info\s*\(/g, 'logger.info(') + .replace(/\bconsole\.debug\s*\(/g, 'logger.debug(') + .replace(/\bconsole\.log\s*\(/g, 'logger.info(') +} + +export const scanLoggerLeaks = (text: string): LineHit[] => { + const hits: LineHit[] = [] + const lines = text.split('\n') + for (let i = 0; i < lines.length; i++) { + const line = lines[i]! 
+ if (!LOGGER_LEAK_RE.test(line)) { + continue + } + if (looksLikeDocumentation(line, LOGGER_LEAK_RE, 'logger')) { + continue } + hits.push({ + lineNumber: i + 1, + line, + suggested: suggestLoggerReplacement(line), + }) } return hits } diff --git a/.git-hooks/pre-commit.mts b/.git-hooks/pre-commit.mts old mode 100644 new mode 100755 index 61df1057..d4b885a4 --- a/.git-hooks/pre-commit.mts +++ b/.git-hooks/pre-commit.mts @@ -20,6 +20,7 @@ import { readFileForScan, scanAwsKeys, scanGitHubTokens, + scanLoggerLeaks, scanNpxDlx, scanPersonalPaths, scanPrivateKeys, @@ -97,8 +98,18 @@ const main = (): number => { const hits = scanPersonalPaths(text) if (hits.length > 0) { out(red(`✗ ERROR: Hardcoded personal path found in: ${file}`)) - hits.slice(0, 3).forEach(h => out(`${h.lineNumber}:${h.line.trim()}`)) - out('Replace with relative paths or environment variables.') + for (const h of hits.slice(0, 3)) { + out(`${h.lineNumber}: ${h.line.trim()}`) + if (h.suggested && h.suggested !== h.line) { + out(` fix: ${h.suggested.trim()}`) + } + } + out( + 'Replace with `<user>` / `<username>` placeholders, an env var ' + + '(`$HOME`, `${USER}`), or — for documentation lines that need ' + + 'the literal username form — append the marker ' + + '`# socket-hook: allow personal-path`.', + ) errors++ } } @@ -159,7 +170,12 @@ if ( file.includes('node_modules/') || file.endsWith('pnpm-lock.yaml') || - file.includes('.git-hooks/') + file.includes('.git-hooks/') || + // CHANGELOG entries discuss npx ecosystem *behavior* (cache + // semantics, naming conventions) as historical documentation — + // they're not commands. Skip the npx/dlx scan for changelogs.
+ file === 'CHANGELOG.md' || + file.endsWith('/CHANGELOG.md') ) { continue } @@ -170,8 +186,66 @@ const main = (): number => { const hits = scanNpxDlx(text) if (hits.length > 0) { out(red(`✗ ERROR: npx/dlx usage found in: ${file}`)) - hits.slice(0, 3).forEach(h => out(`${h.lineNumber}:${h.line.trim()}`)) - out("Use 'pnpm exec ' or 'pnpm run