diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml index 37148449..839d633d 100644 --- a/.github/workflows/stage-2-test.yaml +++ b/.github/workflows/stage-2-test.yaml @@ -126,6 +126,16 @@ jobs: - name: "Run linting" run: | make test-lint + test-lua-lint: + name: "Lua linting" + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: "Checkout code" + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - name: "Run luacheck" + run: | + make test-lua-lint test-typecheck: name: "Typecheck" runs-on: ubuntu-latest diff --git a/.luarc.json b/.luarc.json new file mode 100644 index 00000000..09ef6a0c --- /dev/null +++ b/.luarc.json @@ -0,0 +1,12 @@ +{ + "diagnostics": { + "globals": [ + "KEYS", + "ARGV", + "redis", + "cjson", + "cmsgpack", + "bit" + ] + } +} diff --git a/lambdas/https-client-lambda/package.json b/lambdas/https-client-lambda/package.json index 19763116..f03df4df 100644 --- a/lambdas/https-client-lambda/package.json +++ b/lambdas/https-client-lambda/package.json @@ -20,6 +20,7 @@ "@types/node": "catalog:tools", "@types/node-forge": "catalog:tools", "eslint": "catalog:lint", + "fengari": "^0.1.5", "jest": "catalog:test", "typescript": "catalog:tools" }, diff --git a/lambdas/https-client-lambda/src/__tests__/admit-lua.test.ts b/lambdas/https-client-lambda/src/__tests__/admit-lua.test.ts new file mode 100644 index 00000000..bf64f5e0 --- /dev/null +++ b/lambdas/https-client-lambda/src/__tests__/admit-lua.test.ts @@ -0,0 +1,351 @@ +import admitLuaSrc from "services/admit.lua"; +import { createRedisStore, evalLua } from "__tests__/helpers/lua-redis-mock"; + +type AdmitArgs = { + now: number; + refillPerSec: number; + capacity: number; + cbProbeIntervalMs: number; + cbEnabled: boolean; + decayPeriodMs: number; +}; + +const defaultArgs: AdmitArgs = { + now: 1_000_000, + refillPerSec: 10, + capacity: 10, + cbProbeIntervalMs: 60_000, + cbEnabled: true, + decayPeriodMs: 300_000, +}; + +function runAdmit( 
+ store: ReturnType<typeof createRedisStore>, + args: Partial<AdmitArgs> = {}, + targetId = "t1", +) { + const merged = { ...defaultArgs, ...args }; + return evalLua( + admitLuaSrc, + [`rl:${targetId}`, `cb:${targetId}`], + [ + merged.now.toString(), + merged.refillPerSec.toString(), + merged.capacity.toString(), + merged.cbProbeIntervalMs.toString(), + merged.cbEnabled ? "1" : "0", + merged.decayPeriodMs.toString(), + ], + store, + ); +} + +describe("admit.lua", () => { + describe("rate limiting", () => { + it("allows the first request with full token bucket", () => { + const store = createRedisStore(); + const result = runAdmit(store); + + expect(result).toEqual({ + allowed: true, + probe: false, + effectiveRate: 10, + }); + }); + + it("deducts tokens on each call", () => { + const store = createRedisStore(); + + for (let i = 0; i < 10; i++) { + const result = runAdmit(store); + expect(result).toMatchObject({ allowed: true }); + } + + const result = runAdmit(store); + expect(result).toMatchObject({ + allowed: false, + reason: "rate_limited", + }); + }); + + it("returns retryAfterMs when rate limited", () => { + const store = createRedisStore(); + + for (let i = 0; i < 10; i++) { + runAdmit(store); + } + + const result = runAdmit(store); + expect(result).toMatchObject({ + allowed: false, + reason: "rate_limited", + retryAfterMs: expect.any(Number), + }); + expect((result as { retryAfterMs: number }).retryAfterMs).toBeGreaterThan( + 0, + ); + }); + + it("refills tokens over time", () => { + const store = createRedisStore(); + const now = 1_000_000; + + for (let i = 0; i < 10; i++) { + runAdmit(store, { now }); + } + + const denied = runAdmit(store, { now }); + expect(denied).toMatchObject({ allowed: false }); + + const later = now + 1000; + const result = runAdmit(store, { now: later }); + expect(result).toMatchObject({ allowed: true }); + }); + + it("caps tokens at capacity", () => { + const store = createRedisStore(); + + const result = runAdmit(store, { + now: 1_000_000, + capacity: 5, + 
refillPerSec: 100, + }); + expect(result).toMatchObject({ allowed: true }); + + const rlHash = store.get("rl:t1"); + const tokensRaw = rlHash?.get("tokens"); + expect(tokensRaw).toBeDefined(); + const tokens = Number.parseFloat(tokensRaw ?? ""); + expect(tokens).toBeLessThanOrEqual(4); + }); + + it("handles zero refill rate", () => { + const store = createRedisStore(); + + for (let i = 0; i < 10; i++) { + runAdmit(store, { refillPerSec: 0 }); + } + + const result = runAdmit(store, { refillPerSec: 0 }); + expect(result).toMatchObject({ + allowed: false, + reason: "rate_limited", + retryAfterMs: 1000, + }); + }); + }); + + describe("circuit breaker", () => { + it("blocks requests when circuit is open", () => { + const store = createRedisStore(); + const now = 1_000_000; + const openedUntil = now + 60_000; + + store.set( + "cb:t1", + new Map([ + ["opened_until_ms", openedUntil.toString()], + ["last_probe_ms", now.toString()], + ]), + ); + + const result = runAdmit(store, { now }); + expect(result).toMatchObject({ + allowed: false, + reason: "circuit_open", + effectiveRate: 0, + }); + }); + + it("returns retryAfterMs for open circuit", () => { + const store = createRedisStore(); + const now = 1_000_000; + const openedUntil = now + 30_000; + + store.set( + "cb:t1", + new Map([ + ["opened_until_ms", openedUntil.toString()], + ["last_probe_ms", now.toString()], + ]), + ); + + const result = runAdmit(store, { now }); + expect(result).toMatchObject({ + allowed: false, + reason: "circuit_open", + retryAfterMs: 30_000, + }); + }); + + it("allows probe when probe interval has elapsed", () => { + const store = createRedisStore(); + const now = 1_000_000; + const openedUntil = now + 120_000; + const lastProbe = now - 61_000; + + store.set( + "cb:t1", + new Map([ + ["opened_until_ms", openedUntil.toString()], + ["last_probe_ms", lastProbe.toString()], + ]), + ); + + const result = runAdmit(store, { + now, + cbProbeIntervalMs: 60_000, + }); + expect(result).toEqual({ + allowed: 
true, + probe: true, + effectiveRate: 0, + }); + + const cbHash = store.get("cb:t1")!; + expect(cbHash.get("last_probe_ms")).toBe(now.toString()); + }); + + it("does not probe when interval has not elapsed", () => { + const store = createRedisStore(); + const now = 1_000_000; + const openedUntil = now + 120_000; + const lastProbe = now - 30_000; + + store.set( + "cb:t1", + new Map([ + ["opened_until_ms", openedUntil.toString()], + ["last_probe_ms", lastProbe.toString()], + ]), + ); + + const result = runAdmit(store, { + now, + cbProbeIntervalMs: 60_000, + }); + expect(result).toMatchObject({ + allowed: false, + reason: "circuit_open", + }); + }); + + it("skips circuit breaker when disabled", () => { + const store = createRedisStore(); + const now = 1_000_000; + const openedUntil = now + 60_000; + + store.set( + "cb:t1", + new Map([["opened_until_ms", openedUntil.toString()]]), + ); + + const result = runAdmit(store, { now, cbEnabled: false }); + expect(result).toMatchObject({ allowed: true, probe: false }); + }); + }); + + describe("decay scaling", () => { + it("applies reduced rate during decay period", () => { + const store = createRedisStore(); + const closedAt = 1_000_000; + const decayPeriodMs = 300_000; + const halfwayThrough = closedAt + decayPeriodMs / 2; + + store.set("cb:t1", new Map([["opened_until_ms", closedAt.toString()]])); + + const result = runAdmit(store, { + now: halfwayThrough, + refillPerSec: 10, + decayPeriodMs, + }); + expect(result).toMatchObject({ allowed: true }); + expect((result as { effectiveRate: number }).effectiveRate).toBeCloseTo( + 5, + 0, + ); + }); + + it("uses full rate after decay period ends", () => { + const store = createRedisStore(); + const closedAt = 1_000_000; + const decayPeriodMs = 300_000; + const afterDecay = closedAt + decayPeriodMs + 1; + + store.set("cb:t1", new Map([["opened_until_ms", closedAt.toString()]])); + + const result = runAdmit(store, { + now: afterDecay, + refillPerSec: 10, + decayPeriodMs, + }); + 
expect(result).toMatchObject({ + allowed: true, + effectiveRate: 10, + }); + }); + + it("clamps minimum effective rate to 0.001", () => { + const store = createRedisStore(); + const closedAt = 1_000_000; + const decayPeriodMs = 300_000; + const veryEarly = closedAt + 1; + + store.set("cb:t1", new Map([["opened_until_ms", closedAt.toString()]])); + + const result = runAdmit(store, { + now: veryEarly, + refillPerSec: 10, + decayPeriodMs, + }); + const rate = (result as { effectiveRate: number }).effectiveRate; + expect(rate).toBeGreaterThanOrEqual(0.001); + }); + + it("does not decay when decayPeriodMs is 0", () => { + const store = createRedisStore(); + const closedAt = 1_000_000; + + store.set("cb:t1", new Map([["opened_until_ms", closedAt.toString()]])); + + const result = runAdmit(store, { + now: closedAt + 1, + refillPerSec: 10, + decayPeriodMs: 0, + }); + expect(result).toMatchObject({ + allowed: true, + effectiveRate: 10, + }); + }); + + it("does not decay when circuit breaker is disabled", () => { + const store = createRedisStore(); + const closedAt = 1_000_000; + + store.set("cb:t1", new Map([["opened_until_ms", closedAt.toString()]])); + + const result = runAdmit(store, { + now: closedAt + 1, + refillPerSec: 10, + decayPeriodMs: 300_000, + cbEnabled: false, + }); + expect(result).toMatchObject({ + allowed: true, + effectiveRate: 10, + }); + }); + }); + + describe("redis state persistence", () => { + it("persists token count and last_refill_ms", () => { + const store = createRedisStore(); + runAdmit(store, { now: 1_000_000, capacity: 5 }); + + const rlHash = store.get("rl:t1")!; + expect(rlHash.get("tokens")).toBeDefined(); + expect(rlHash.get("last_refill_ms")).toBe("1000000"); + }); + }); +}); diff --git a/lambdas/https-client-lambda/src/__tests__/helpers/fengari.d.ts b/lambdas/https-client-lambda/src/__tests__/helpers/fengari.d.ts new file mode 100644 index 00000000..e40c1e59 --- /dev/null +++ 
b/lambdas/https-client-lambda/src/__tests__/helpers/fengari.d.ts @@ -0,0 +1,32 @@ +declare module "fengari" { + type LuaState = object; + + const lua: { + LUA_OK: number; + lua_close(L: LuaState): void; + lua_createtable(L: LuaState, narr: number, nrec: number): void; + lua_getglobal(L: LuaState, name: Uint8Array): number; + lua_gettop(L: LuaState): number; + lua_pushboolean(L: LuaState, b: number): void; + lua_pushcfunction(L: LuaState, fn: (L: LuaState) => number): void; + lua_pushinteger(L: LuaState, n: number): void; + lua_pushstring(L: LuaState, s: Uint8Array): void; + lua_rawseti(L: LuaState, idx: number, n: number): void; + lua_setglobal(L: LuaState, name: Uint8Array): void; + lua_tostring(L: LuaState, idx: number): Uint8Array; + }; + + const lauxlib: { + luaL_dostring(L: LuaState, s: Uint8Array): number; + luaL_newstate(): LuaState; + }; + + const lualib: { + luaL_openlibs(L: LuaState): void; + }; + + // eslint-disable-next-line @typescript-eslint/naming-convention -- fengari uses snake_case names + function to_jsstring(s: Uint8Array): string; + // eslint-disable-next-line @typescript-eslint/naming-convention -- fengari uses snake_case names + function to_luastring(s: string): Uint8Array; +} diff --git a/lambdas/https-client-lambda/src/__tests__/helpers/lua-redis-mock.ts b/lambdas/https-client-lambda/src/__tests__/helpers/lua-redis-mock.ts new file mode 100644 index 00000000..adee5a1a --- /dev/null +++ b/lambdas/https-client-lambda/src/__tests__/helpers/lua-redis-mock.ts @@ -0,0 +1,144 @@ +import { + lauxlib, + lua, + lualib, + to_jsstring as toJsstring, + to_luastring as toLuastring, +} from "fengari"; + +type LuaState = ReturnType<typeof lauxlib.luaL_newstate>; +type RedisStore = Map<string, Map<string, string>>; + +export function createRedisStore(): RedisStore { + return new Map(); +} + +function hget( + store: RedisStore, + key: string, + field: string, +): string | undefined { + return store.get(key)?.get(field); +} + +function hset(store: RedisStore, ...args: string[]): number { + const key = args[0]; + 
const hash = store.get(key) ?? new Map(); + store.set(key, hash); + let fieldsSet = 0; + for (let i = 1; i < args.length; i += 2) { + // eslint-disable-next-line security/detect-object-injection -- args is a controlled array from redis HSET parsing + hash.set(args[i], args[i + 1]); + fieldsSet += 1; + } + return fieldsSet; +} + +function redisCallHandler(L: LuaState, store: RedisStore): number { + const cmd = toJsstring(lua.lua_tostring(L, 1)).toUpperCase(); + if (cmd === "HGET") { + const key = toJsstring(lua.lua_tostring(L, 2)); + const field = toJsstring(lua.lua_tostring(L, 3)); + const val = hget(store, key, field); + if (val === undefined) { + lua.lua_pushboolean(L, 0); + } else { + lua.lua_pushstring(L, toLuastring(val)); + } + return 1; + } + if (cmd === "HSET") { + const nArgs = lua.lua_gettop(L); + const args: string[] = []; + for (let i = 2; i <= nArgs; i++) { + args.push(toJsstring(lua.lua_tostring(L, i))); + } + const count = hset(store, ...args); + lua.lua_pushinteger(L, count); + return 1; + } + throw new Error(`Unsupported Redis command in mock: ${cmd}`); +} + +const CJSON_AND_REDIS_PREAMBLE = ` + cjson = {} + function cjson.encode(t) + if type(t) ~= "table" then return tostring(t) end + local parts = {} + for k, v in pairs(t) do + local key = '"' .. tostring(k) .. '"' + local val + if type(v) == "boolean" then + val = v and "true" or "false" + elseif type(v) == "number" then + if v == math.floor(v) and v < 1e15 and v > -1e15 then + val = string.format("%d", v) + else + val = tostring(v) + end + elseif type(v) == "string" then + val = '"' .. v .. '"' + else + val = tostring(v) + end + parts[#parts + 1] = key .. ":" .. val + end + return "{" .. table.concat(parts, ",") .. "}" + end + + redis = {} + function redis.call(cmd, ...) + return __redis_call(cmd, ...) 
+ end +`; + +function registerRedisCallGlobal(L: LuaState, store: RedisStore): void { + lua.lua_pushcfunction(L, (ls: LuaState) => redisCallHandler(ls, store)); + lua.lua_setglobal(L, toLuastring("__redis_call")); +} + +function installCjsonAndRedisShims(L: LuaState): void { + lauxlib.luaL_dostring(L, toLuastring(CJSON_AND_REDIS_PREAMBLE)); +} + +function setStringArrayGlobal( + L: LuaState, + name: string, + values: string[], +): void { + lua.lua_createtable(L, values.length, 0); + for (const [i, value] of values.entries()) { + lua.lua_pushstring(L, toLuastring(value)); + lua.lua_rawseti(L, -2, i + 1); + } + lua.lua_setglobal(L, toLuastring(name)); +} + +function runScript(L: LuaState, script: string): string { + const status = lauxlib.luaL_dostring(L, toLuastring(script)); + if (status !== lua.LUA_OK) { + const errMsg = toJsstring(lua.lua_tostring(L, -1)); + throw new Error(`Lua error: ${errMsg}`); + } + return toJsstring(lua.lua_tostring(L, -1)); +} + +export function evalLua( + script: string, + keys: string[], + argv: string[], + store: RedisStore, +): unknown { + const L: LuaState = lauxlib.luaL_newstate(); + lualib.luaL_openlibs(L); + + try { + registerRedisCallGlobal(L, store); + installCjsonAndRedisShims(L); + setStringArrayGlobal(L, "KEYS", keys); + setStringArrayGlobal(L, "ARGV", argv); + return JSON.parse(runScript(L, script)); + } finally { + lua.lua_close(L); + } +} diff --git a/lambdas/https-client-lambda/src/__tests__/record-result-lua.test.ts b/lambdas/https-client-lambda/src/__tests__/record-result-lua.test.ts new file mode 100644 index 00000000..2c809559 --- /dev/null +++ b/lambdas/https-client-lambda/src/__tests__/record-result-lua.test.ts @@ -0,0 +1,316 @@ +import recordResultLuaSrc from "services/record-result.lua"; +import { createRedisStore, evalLua } from "__tests__/helpers/lua-redis-mock"; + +type RecordResultArgs = { + now: number; + success: boolean; + cbWindowPeriodMs: number; + cbErrorThreshold: number; + cbMinAttempts: number; + 
cbCooldownMs: number; + decayPeriodMs: number; +}; + +const defaultArgs: RecordResultArgs = { + now: 1_000_000, + success: true, + cbWindowPeriodMs: 60_000, + cbErrorThreshold: 0.5, + cbMinAttempts: 10, + cbCooldownMs: 60_000, + decayPeriodMs: 300_000, +}; + +function runRecordResult( + store: ReturnType<typeof createRedisStore>, + args: Partial<RecordResultArgs> = {}, + targetId = "t1", +) { + const merged = { ...defaultArgs, ...args }; + return evalLua( + recordResultLuaSrc, + [`cb:${targetId}`], + [ + merged.now.toString(), + merged.success ? "1" : "0", + merged.cbWindowPeriodMs.toString(), + merged.cbErrorThreshold.toString(), + merged.cbMinAttempts.toString(), + merged.cbCooldownMs.toString(), + merged.decayPeriodMs.toString(), + ], + store, + ); +} + +describe("record-result.lua", () => { + describe("success recording", () => { + it("returns closed state for a successful result", () => { + const store = createRedisStore(); + const result = runRecordResult(store, { success: true }); + expect(result).toEqual({ ok: true, state: "closed" }); + }); + + it("increments attempt count without incrementing failures", () => { + const store = createRedisStore(); + runRecordResult(store, { success: true }); + + const cbHash = store.get("cb:t1")!; + expect(cbHash.get("cb_attempts")).toBe("1"); + expect(cbHash.get("cb_failures")).toBe("0"); + }); + }); + + describe("failure recording", () => { + it("increments both attempts and failures on error", () => { + const store = createRedisStore(); + runRecordResult(store, { success: false }); + + const cbHash = store.get("cb:t1")!; + expect(cbHash.get("cb_attempts")).toBe("1"); + expect(cbHash.get("cb_failures")).toBe("1"); + }); + + it("stays closed when below error threshold", () => { + const store = createRedisStore(); + const now = 1_000_000; + + for (let i = 0; i < 8; i++) { + runRecordResult(store, { now, success: true }); + } + for (let i = 0; i < 2; i++) { + runRecordResult(store, { now, success: false }); + } + + const result = runRecordResult(store, { now, 
success: true }); + expect(result).toEqual({ ok: true, state: "closed" }); + }); + }); + + describe("circuit opening", () => { + it("opens circuit when error rate exceeds threshold", () => { + const store = createRedisStore(); + const now = 1_000_000; + + for (let i = 0; i < 10; i++) { + runRecordResult(store, { + now, + success: false, + cbMinAttempts: 5, + cbErrorThreshold: 0.5, + }); + } + + const result = runRecordResult(store, { + now, + success: false, + cbMinAttempts: 5, + cbErrorThreshold: 0.5, + }); + expect(result).toEqual({ ok: false, state: "opened" }); + }); + + it("does not open circuit when below minimum attempts", () => { + const store = createRedisStore(); + const now = 1_000_000; + + for (let i = 0; i < 4; i++) { + runRecordResult(store, { + now, + success: false, + cbMinAttempts: 10, + }); + } + + const result = runRecordResult(store, { + now, + success: false, + cbMinAttempts: 10, + }); + expect(result).toEqual({ ok: true, state: "closed" }); + }); + + it("sets opened_until_ms with cooldown on open", () => { + const store = createRedisStore(); + const now = 1_000_000; + const cbCooldownMs = 30_000; + + for (let i = 0; i < 10; i++) { + runRecordResult(store, { + now, + success: false, + cbMinAttempts: 5, + cbErrorThreshold: 0.5, + cbCooldownMs, + }); + } + + const cbHash = store.get("cb:t1")!; + expect(Number(cbHash.get("opened_until_ms"))).toBe(now + cbCooldownMs); + }); + }); + + describe("window rolling", () => { + it("rolls window when period expires", () => { + const store = createRedisStore(); + const windowPeriodMs = 60_000; + const t0 = 1_000_000; + const t1 = t0 + windowPeriodMs + 1; + + for (let i = 0; i < 3; i++) { + runRecordResult(store, { + now: t0, + success: false, + cbWindowPeriodMs: windowPeriodMs, + }); + } + + runRecordResult(store, { + now: t1, + success: true, + cbWindowPeriodMs: windowPeriodMs, + }); + + const cbHash = store.get("cb:t1")!; + expect(cbHash.get("cb_prev_failures")).toBe("3"); + 
expect(cbHash.get("cb_prev_attempts")).toBe("3"); + expect(cbHash.get("cb_attempts")).toBe("1"); + expect(cbHash.get("cb_failures")).toBe("0"); + }); + + it("initialises window_from on first call", () => { + const store = createRedisStore(); + const now = 1_000_000; + + runRecordResult(store, { now }); + + const cbHash = store.get("cb:t1")!; + expect(cbHash.get("cb_window_from")).toBe(now.toString()); + }); + }); + + describe("two-window blended rate", () => { + it("blends previous window failures into current assessment", () => { + const store = createRedisStore(); + const windowPeriodMs = 60_000; + const t0 = 1_000_000; + + for (let i = 0; i < 10; i++) { + runRecordResult(store, { + now: t0, + success: false, + cbWindowPeriodMs: windowPeriodMs, + cbMinAttempts: 5, + cbErrorThreshold: 0.5, + }); + } + + const justAfterRoll = t0 + windowPeriodMs + 1; + const result = runRecordResult(store, { + now: justAfterRoll, + success: false, + cbWindowPeriodMs: windowPeriodMs, + cbMinAttempts: 5, + cbErrorThreshold: 0.5, + }); + + expect(result).toEqual({ ok: false, state: "opened" }); + }); + + it("reduces previous window weight as current window progresses", () => { + const store = createRedisStore(); + const windowPeriodMs = 100_000; + const t0 = 1_000_000; + + for (let i = 0; i < 10; i++) { + runRecordResult(store, { + now: t0, + success: false, + cbWindowPeriodMs: windowPeriodMs, + cbMinAttempts: 5, + cbErrorThreshold: 0.5, + }); + } + + const nearEndOfWindow = t0 + windowPeriodMs + windowPeriodMs - 1; + for (let i = 0; i < 20; i++) { + runRecordResult(store, { + now: nearEndOfWindow, + success: true, + cbWindowPeriodMs: windowPeriodMs, + cbMinAttempts: 5, + cbErrorThreshold: 0.5, + }); + } + + const result = runRecordResult(store, { + now: nearEndOfWindow, + success: true, + cbWindowPeriodMs: windowPeriodMs, + cbMinAttempts: 5, + cbErrorThreshold: 0.5, + }); + expect(result).toEqual({ ok: true, state: "closed" }); + }); + }); + + describe("decay period", () => { + 
it("clears opened_until_ms after decay period elapses", () => { + const store = createRedisStore(); + const openedAt = 1_000_000; + const cooldownMs = 60_000; + const decayPeriodMs = 300_000; + const openedUntil = openedAt + cooldownMs; + const afterDecay = openedUntil + decayPeriodMs + 1; + + store.set( + "cb:t1", + new Map([["opened_until_ms", openedUntil.toString()]]), + ); + + runRecordResult(store, { + now: afterDecay, + success: true, + decayPeriodMs, + }); + + const cbHash = store.get("cb:t1")!; + expect(cbHash.get("opened_until_ms")).toBe("0"); + }); + + it("preserves opened_until_ms during active decay", () => { + const store = createRedisStore(); + const openedUntil = 1_060_000; + const duringDecay = openedUntil + 100_000; + + store.set( + "cb:t1", + new Map([["opened_until_ms", openedUntil.toString()]]), + ); + + runRecordResult(store, { + now: duringDecay, + success: true, + decayPeriodMs: 300_000, + }); + + const cbHash = store.get("cb:t1")!; + expect(Number(cbHash.get("opened_until_ms"))).toBe(openedUntil); + }); + }); + + describe("state persistence", () => { + it("writes all counter fields to redis", () => { + const store = createRedisStore(); + runRecordResult(store); + + const cbHash = store.get("cb:t1")!; + expect(cbHash.has("opened_until_ms")).toBe(true); + expect(cbHash.has("cb_window_from")).toBe(true); + expect(cbHash.has("cb_failures")).toBe(true); + expect(cbHash.has("cb_attempts")).toBe(true); + expect(cbHash.has("cb_prev_failures")).toBe(true); + expect(cbHash.has("cb_prev_attempts")).toBe(true); + }); + }); +}); diff --git a/lambdas/https-client-lambda/src/services/admit.lua b/lambdas/https-client-lambda/src/services/admit.lua index 8fdf2b15..5bc88db5 100644 --- a/lambdas/https-client-lambda/src/services/admit.lua +++ b/lambdas/https-client-lambda/src/services/admit.lua @@ -69,7 +69,7 @@ end -- Check rate limit if tokens < 1 then -- Compute retry-after based on effective rate - local retryAfterMs = 0 + local retryAfterMs if effectiveRate 
> 0 then retryAfterMs = math.ceil((1 - tokens) / effectiveRate * 1000) else diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a5d09976..a9dea2e0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -383,6 +383,9 @@ importers: eslint: specifier: catalog:lint version: 9.39.4(jiti@2.6.1) + fengari: + specifier: ^0.1.5 + version: 0.1.5 jest: specifier: catalog:test version: 30.3.0(@types/node@24.12.0)(ts-node@10.9.2(@types/node@25.5.0)(typescript@5.9.3)) @@ -2951,6 +2954,9 @@ packages: picomatch: optional: true + fengari@0.1.5: + resolution: {integrity: sha512-0DS4Nn4rV8qyFlQCpKK8brT61EUtswynrpfFTcgLErcilBIBskSMQ86fO2WVuybr14ywyKdRjv91FiRZwnEuvQ==} + fflate@0.8.1: resolution: {integrity: sha512-/exOvEuc+/iaUm105QIiOt4LpBdMTWsXxqR0HDF35vx3fmaKzw7354gTilCh5rkzEt8WYyG//ku3h3nRmd7CHQ==} @@ -3890,6 +3896,10 @@ packages: react-is@19.0.0: resolution: {integrity: sha512-H91OHcwjZsbq3ClIDHMzBShc1rotbfACdWENsmEf0IFvZ3FgGPtdHMcsv45bQ1hAbgdfiA8SnxTKfDS+x/8m2g==} + readline-sync@1.4.10: + resolution: {integrity: sha512-gNva8/6UAe8QYepIQH/jQ2qn91Qj0B9sYjMBBs3QOB8F2CXcKgLxQaJRP76sWVRQt+QU+8fAkCbCvjjMFu7Ycw==} + engines: {node: '>= 0.8.0'} + real-require@0.2.0: resolution: {integrity: sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==} engines: {node: '>= 12.13.0'} @@ -4062,6 +4072,9 @@ packages: sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + sprintf-js@1.1.3: + resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==} + stable-hash-x@0.2.0: resolution: {integrity: sha512-o3yWv49B/o4QZk5ZcsALc6t0+eCelPc44zZsLtCQnZPDwFpDYSWcDnrv2TtMmMbQ7uKo3J0HTURCqckw23czNQ==} engines: {node: '>=12.0.0'} @@ -4180,6 +4193,10 @@ packages: resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} engines: {node: '>=12.0.0'} + tmp@0.2.5: + 
resolution: {integrity: sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==} + engines: {node: '>=14.14'} + tmpl@1.0.5: resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} @@ -7663,6 +7680,12 @@ snapshots: optionalDependencies: picomatch: 4.0.4 + fengari@0.1.5: + dependencies: + readline-sync: 1.4.10 + sprintf-js: 1.1.3 + tmp: 0.2.5 + fflate@0.8.1: {} file-entry-cache@8.0.0: @@ -8875,6 +8898,8 @@ snapshots: react-is@19.0.0: {} + readline-sync@1.4.10: {} + real-require@0.2.0: {} refa@0.12.1: @@ -9067,6 +9092,8 @@ snapshots: sprintf-js@1.0.3: {} + sprintf-js@1.1.3: {} + stable-hash-x@0.2.0: {} stack-utils@2.0.6: @@ -9206,6 +9233,8 @@ snapshots: fdir: 6.5.0(picomatch@4.0.4) picomatch: 4.0.4 + tmp@0.2.5: {} + tmpl@1.0.5: {} to-regex-range@5.0.1: diff --git a/scripts/config/pre-commit.yaml b/scripts/config/pre-commit.yaml index 1e1da873..221b38f1 100644 --- a/scripts/config/pre-commit.yaml +++ b/scripts/config/pre-commit.yaml @@ -79,3 +79,10 @@ repos: entry: pnpm exec knip --no-progress language: system pass_filenames: false + - repo: local + hooks: + - id: check-lua-format + name: Check Lua format + entry: /usr/bin/env check=branch ./scripts/githooks/check-lua-format.sh + language: script + pass_filenames: false diff --git a/scripts/githooks/check-lua-format.sh b/scripts/githooks/check-lua-format.sh new file mode 100755 index 00000000..ec8fdf75 --- /dev/null +++ b/scripts/githooks/check-lua-format.sh @@ -0,0 +1,100 @@ +#!/bin/bash + +set -euo pipefail + +# Pre-commit git hook to lint Lua files using luacheck. Runs luacheck natively +# if installed, otherwise falls back to Docker. 
+# +# Usage: +# $ [options] ./check-lua-format.sh +# +# Options: +# check={all,staged-changes,working-tree-changes,branch} # Check mode, default is 'working-tree-changes' +# BRANCH_NAME=other-branch-than-main # Branch to compare with, default is `origin/main` +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is `false` + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + check=${check:-working-tree-changes} + case $check in + "all") + files="$(git ls-files "*.lua")" + ;; + "staged-changes") + files="$(git diff --diff-filter=ACMRT --name-only --cached "*.lua")" + ;; + "working-tree-changes") + files="$(git diff --diff-filter=ACMRT --name-only "*.lua")" + ;; + "branch") + files="$( (git diff --diff-filter=ACMRT --name-only "${BRANCH_NAME:-origin/main}" "*.lua"; git diff --name-only "*.lua") | sort | uniq )" + ;; + *) + echo "Unrecognised check mode: $check" >&2 && exit 1 + ;; + esac + + if [ -n "$files" ]; then + # shellcheck disable=SC2155 + local globals=$(jq -r '.diagnostics.globals[]' .luarc.json | tr '\n' ' ') + if command -v luacheck > /dev/null 2>&1 && ! is-arg-true "${FORCE_USE_DOCKER:-false}"; then + files="$files" globals="$globals" run-luacheck-natively + else + files="$files" globals="$globals" run-luacheck-in-docker + fi + fi +} + +# Run luacheck natively. +# Arguments (provided as environment variables): +# files=[files to check] +# globals=[space-separated list of global names] +function run-luacheck-natively() { + + # shellcheck disable=SC2086 + luacheck $files --globals $globals +} + +# Run luacheck in a Docker container. 
+# Arguments (provided as environment variables): +# files=[files to check] +# globals=[space-separated list of global names] +function run-luacheck-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=pipelinecomponents/luacheck docker-get-image-version-and-pull) + # shellcheck disable=SC2086 + docker run --rm --platform linux/amd64 \ + --volume "$PWD":/data \ + --workdir /data \ + --entrypoint luacheck \ + "$image" \ + $files --globals $globals +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/tests/lua-lint.sh b/scripts/tests/lua-lint.sh new file mode 100755 index 00000000..ae271da4 --- /dev/null +++ b/scripts/tests/lua-lint.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +set -euo pipefail + +cd "$(git rev-parse --show-toplevel)" + +check=all ./scripts/githooks/check-lua-format.sh diff --git a/scripts/tests/test.mk b/scripts/tests/test.mk index d9303d92..a94a5af0 100644 --- a/scripts/tests/test.mk +++ b/scripts/tests/test.mk @@ -14,6 +14,9 @@ test-unit: # Run your unit tests from scripts/test/unit @Testing test-lint: # Lint your code from scripts/test/lint @Testing make _test name="lint" +test-lua-lint: # Lint Lua scripts @Testing + make _test name="lua-lint" + test-typecheck: # Typecheck your code from scripts/test/typecheck @Testing make _test name="typecheck"