Files
miti99bot-js/tests/db/dual-sql-store.test.js
tiennm99 ea7df56e2d feat(db,cron): phase 04 — dual-write wrappers + factory routing + drift verifier + e2e
The integration phase. Wires Phase 02 (MongoKVStore) and Phase 03
(MongoTradesStore + MongoSqlStore shim) into the request path behind
two env flags so KV and Atlas run side-by-side until cutover.

Storage routing
- DualKVStore + DualSqlStore: Promise.allSettled writes to BOTH backends,
  reads from primary only. Secondary failures log + enqueue onto a KV
  retry queue (__retry:mongo-failed:* / __retry:mongo-sql-failed:*).
  Primary failure throws. _kind="dual" sentinel for test seam.
- create-store.js + create-sql-store.js: full flag matrix (STORAGE_PRIMARY
  ∈ {kv,mongo}, DUAL_WRITE ∈ {0,1}, MONGODB_URI presence) with
  STUB_SENTINEL short-circuit for deploy-time. Post-cutover shape commented
  inline so Phase 07 simplification is mechanical.

Stub mongo for register
- scripts/stub-kv.js: STUB_SENTINEL constant + duck-typed stubMongo
  (no-op connect/close, throwing collection access). Replaces the
  originally-planned string sentinel which would have stalled register.js
  on serverSelectionTimeoutMS if ever passed to MongoClient (code-reviewer #2).
- scripts/register.js: stub env passes MONGODB_URI=STUB_SENTINEL,
  STORAGE_PRIMARY="kv", DUAL_WRITE="0". Asserted via vi.spyOn that
  MongoClient.prototype.connect is never reached.

Drift verifier cron (1/hr)
- src/cron/drift-verifier.js: drains both retry queues by re-attempting
  secondary writes, deletes on success. Spot-checks parity by sampling
  DRIFT_SAMPLE_N keys per module, hashing, logging mismatches.
- src/modules/cron-dispatcher.js: SYSTEM_CRONS array dispatched alongside
  module crons. Keeping system cron out of registry.crons preserves
  existing module-cron length tests and is the cleaner design.
- wrangler.toml: vars STORAGE_PRIMARY/DUAL_WRITE/DRIFT_SAMPLE_N + cron
  schedule "0 * * * *" added.

Trading wiring
- src/modules/registry.js: builds new MongoTradesStore(env) when Mongo
  is in play and threads it as tradesStore into trading module's init
  context. Trading module already accepted optional tradesStore (Phase 03
  backwards-compat) — D1 path remains for STORAGE_PRIMARY=kv + DUAL_WRITE=0.

Tests + verification
- tests/db/dual-kv-store.test.js, dual-sql-store.test.js: write-both,
  secondary-fail-logs+enqueues, primary-fail-throws, reads-primary-only,
  _kind sentinel.
- tests/db/stub-mongo-sentinel.test.js: spy on MongoClient.connect,
  assert zero calls across all flag-matrix combos.
- tests/cron/drift-verifier.test.js: queue drain, skip paths, error safety.
- tests/e2e/storage-roundtrip.test.js: wordle KV dual-write +
  trading MongoTradesStore against fake-mongo.

Tests: 577 → 638 (+61). register:dry passes without Atlas. Lint clean.

Concerns
- Drift-verifier parity-spot-check tests assert queue-drain only;
  full mismatch detection needs real Atlas (Vitest ES-module caching
  blocks reliable prototype patching). Verifier logic verified by
  inspection.
2026-04-26 09:02:07 +07:00

212 lines
8.1 KiB
JavaScript

/**
* @file dual-sql-store.test.js — unit tests for DualSqlStore.
*
* Contracts verified:
* 1. run() writes to both primary and secondary.
* 2. run() succeeds when secondary fails: logs warning + enqueues to retry queue.
* 3. run() throws when primary fails.
* 4. all() and first() read from primary only.
* 5. prepare() and batch() delegate to primary only.
* 6. tablePrefix is inherited from primary.
* 7. `_kind === "dual"` sentinel present.
*/
import { beforeEach, describe, expect, it, vi } from "vitest";
import { CFSqlStore } from "../../src/db/cf-sql-store.js";
import { DualSqlStore } from "../../src/db/dual-sql-store.js";
import { makeFakeD1 } from "../fakes/fake-d1.js";
import { makeFakeKv } from "../fakes/fake-kv-namespace.js";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Builds a fresh fixture set for one test: two fake D1 backends wrapped in
 * CFSqlStore, a fake KV namespace for the retry queue, a spy logger, and the
 * DualSqlStore under test. Fresh instances per call keep tests isolated.
 */
function makeStores() {
  const primaryD1 = makeFakeD1();
  const secondaryD1 = makeFakeD1();
  const retryQueueKv = makeFakeKv();
  const logger = { warn: vi.fn(), error: vi.fn(), log: vi.fn() };
  const primary = new CFSqlStore(primaryD1);
  const secondary = new CFSqlStore(secondaryD1);
  const dual = new DualSqlStore(primary, secondary, retryQueueKv, logger);
  // Set directly to simulate the tablePrefix the factory wrapper would supply.
  dual.tablePrefix = "trading_";
  return { primaryD1, secondaryD1, retryQueueKv, primary, secondary, dual, logger };
}
// ---------------------------------------------------------------------------
// Constructor validation
// ---------------------------------------------------------------------------
describe("DualSqlStore constructor", () => {
  // Throwaway CFSqlStore over a fresh fake D1, for slots we don't exercise.
  const freshStore = () => new CFSqlStore(makeFakeD1());

  it("throws when primary is missing", () => {
    expect(() => new DualSqlStore(null, freshStore(), makeFakeKv())).toThrow(/primary/);
  });
  it("throws when secondary is missing", () => {
    expect(() => new DualSqlStore(freshStore(), null, makeFakeKv())).toThrow(/secondary/);
  });
  it("throws when rawKv is missing", () => {
    const store = freshStore();
    // Same store in both slots — only the missing rawKv should trip validation.
    expect(() => new DualSqlStore(store, store, null)).toThrow(/rawKv/);
  });
});
// ---------------------------------------------------------------------------
// _kind sentinel
// ---------------------------------------------------------------------------
describe("_kind sentinel", () => {
  it("exposes _kind === 'dual'", () => {
    // Factory routing and tests rely on this marker to spot the dual wrapper.
    const { dual } = makeStores();
    expect(dual._kind).toBe("dual");
  });
});
// ---------------------------------------------------------------------------
// tablePrefix
// ---------------------------------------------------------------------------
describe("tablePrefix", () => {
  it("inherits tablePrefix from primary", () => {
    const primary = new CFSqlStore(makeFakeD1());
    // DualSqlStore copies primary.tablePrefix at construction; CFSqlStore
    // declares none, so the wrapper must fall back to the empty string.
    const dual = new DualSqlStore(primary, primary, makeFakeKv());
    expect(typeof dual.tablePrefix).toBe("string");
  });
});
// ---------------------------------------------------------------------------
// run() — both succeed
// ---------------------------------------------------------------------------
describe("run() — both succeed", () => {
  const INSERT = "INSERT INTO trading_trades VALUES (?)";

  it("records query in both primary and secondary runLog", async () => {
    const { primaryD1, secondaryD1, dual } = makeStores();
    await dual.run(INSERT, "x");
    // Both backends must have seen exactly the same single write.
    for (const backend of [primaryD1, secondaryD1]) {
      expect(backend.runLog).toHaveLength(1);
      expect(backend.runLog[0].query).toBe(INSERT);
    }
  });
  it("returns the primary run result", async () => {
    const { dual } = makeStores();
    // D1 result shape (changes/last_row_id) proves the primary's value wins.
    const result = await dual.run(INSERT, "v");
    expect(result).toHaveProperty("changes");
    expect(result).toHaveProperty("last_row_id");
  });
  it("no retry entry enqueued when both succeed", async () => {
    const { retryQueueKv, dual } = makeStores();
    await dual.run(INSERT, "v");
    expect(retryQueueKv.store.size).toBe(0);
  });
});
// ---------------------------------------------------------------------------
// run() — secondary fails
// ---------------------------------------------------------------------------
describe("run() — secondary fails", () => {
  it("succeeds, logs warning, enqueues retry when secondary throws", async () => {
    const { primaryD1, secondaryD1, retryQueueKv, logger, dual } = makeStores();
    // Failing prepared statement: bind() chains, run() always rejects.
    vi.spyOn(secondaryD1, "prepare").mockImplementation(() => ({
      run: () => Promise.reject(new Error("mongo write failed")),
      bind() {
        return this;
      },
    }));
    // Primary write still lands, so the dual call must resolve; a rejection
    // here fails the test directly (clearer than resolves.not.toThrow()).
    await dual.run("INSERT INTO trading_trades VALUES (?)", "val");
    expect(primaryD1.runLog).toHaveLength(1);
    expect(logger.warn).toHaveBeenCalledOnce();
    const warnArg = logger.warn.mock.calls[0][1];
    expect(warnArg.op).toBe("run");
    expect(warnArg.err).toContain("mongo write failed");
    // Bind values must NOT appear in structured log.
    expect(JSON.stringify(warnArg)).not.toContain("val");
    // Retry enqueued.
    expect(retryQueueKv.store.size).toBe(1);
    const [key] = [...retryQueueKv.store.keys()];
    expect(key).toMatch(/^__retry:mongo-sql-failed:/);
  });
});
// ---------------------------------------------------------------------------
// run() — primary fails
// ---------------------------------------------------------------------------
describe("run() — primary fails", () => {
  it("throws when primary throws", async () => {
    const { primaryD1, dual } = makeStores();
    // Failing prepared statement on the primary: bind() chains, run() rejects.
    vi.spyOn(primaryD1, "prepare").mockImplementation(() => ({
      run: () => Promise.reject(new Error("d1 gone")),
      bind() {
        return this;
      },
    }));
    // Primary failure must propagate — the dual wrapper never masks it.
    await expect(dual.run("INSERT INTO trading_trades VALUES (?)", "v")).rejects.toThrow("d1 gone");
  });
});
// ---------------------------------------------------------------------------
// Read operations — primary only
// ---------------------------------------------------------------------------
describe("all() and first() — primary only", () => {
it("all() returns primary results", async () => {
const { primaryD1, secondaryD1, dual } = makeStores();
primaryD1.seed("trading_trades", [{ id: 1, symbol: "VNM" }]);
// Secondary empty — result must still come from primary.
const rows = await dual.all("SELECT * FROM trading_trades");
expect(rows).toHaveLength(1);
expect(rows[0].symbol).toBe("VNM");
});
it("first() returns primary first row", async () => {
const { primaryD1, dual } = makeStores();
primaryD1.seed("trading_trades", [{ id: 1, symbol: "FPT" }]);
const row = await dual.first("SELECT * FROM trading_trades LIMIT 1");
expect(row?.symbol).toBe("FPT");
});
it("first() returns null when primary has no rows", async () => {
const { dual } = makeStores();
const row = await dual.first("SELECT * FROM trading_trades LIMIT 1");
expect(row).toBeNull();
});
});
// ---------------------------------------------------------------------------
// prepare() and batch() — primary only
// ---------------------------------------------------------------------------
describe("prepare() and batch() — primary only", () => {
it("prepare() delegates to primary", () => {
const { primaryD1, dual } = makeStores();
// Should not throw; fake D1 returns a stub prepared statement.
expect(() => dual.prepare("SELECT 1")).not.toThrow();
});
it("batch() returns primary results", async () => {
const { primaryD1, dual } = makeStores();
primaryD1.seed("trading_trades", [{ id: 1 }]);
const stmt = dual.prepare("SELECT * FROM trading_trades");
const results = await dual.batch([stmt]);
expect(Array.isArray(results)).toBe(true);
expect(results[0]).toHaveLength(1);
});
});