mirror of
https://github.com/tiennm99/miti99bot.git
synced 2026-04-17 13:21:31 +00:00
feat: add D1 storage layer with per-module migration runner
- SqlStore interface + CF D1 wrapper + per-module factory (table prefix convention)
- init signature extended to ({ db, sql, env }); sql is null when DB binding absent
- custom migration runner walks src/modules/*/migrations/*.sql, tracks applied in _migrations table
- npm run db:migrate with --dry-run and --local flags; chained into deploy
- fake-d1 test helper with subset of SQL semantics for retention and history tests
This commit is contained in:
1016
package-lock.json
generated
1016
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -9,10 +9,11 @@
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "wrangler dev",
|
||||
"deploy": "wrangler deploy && npm run register",
|
||||
"deploy": "wrangler deploy && npm run db:migrate && npm run register",
|
||||
"db:migrate": "node scripts/migrate.js",
|
||||
"register": "node --env-file=.env.deploy scripts/register.js",
|
||||
"register:dry": "node --env-file=.env.deploy scripts/register.js --dry-run",
|
||||
"lint": "biome check src tests scripts",
|
||||
"lint": "biome check src tests scripts && eslint src",
|
||||
"format": "biome format --write src tests scripts",
|
||||
"test": "vitest run"
|
||||
},
|
||||
@@ -21,6 +22,8 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "^1.9.0",
|
||||
"eslint": "^10.2.0",
|
||||
"eslint-plugin-jsdoc": "^62.9.0",
|
||||
"vitest": "^2.1.0",
|
||||
"wrangler": "^3.90.0"
|
||||
}
|
||||
|
||||
161
scripts/migrate.js
Normal file
161
scripts/migrate.js
Normal file
@@ -0,0 +1,161 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* @file migrate — custom D1 migration runner for per-module SQL files.
|
||||
*
|
||||
 * Discovers all `src/modules/*/migrations/*.sql` files, sorts them
|
||||
 * deterministically (by `{moduleName}/{fileName}`), then applies each NEW
|
||||
* migration via `wrangler d1 execute miti99bot-db --remote --file=<path>`.
|
||||
*
|
||||
* Applied migrations are tracked in a `_migrations(name TEXT PRIMARY KEY,
|
||||
* applied_at INTEGER)` table in the D1 database itself.
|
||||
*
|
||||
* Flags:
|
||||
* --dry-run Print the migration plan without executing anything.
|
||||
* --local Apply against local dev D1 (omits --remote flag).
|
||||
*
|
||||
* Usage:
|
||||
* node scripts/migrate.js
|
||||
* node scripts/migrate.js --dry-run
|
||||
* node scripts/migrate.js --local
|
||||
*/
|
||||
|
||||
import { execSync } from "node:child_process";
|
||||
import { existsSync, readdirSync } from "node:fs";
|
||||
import { basename, join, resolve } from "node:path";
|
||||
|
||||
const DB_NAME = "miti99bot-db";
|
||||
const PROJECT_ROOT = resolve(import.meta.dirname, "..");
|
||||
const MODULES_DIR = join(PROJECT_ROOT, "src", "modules");
|
||||
|
||||
const dryRun = process.argv.includes("--dry-run");
|
||||
const local = process.argv.includes("--local");
|
||||
const remoteFlag = local ? "" : "--remote";
|
||||
|
||||
/**
 * Run a `wrangler d1 execute` command and return stdout.
 *
 * Exactly one of `sql` / `file` should be provided; `file` wins when both
 * are present.
 *
 * @param {string|null} sql — inline SQL string (used for bootstrap queries)
 * @param {string} [file] — path to a .sql file (mutually exclusive with sql)
 * @returns {string} raw stdout from wrangler (--json output)
 * @throws {Error} carrying wrangler's stderr/stdout when the command fails
 */
function wranglerExecute(sql, file) {
  if (!sql && !file) {
    throw new Error("wranglerExecute: either sql or file is required");
  }
  // Escape every character the shell interprets inside double quotes:
  // backslash, ", $ and backtick. The previous version only escaped ",
  // so SQL containing $ or ` would be expanded (or executed) by the shell.
  const escapeForShell = (s) => s.replace(/[\\"$`]/g, (ch) => `\\${ch}`);
  const target = file ? `--file="${file}"` : `--command="${escapeForShell(sql)}"`;
  const cmd = `npx wrangler d1 execute ${DB_NAME} ${remoteFlag} ${target} --json`;
  try {
    return execSync(cmd, { stdio: ["ignore", "pipe", "pipe"] }).toString();
  } catch (err) {
    // execSync errors carry the child's output; surface whichever is non-empty.
    const stderr = err.stderr?.toString() ?? "";
    const stdout = err.stdout?.toString() ?? "";
    throw new Error(`wrangler error:\n${stderr || stdout}`);
  }
}
|
||||
|
||||
/**
 * Create the `_migrations` bookkeeping table if it does not exist yet.
 * Idempotent — safe to call before every run.
 */
function bootstrapMigrationsTable() {
  wranglerExecute(
    "CREATE TABLE IF NOT EXISTS _migrations (name TEXT PRIMARY KEY, applied_at INTEGER NOT NULL);",
  );
}
|
||||
|
||||
/**
 * Fetch the set of already-applied migration names from D1.
 *
 * @returns {Set<string>}
 */
function getAppliedMigrations() {
  const raw = wranglerExecute("SELECT name FROM _migrations;");

  /** @type {any[]} */
  let rows;
  try {
    // wrangler --json wraps results in an array of result objects.
    const payload = JSON.parse(raw);
    rows = Array.isArray(payload) ? (payload[0]?.results ?? []) : [];
  } catch {
    // A freshly-created table may produce empty/unparseable JSON — treat as
    // "no migrations applied yet".
    rows = [];
  }

  return new Set(rows.map((row) => row.name));
}
|
||||
|
||||
/**
 * Record a migration as applied in the `_migrations` table.
 *
 * @param {string} name — composite key "{moduleName}/{fileName}"
 */
function recordMigration(name) {
  // Escape single quotes so a (pathological) quote in a directory or file
  // name cannot break out of the SQL string literal.
  const safeName = name.replace(/'/g, "''");
  const sql = `INSERT INTO _migrations (name, applied_at) VALUES ('${safeName}', ${Date.now()});`;
  wranglerExecute(sql);
}
|
||||
|
||||
/**
 * Discover all migration files under `src/modules/<module>/migrations/`.
 *
 * Each entry is { name, absPath } where name = "{moduleName}/{fileName}"
 * and serves as the unique migration key. The result is sorted by that
 * composite name so ordering is deterministic across modules.
 *
 * @returns {Array<{name: string, absPath: string}>}
 */
function discoverMigrations() {
  if (!existsSync(MODULES_DIR)) return [];

  const moduleDirs = readdirSync(MODULES_DIR, { withFileTypes: true }).filter((entry) =>
    entry.isDirectory(),
  );

  /** @type {Array<{name: string, absPath: string}>} */
  const migrations = [];

  for (const dir of moduleDirs) {
    const migrationsDir = join(MODULES_DIR, dir.name, "migrations");
    if (!existsSync(migrationsDir)) continue;

    const sqlFiles = readdirSync(migrationsDir)
      .filter((file) => file.endsWith(".sql"))
      .sort();

    for (const file of sqlFiles) {
      migrations.push({ name: `${dir.name}/${file}`, absPath: join(migrationsDir, file) });
    }
  }

  // Composite-name sort keeps ordering stable regardless of walk order.
  migrations.sort((a, b) => a.name.localeCompare(b.name));
  return migrations;
}
|
||||
|
||||
/**
 * Entry point: plan and apply pending migrations.
 *
 * Dry-run mode prints the full plan and exits without touching the
 * database; otherwise the bookkeeping table is bootstrapped and each
 * not-yet-applied migration is executed and recorded in order.
 */
async function main() {
  const discovered = discoverMigrations();

  if (discovered.length === 0) {
    console.log("No migration files found — nothing to do.");
    return;
  }

  if (dryRun) {
    console.log(`DRY RUN — would apply up to ${discovered.length} migration(s):`);
    for (const m of discovered) console.log(`  ${m.name}`);
    return;
  }

  bootstrapMigrationsTable();
  const applied = getAppliedMigrations();
  const pending = discovered.filter((m) => !applied.has(m.name));

  if (pending.length === 0) {
    console.log(`All ${discovered.length} migration(s) already applied.`);
    return;
  }

  console.log(`Applying ${pending.length} pending migration(s)...`);

  for (const { name, absPath } of pending) {
    console.log(`  → ${name}`);
    // Apply the SQL file, then immediately record it so a later failure
    // does not re-run this migration.
    wranglerExecute(null, absPath);
    recordMigration(name);
    console.log("    ✓ applied");
  }

  console.log("Done.");
}

main().catch((err) => {
  console.error(err.message ?? err);
  process.exit(1);
});
|
||||
17
src/bot.js
17
src/bot.js
@@ -9,12 +9,29 @@
|
||||
|
||||
import { Bot } from "grammy";
|
||||
import { installDispatcher } from "./modules/dispatcher.js";
|
||||
import { getCurrentRegistry } from "./modules/registry.js";
|
||||
|
||||
/** @type {Bot | null} */
|
||||
let botInstance = null;
|
||||
/** @type {Promise<Bot> | null} */
|
||||
let botInitPromise = null;
|
||||
|
||||
/**
 * Returns the memoized registry, building it (and the bot) if needed.
 * Shares the same instance used by the fetch handler so scheduled() and
 * fetch() operate on identical registry state within a warm instance.
 *
 * @param {any} env
 * @returns {Promise<import("./modules/registry.js").Registry>}
 */
export async function getRegistry(env) {
  // Bootstrapping the bot builds the registry as a side effect, so only
  // do it when this warm instance has not initialised the bot yet.
  if (!botInstance) {
    await getBot(env);
  }
  return getCurrentRegistry();
}
|
||||
|
||||
/**
|
||||
* Fail fast if any required env var is missing — better a 500 on first webhook
|
||||
* than a confusing runtime error inside grammY.
|
||||
|
||||
80
src/db/cf-sql-store.js
Normal file
80
src/db/cf-sql-store.js
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* @file cf-sql-store — thin wrapper around a Cloudflare D1 database binding.
|
||||
*
|
||||
* Exposes `prepare`, `run`, `all`, `first`, and `batch` using the D1
|
||||
* prepared-statement API. This is the production implementation of SqlStore.
|
||||
* Tests use `fake-d1.js` instead.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {import("./sql-store-interface.js").SqlStore} SqlStore
|
||||
* @typedef {import("./sql-store-interface.js").SqlRunResult} SqlRunResult
|
||||
*/
|
||||
|
||||
export class CFSqlStore {
  /**
   * @param {D1Database} db — the Cloudflare D1 binding (env.DB).
   */
  constructor(db) {
    this._db = db;
  }

  /**
   * Returns a bound D1PreparedStatement for advanced use (e.g. batch()).
   *
   * @param {string} query
   * @param {...any} binds
   * @returns {D1PreparedStatement}
   */
  prepare(query, ...binds) {
    const stmt = this._db.prepare(query);
    // D1 statements are bound once; skip bind() entirely when no params.
    return binds.length > 0 ? stmt.bind(...binds) : stmt;
  }

  /**
   * Execute a write statement (INSERT/UPDATE/DELETE/CREATE).
   *
   * @param {string} query
   * @param {...any} binds
   * @returns {Promise<SqlRunResult>} changes + last_row_id (0 when absent)
   */
  async run(query, ...binds) {
    const result = await this.prepare(query, ...binds).run();
    return {
      changes: result.meta?.changes ?? 0,
      last_row_id: result.meta?.last_row_id ?? 0,
    };
  }

  /**
   * Execute a SELECT and return all matching rows.
   *
   * @param {string} query
   * @param {...any} binds
   * @returns {Promise<any[]>}
   */
  async all(query, ...binds) {
    const result = await this.prepare(query, ...binds).all();
    return result.results ?? [];
  }

  /**
   * Execute a SELECT and return the first row, or null.
   *
   * Fix: previously `?? null` was applied to the *promise* returned by
   * first() — a promise is never nullish, so the coalescing was dead code
   * and an `undefined` resolution leaked through. Await first, then coalesce.
   *
   * @param {string} query
   * @param {...any} binds
   * @returns {Promise<any|null>}
   */
  async first(query, ...binds) {
    const row = await this.prepare(query, ...binds).first();
    return row ?? null;
  }

  /**
   * Execute multiple prepared statements in a single round-trip.
   *
   * @param {D1PreparedStatement[]} statements
   * @returns {Promise<any[]>} one results array per statement
   */
  async batch(statements) {
    const results = await this._db.batch(statements);
    return results.map((r) => r.results ?? []);
  }
}
|
||||
64
src/db/create-sql-store.js
Normal file
64
src/db/create-sql-store.js
Normal file
@@ -0,0 +1,64 @@
|
||||
/**
|
||||
* @file create-sql-store — factory returning a namespaced SqlStore for a module.
|
||||
*
|
||||
* Table naming is by convention: `{moduleName}_{table}`. Authors write the
|
||||
* full prefixed name directly in SQL (e.g. `trading_trades`). `tablePrefix`
|
||||
* is exposed for authors who want to interpolate the prefix dynamically.
|
||||
*
|
||||
* Returns null when `env.DB` is absent so modules that don't use D1 have
|
||||
* zero overhead — the registry passes `sql: null` and modules check for it.
|
||||
*/
|
||||
|
||||
import { CFSqlStore } from "./cf-sql-store.js";
|
||||
|
||||
/**
|
||||
* @typedef {import("./sql-store-interface.js").SqlStore} SqlStore
|
||||
*/
|
||||
|
||||
const MODULE_NAME_RE = /^[a-z0-9_-]+$/;

/**
 * Build a namespaced SqlStore for one module.
 *
 * @param {string} moduleName — must match `[a-z0-9_-]+`.
 * @param {{ DB?: D1Database }} env — worker env (or test double).
 * @returns {SqlStore | null} null when env.DB is not bound.
 */
export function createSqlStore(moduleName, env) {
  if (!moduleName || typeof moduleName !== "string") {
    throw new Error("createSqlStore: moduleName is required");
  }
  if (!MODULE_NAME_RE.test(moduleName)) {
    throw new Error(
      `createSqlStore: invalid moduleName "${moduleName}" — must match ${MODULE_NAME_RE}`,
    );
  }

  // D1 is optional — workers without a DB binding still work fine.
  if (!env?.DB) return null;

  const base = new CFSqlStore(env.DB);

  // Every SqlStore operation delegates to the shared CF wrapper; only
  // tablePrefix is module-specific.
  return {
    tablePrefix: `${moduleName}_`,
    prepare: (query, ...binds) => base.prepare(query, ...binds),
    run: (query, ...binds) => base.run(query, ...binds),
    all: (query, ...binds) => base.all(query, ...binds),
    first: (query, ...binds) => base.first(query, ...binds),
    batch: (statements) => base.batch(statements),
  };
}
|
||||
@@ -10,26 +10,26 @@
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} KVStorePutOptions
|
||||
* @typedef {object} KVStorePutOptions
|
||||
* @property {number} [expirationTtl] seconds — value auto-deletes after this many seconds.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} KVStoreListOptions
|
||||
* @typedef {object} KVStoreListOptions
|
||||
* @property {string} [prefix] additional prefix (appended AFTER the module namespace).
|
||||
* @property {number} [limit]
|
||||
* @property {string} [cursor] pagination cursor from a previous list() call.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} KVStoreListResult
|
||||
* @typedef {object} KVStoreListResult
|
||||
* @property {string[]} keys — module namespace already stripped.
|
||||
* @property {string} [cursor] — present if more pages available.
|
||||
* @property {boolean} done — true when list_complete.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} KVStore
|
||||
* @typedef {object} KVStore
|
||||
* @property {(key: string) => Promise<string|null>} get
|
||||
* @property {(key: string, value: string, opts?: KVStorePutOptions) => Promise<void>} put
|
||||
* @property {(key: string) => Promise<void>} delete
|
||||
|
||||
40
src/db/sql-store-interface.js
Normal file
40
src/db/sql-store-interface.js
Normal file
@@ -0,0 +1,40 @@
|
||||
/**
|
||||
* @file SqlStore interface — JSDoc typedefs only, no runtime code.
|
||||
*
|
||||
* This is the contract every SQL storage backend must satisfy. Modules
|
||||
* receive a prefixed `SqlStore` (via {@link module:db/create-sql-store}) and
|
||||
* must NEVER touch the underlying `env.DB` binding directly.
|
||||
*
|
||||
* Table naming convention: `{moduleName}_{table}` (e.g. `trading_trades`).
|
||||
* Enforced by convention — `tablePrefix` is exposed so authors can interpolate
|
||||
* it when building dynamic table names, but most authors hard-code the full
|
||||
* prefixed table name directly in their SQL.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Raw D1 run result.
|
||||
*
|
||||
* @typedef {object} SqlRunResult
|
||||
* @property {number} changes — rows affected by INSERT/UPDATE/DELETE.
|
||||
* @property {number} last_row_id — rowid of the last inserted row (0 if none).
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {object} SqlStore
|
||||
* @property {string} tablePrefix
|
||||
* Convenience prefix `"${moduleName}_"`. Authors may interpolate this when
|
||||
* constructing dynamic table names.
|
||||
* @property {(query: string, ...binds: any[]) => Promise<SqlRunResult>} run
|
||||
* Execute a write statement (INSERT/UPDATE/DELETE/CREATE). Returns metadata.
|
||||
* @property {(query: string, ...binds: any[]) => Promise<any[]>} all
|
||||
* Execute a SELECT and return all matching rows as plain objects.
|
||||
* @property {(query: string, ...binds: any[]) => Promise<any|null>} first
|
||||
* Execute a SELECT and return the first row, or null if no rows match.
|
||||
* @property {(query: string, ...binds: any[]) => D1PreparedStatement} prepare
|
||||
* Expose the underlying prepared statement for advanced use (e.g. batch()).
|
||||
* @property {(statements: D1PreparedStatement[]) => Promise<any[]>} batch
|
||||
* Execute multiple prepared statements in a single round-trip.
|
||||
*/
|
||||
|
||||
// JSDoc-only module. No runtime exports.
|
||||
export {};
|
||||
21
src/index.js
21
src/index.js
@@ -13,7 +13,8 @@
|
||||
*/
|
||||
|
||||
import { webhookCallback } from "grammy";
|
||||
import { getBot } from "./bot.js";
|
||||
import { getBot, getRegistry } from "./bot.js";
|
||||
import { dispatchScheduled } from "./modules/cron-dispatcher.js";
|
||||
|
||||
/** @type {ReturnType<typeof webhookCallback> | null} */
|
||||
let cachedWebhookHandler = null;
|
||||
@@ -31,6 +32,24 @@ async function getWebhookHandler(env) {
|
||||
}
|
||||
|
||||
export default {
|
||||
/**
 * Cloudflare Cron Trigger handler.
 * Dispatches the scheduled event to all module cron handlers whose
 * schedule matches event.cron.
 *
 * Errors are logged rather than rethrown so a single failing module
 * cannot crash the scheduled invocation.
 *
 * @param {any} event — ScheduledEvent ({ cron: string, scheduledTime: number })
 * @param {any} env
 * @param {{ waitUntil: (p: Promise<any>) => void }} ctx
 */
async scheduled(event, env, ctx) {
  try {
    // Reuse the same registry instance as the fetch/webhook path.
    const registry = await getRegistry(env);
    // NOTE(review): dispatchScheduled is not awaited — if it returns a
    // promise, rejections escape this try/catch. Confirm it registers its
    // async work via ctx.waitUntil instead.
    dispatchScheduled(event, env, ctx, registry);
  } catch (err) {
    console.error("scheduled handler failed", err);
  }
},
|
||||
|
||||
/**
|
||||
* @param {Request} request
|
||||
* @param {any} env
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* wrangler bundles statically — dynamic `import(variablePath)` defeats
|
||||
* tree-shaking and can fail at bundle time. So we enumerate every module here
|
||||
* as a lazy loader, and {@link loadModules} filters the list at runtime
|
||||
* as a lazy loader, and loadModules filters the list at runtime
|
||||
* against `env.MODULES` (comma-separated). Adding a new module is a two-step
|
||||
* edit: create the folder, then add one line here.
|
||||
*/
|
||||
|
||||
@@ -12,28 +12,40 @@
|
||||
* - `resetRegistry()` exists for tests.
|
||||
*/
|
||||
|
||||
import { createSqlStore } from "../db/create-sql-store.js";
|
||||
import { createStore } from "../db/create-store.js";
|
||||
import { moduleRegistry as defaultModuleRegistry } from "./index.js";
|
||||
import { validateCommand } from "./validate-command.js";
|
||||
import { validateCron } from "./validate-cron.js";
|
||||
|
||||
/**
|
||||
* @typedef {import("./validate-command.js").ModuleCommand} ModuleCommand
|
||||
*
|
||||
* @typedef {Object} BotModule
|
||||
* @typedef {import("./validate-cron.js").ModuleCron} ModuleCron
|
||||
*
|
||||
* @typedef {object} BotModule
|
||||
* @property {string} name
|
||||
* @property {ModuleCommand[]} commands
|
||||
* @property {({ db, env }: { db: any, env: any }) => Promise<void>|void} [init]
|
||||
* @property {ModuleCron[]} [crons]
|
||||
* @property {(ctx: { db: any, sql: any, env: any }) => Promise<void>} [init]
|
||||
*
|
||||
* @typedef {Object} RegistryEntry
|
||||
* @typedef {object} RegistryEntry
|
||||
* @property {BotModule} module
|
||||
* @property {ModuleCommand} cmd
|
||||
* @property {"public"|"protected"|"private"} [visibility]
|
||||
*
|
||||
* @typedef {Object} Registry
|
||||
* @typedef {object} CronEntry
|
||||
* @property {BotModule} module
|
||||
* @property {string} schedule
|
||||
* @property {string} name
|
||||
* @property {ModuleCron["handler"]} handler
|
||||
*
|
||||
* @typedef {object} Registry
|
||||
* @property {Map<string, RegistryEntry>} publicCommands
|
||||
* @property {Map<string, RegistryEntry>} protectedCommands
|
||||
* @property {Map<string, RegistryEntry>} privateCommands
|
||||
* @property {Map<string, RegistryEntry>} allCommands
|
||||
* @property {CronEntry[]} crons — flat list of all validated cron entries across modules.
|
||||
* @property {BotModule[]} modules — ordered per env.MODULES for /help rendering.
|
||||
*/
|
||||
|
||||
@@ -97,6 +109,21 @@ export async function loadModules(env, importMap = defaultModuleRegistry) {
|
||||
}
|
||||
for (const cmd of mod.commands) validateCommand(cmd, name);
|
||||
|
||||
// Validate crons if present (optional field).
|
||||
if (mod.crons !== undefined) {
|
||||
if (!Array.isArray(mod.crons)) {
|
||||
throw new Error(`module "${name}" crons must be an array`);
|
||||
}
|
||||
const cronNames = new Set();
|
||||
for (const cron of mod.crons) {
|
||||
validateCron(cron, name);
|
||||
if (cronNames.has(cron.name)) {
|
||||
throw new Error(`module "${name}" has duplicate cron name "${cron.name}"`);
|
||||
}
|
||||
cronNames.add(cron.name);
|
||||
}
|
||||
}
|
||||
|
||||
modules.push(mod);
|
||||
}
|
||||
|
||||
@@ -122,11 +149,13 @@ export async function buildRegistry(env, importMap) {
|
||||
const privateCommands = new Map();
|
||||
/** @type {Map<string, RegistryEntry>} */
|
||||
const allCommands = new Map();
|
||||
/** @type {CronEntry[]} */
|
||||
const crons = [];
|
||||
|
||||
for (const mod of modules) {
|
||||
if (typeof mod.init === "function") {
|
||||
try {
|
||||
await mod.init({ db: createStore(mod.name, env), env });
|
||||
await mod.init({ db: createStore(mod.name, env), sql: createSqlStore(mod.name, env), env });
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`module "${mod.name}" init failed: ${err instanceof Error ? err.message : String(err)}`,
|
||||
@@ -149,6 +178,18 @@ export async function buildRegistry(env, importMap) {
|
||||
else if (cmd.visibility === "protected") protectedCommands.set(cmd.name, entry);
|
||||
else privateCommands.set(cmd.name, entry);
|
||||
}
|
||||
|
||||
// Collect cron entries (validated during loadModules).
|
||||
if (Array.isArray(mod.crons)) {
|
||||
for (const cron of mod.crons) {
|
||||
crons.push({
|
||||
module: mod,
|
||||
schedule: cron.schedule,
|
||||
name: cron.name,
|
||||
handler: cron.handler,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const registry = {
|
||||
@@ -156,6 +197,7 @@ export async function buildRegistry(env, importMap) {
|
||||
protectedCommands,
|
||||
privateCommands,
|
||||
allCommands,
|
||||
crons,
|
||||
modules,
|
||||
};
|
||||
currentRegistry = registry;
|
||||
|
||||
@@ -17,7 +17,7 @@ export const COMMAND_NAME_RE = /^[a-z0-9_]{1,32}$/;
|
||||
export const MAX_DESCRIPTION_LENGTH = 256;
|
||||
|
||||
/**
|
||||
* @typedef {Object} ModuleCommand
|
||||
* @typedef {object} ModuleCommand
|
||||
* @property {string} name — without leading slash; matches COMMAND_NAME_RE.
|
||||
* @property {"public"|"protected"|"private"} visibility
|
||||
* @property {string} description — ≤256 chars; required for all visibilities.
|
||||
|
||||
116
tests/db/create-sql-store.test.js
Normal file
116
tests/db/create-sql-store.test.js
Normal file
@@ -0,0 +1,116 @@
|
||||
/**
 * @file create-sql-store.test — unit tests for the per-module SqlStore
 * factory. Uses the in-memory fake D1 binding (tests/fakes/fake-d1.js),
 * so no real database is required.
 */
import { describe, expect, it } from "vitest";
import { createSqlStore } from "../../src/db/create-sql-store.js";
import { makeFakeD1 } from "../fakes/fake-d1.js";

// Fresh env (and fresh fake DB) per call so tests never share table state.
const makeEnv = () => ({ DB: makeFakeD1() });

describe("createSqlStore", () => {
  describe("factory validation", () => {
    it("throws on missing moduleName", () => {
      expect(() => createSqlStore("", makeEnv())).toThrow(/moduleName is required/);
      expect(() => createSqlStore(null, makeEnv())).toThrow(/moduleName is required/);
    });

    it("throws on invalid moduleName characters", () => {
      expect(() => createSqlStore("Bad Name", makeEnv())).toThrow(/invalid moduleName/);
      expect(() => createSqlStore("has space", makeEnv())).toThrow(/invalid moduleName/);
    });

    it("returns null when env.DB is absent", () => {
      expect(createSqlStore("mymod", {})).toBeNull();
      expect(createSqlStore("mymod", { KV: {} })).toBeNull();
    });

    it("returns a SqlStore when env.DB is present", () => {
      const sql = createSqlStore("mymod", makeEnv());
      expect(sql).not.toBeNull();
      expect(typeof sql.run).toBe("function");
      expect(typeof sql.all).toBe("function");
      expect(typeof sql.first).toBe("function");
      expect(typeof sql.prepare).toBe("function");
      expect(typeof sql.batch).toBe("function");
    });
  });

  describe("tablePrefix", () => {
    it("exposes tablePrefix as moduleName + underscore", () => {
      const sql = createSqlStore("trading", makeEnv());
      expect(sql.tablePrefix).toBe("trading_");
    });

    it("works with hyphenated module names", () => {
      const sql = createSqlStore("my-mod", makeEnv());
      expect(sql.tablePrefix).toBe("my-mod_");
    });
  });

  describe("run", () => {
    it("returns changes and last_row_id on INSERT", async () => {
      const sql = createSqlStore("trading", makeEnv());
      const result = await sql.run("INSERT INTO trading_trades VALUES (?)", "x");
      expect(result).toHaveProperty("changes");
      expect(result).toHaveProperty("last_row_id");
      expect(typeof result.changes).toBe("number");
    });

    it("records the query in runLog", async () => {
      const fakeDb = makeFakeD1();
      const sql = createSqlStore("trading", { DB: fakeDb });
      await sql.run("INSERT INTO trading_trades VALUES (?)", "hello");
      expect(fakeDb.runLog).toHaveLength(1);
      expect(fakeDb.runLog[0].query).toBe("INSERT INTO trading_trades VALUES (?)");
      expect(fakeDb.runLog[0].binds).toEqual(["hello"]);
    });
  });

  describe("all", () => {
    it("returns empty array when table has no rows", async () => {
      const sql = createSqlStore("trading", makeEnv());
      const rows = await sql.all("SELECT * FROM trading_trades");
      expect(rows).toEqual([]);
    });

    it("returns seeded rows", async () => {
      const fakeDb = makeFakeD1();
      fakeDb.seed("trading_trades", [
        { id: 1, symbol: "VNM" },
        { id: 2, symbol: "FPT" },
      ]);
      const sql = createSqlStore("trading", { DB: fakeDb });
      const rows = await sql.all("SELECT * FROM trading_trades");
      expect(rows).toHaveLength(2);
      expect(rows[0].symbol).toBe("VNM");
    });
  });

  describe("first", () => {
    it("returns null when table is empty", async () => {
      const sql = createSqlStore("trading", makeEnv());
      const row = await sql.first("SELECT * FROM trading_trades WHERE id = ?", 99);
      expect(row).toBeNull();
    });

    it("returns the first seeded row", async () => {
      const fakeDb = makeFakeD1();
      fakeDb.seed("trading_trades", [{ id: 1, symbol: "VNM" }]);
      const sql = createSqlStore("trading", { DB: fakeDb });
      const row = await sql.first("SELECT * FROM trading_trades LIMIT 1");
      expect(row).toEqual({ id: 1, symbol: "VNM" });
    });
  });

  describe("batch", () => {
    it("executes multiple statements and returns array of result arrays", async () => {
      const fakeDb = makeFakeD1();
      fakeDb.seed("trading_trades", [{ id: 1 }]);
      const sql = createSqlStore("trading", { DB: fakeDb });
      const stmt1 = sql.prepare("SELECT * FROM trading_trades");
      const stmt2 = sql.prepare("SELECT * FROM trading_orders");
      const results = await sql.batch([stmt1, stmt2]);
      expect(Array.isArray(results)).toBe(true);
      expect(results).toHaveLength(2);
      expect(results[0]).toHaveLength(1); // one row seeded
      expect(results[1]).toHaveLength(0); // no rows
    });
  });
});
|
||||
290
tests/fakes/fake-d1.js
Normal file
290
tests/fakes/fake-d1.js
Normal file
@@ -0,0 +1,290 @@
|
||||
/**
|
||||
* @file fake-d1 — in-memory D1-like fake for unit tests.
|
||||
*
|
||||
* Supports a limited subset of SQL semantics needed by the test suite:
|
||||
* - INSERT: appends a row built from binds.
|
||||
* - SELECT: returns all rows for the matched table.
|
||||
* - SELECT DISTINCT <col>: returns unique values for a single column.
|
||||
* - SELECT id FROM <table> WHERE user_id = ? ORDER BY ts DESC: returns ids sorted by ts DESC.
|
||||
* - SELECT id FROM <table> ORDER BY ts DESC: global sort by ts DESC.
|
||||
* - DELETE WHERE id IN (?,...): removes specific rows by id.
|
||||
* - DELETE (generic): clears entire table (fallback for legacy tests).
|
||||
*
|
||||
* Supported operations:
|
||||
* prepare(query, ...binds) → fake prepared statement
|
||||
* .run() → { meta: { changes, last_row_id } }
|
||||
* .all() → { results: row[] }
|
||||
* .first() → row | null
|
||||
* batch(stmts) → array of { results: row[] }
|
||||
*
|
||||
* Usage in tests:
|
||||
* const fakeDb = makeFakeD1();
|
||||
* fakeDb.seed("mymod_foo", [{ id: 1, val: "x" }]);
|
||||
* const sql = createSqlStore("mymod", { DB: fakeDb });
|
||||
* const rows = await sql.all("SELECT * FROM mymod_foo");
|
||||
* // rows === [{ id: 1, val: "x" }]
|
||||
*/
|
||||
|
||||
export function makeFakeD1() {
|
||||
/** @type {Map<string, any[]>} table name → rows */
|
||||
const tables = new Map();
|
||||
|
||||
/** @type {Array<{query: string, binds: any[]}>} */
|
||||
const runLog = [];
|
||||
|
||||
/** @type {Array<{query: string, binds: any[]}>} */
|
||||
const queryLog = [];
|
||||
|
||||
/**
|
||||
* Pre-populate a table with rows.
|
||||
*
|
||||
* @param {string} table
|
||||
* @param {any[]} rows
|
||||
*/
|
||||
function seed(table, rows) {
|
||||
tables.set(table, [...rows]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the first table name token from a query string.
|
||||
* Handles simple patterns: FROM <table>, INTO <table>, UPDATE <table>.
|
||||
*
|
||||
* @param {string} query
|
||||
* @returns {string|null}
|
||||
*/
|
||||
function extractTable(query) {
|
||||
const normalized = query.replace(/\s+/g, " ").trim();
|
||||
const m =
|
||||
normalized.match(/\bFROM\s+(\w+)/i) ||
|
||||
normalized.match(/\bINTO\s+(\w+)/i) ||
|
||||
normalized.match(/\bUPDATE\s+(\w+)/i) ||
|
||||
normalized.match(/\bTABLE\s+(\w+)/i);
|
||||
return m ? m[1] : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse DELETE WHERE id IN (...) — returns the set of ids to delete, or null
|
||||
* if the query doesn't match this pattern (fall back to clear-all).
|
||||
*
|
||||
* Matches patterns like:
|
||||
* DELETE FROM t WHERE id IN (?,?,?)
|
||||
* DELETE FROM t WHERE id IN (SELECT id FROM t ...) — not supported, returns null
|
||||
*
|
||||
* @param {string} query
|
||||
* @param {any[]} binds
|
||||
* @returns {Set<any>|null}
|
||||
*/
|
||||
function parseDeleteIds(query, binds) {
|
||||
const normalized = query.replace(/\s+/g, " ").trim();
|
||||
// Detect DELETE ... WHERE id IN (?,?,?) with only ? placeholders (no subquery).
|
||||
const m = normalized.match(/\bWHERE\s+id\s+IN\s*\(([^)]+)\)/i);
|
||||
if (!m) return null;
|
||||
|
||||
const inner = m[1].trim();
|
||||
// If inner contains SELECT, it's a subquery — not supported in fake.
|
||||
if (/\bSELECT\b/i.test(inner)) return null;
|
||||
|
||||
// Count placeholders and consume from binds.
|
||||
const placeholders = inner.split(",").map((s) => s.trim());
|
||||
if (placeholders.every((p) => p === "?")) {
|
||||
return new Set(binds.slice(0, placeholders.length));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Resolve a raw token from a regex match: if it's "?" consume the next bind
 * value from the iterator; otherwise parse it as a number literal.
 *
 * @param {string} raw - captured token from regex (e.g. "?" or "-1" or "10")
 * @param {Iterator<any>} bindIter - iterator over remaining binds
 * @returns {number}
 */
function resolveNumericToken(raw, bindIter) {
  // Non-placeholder tokens are plain numeric literals.
  if (raw !== "?") return Number(raw);
  const { done, value } = bindIter.next();
  // An exhausted bind list resolves to 0 rather than NaN.
  return done ? 0 : Number(value);
}
|
||||
|
||||
/**
 * Apply SQL LIMIT/OFFSET semantics to an already-sorted row list.
 *
 * Tokens may be numeric literals OR "?" bound parameters; "?" values are
 * consumed from bindIter in LIMIT-then-OFFSET order, matching their order
 * of appearance in the query. A negative limit (e.g. -1) means "all rows".
 *
 * @param {any[]} sorted - rows already in final order
 * @param {string|undefined} rawLimit - captured LIMIT token, if present
 * @param {string|undefined} rawOffset - captured OFFSET token, if present
 * @param {Iterator<any>} bindIter - iterator over the remaining binds
 * @returns {any[]}
 */
function applyLimitOffset(sorted, rawLimit, rawOffset, bindIter) {
  let offset = 0;
  let limit;
  if (rawLimit !== undefined) {
    limit = resolveNumericToken(rawLimit, bindIter);
    if (rawOffset !== undefined) {
      offset = resolveNumericToken(rawOffset, bindIter);
    }
  }

  let out = sorted;
  if (offset > 0) out = out.slice(offset);
  // Negative limit (e.g. -1) = all rows; skip slicing.
  if (limit !== undefined && limit >= 0) out = out.slice(0, limit);
  return out;
}

/**
 * Execute a SELECT query with limited semantic understanding.
 * Handles:
 *   - SELECT DISTINCT user_id FROM <table>
 *   - SELECT id FROM <table> WHERE user_id = ? ORDER BY ts DESC [LIMIT <n|?> [OFFSET <n|?>]]
 *   - SELECT id FROM <table> ORDER BY ts DESC [LIMIT <n|?> [OFFSET <n|?>]]
 *   - SELECT * / general → returns all rows
 *
 * LIMIT/OFFSET tokens may be numeric literals OR "?" bound parameters.
 *
 * @param {string} query
 * @param {any[]} binds
 * @returns {any[]}
 */
function executeSelect(query, binds) {
  const normalized = query.replace(/\s+/g, " ").trim();
  const table = extractTable(normalized);
  const rows = table ? (tables.get(table) ?? []) : [];

  // SELECT DISTINCT user_id FROM <table>
  if (/SELECT\s+DISTINCT\s+user_id\b/i.test(normalized)) {
    const seen = new Set();
    const result = [];
    for (const row of rows) {
      if (!seen.has(row.user_id)) {
        seen.add(row.user_id);
        result.push({ user_id: row.user_id });
      }
    }
    return result;
  }

  // SELECT id FROM <table> WHERE user_id = ? ORDER BY ts DESC [LIMIT <n|?> [OFFSET <n|?>]]
  // Binds layout: [userId, ...optional LIMIT bind, ...optional OFFSET bind]
  const whereUserRe =
    /SELECT\s+id\s+FROM\s+\w+\s+WHERE\s+user_id\s*=\s*\?\s+ORDER\s+BY\s+ts\s+DESC(?:\s+LIMIT\s+(\S+)(?:\s+OFFSET\s+(\S+))?)?/i;
  const whereUserMatch = normalized.match(whereUserRe);
  if (whereUserMatch) {
    const userId = binds[0];
    // filter() copies, so sorting in place here never mutates the stored rows.
    const sorted = rows
      .filter((r) => r.user_id === userId)
      .sort((a, b) => b.ts - a.ts);
    // Binds after userId start at index 1.
    const limited = applyLimitOffset(
      sorted,
      whereUserMatch[1],
      whereUserMatch[2],
      binds.slice(1)[Symbol.iterator](),
    );
    return limited.map((r) => ({ id: r.id }));
  }

  // SELECT id FROM <table> ORDER BY ts DESC [LIMIT <n|?> [OFFSET <n|?>]]
  const globalOrderRe =
    /SELECT\s+id\s+FROM\s+\w+\s+ORDER\s+BY\s+ts\s+DESC(?:\s+LIMIT\s+(\S+)(?:\s+OFFSET\s+(\S+))?)?/i;
  const globalOrderMatch = normalized.match(globalOrderRe);
  if (globalOrderMatch) {
    const sorted = [...rows].sort((a, b) => b.ts - a.ts);
    const limited = applyLimitOffset(
      sorted,
      globalOrderMatch[1],
      globalOrderMatch[2],
      binds[Symbol.iterator](),
    );
    return limited.map((r) => ({ id: r.id }));
  }

  // Generic SELECT → return all rows.
  return rows;
}
|
||||
|
||||
/**
 * Build a fake prepared statement mirroring D1's prepare/bind/run/all/first
 * surface. Write paths (run) mutate the closure `tables` map and append to
 * `runLog`; read paths (all/first) delegate to executeSelect and append to
 * `queryLog`.
 *
 * @param {string} query
 * @param {any[]} binds
 */
function makePrepared(query, binds) {
  return {
    bind(...extraBinds) {
      // Binding is immutable: each call yields a fresh statement.
      return makePrepared(query, binds.concat(extraBinds));
    },

    async run() {
      runLog.push({ query, binds });
      const table = extractTable(query);
      const upper = query.trim().toUpperCase();

      // Simulate INSERT: push a row built from binds.
      if (table && upper.startsWith("INSERT")) {
        const current = tables.get(table) ?? [];
        tables.set(table, [...current, { _binds: binds }]);
        return { meta: { changes: 1, last_row_id: current.length + 1 } };
      }

      // DELETE: check for WHERE id IN (...) first, otherwise clear table.
      if (table && upper.startsWith("DELETE")) {
        const current = tables.get(table) ?? [];
        const targetIds = parseDeleteIds(query, binds);
        if (targetIds === null) {
          // Fallback: clear all (naive — legacy tests rely on this).
          tables.set(table, []);
          return { meta: { changes: current.length, last_row_id: 0 } };
        }
        // Targeted delete by id set.
        const kept = current.filter((row) => !targetIds.has(row.id));
        tables.set(table, kept);
        return {
          meta: { changes: current.length - kept.length, last_row_id: 0 },
        };
      }

      // Anything else (UPDATE, DDL, ...) is a no-op in the fake.
      return { meta: { changes: 0, last_row_id: 0 } };
    },

    async all() {
      queryLog.push({ query, binds });
      return { results: executeSelect(query, binds) };
    },

    async first() {
      queryLog.push({ query, binds });
      const rows = executeSelect(query, binds);
      return rows.length > 0 ? rows[0] : null;
    },
  };
}
|
||||
|
||||
return {
  // Test-inspection surface: the backing table map and statement logs are
  // exposed alongside the D1-like API so tests can assert on what ran.
  tables,   // Map of table name → row array (the fake's storage)
  runLog,   // every { query, binds } executed via run()
  queryLog, // every { query, binds } executed via all()/first()
  seed,     // presumably pre-populates `tables`; defined above — TODO confirm

  /**
   * D1Database.prepare() — returns a fake prepared statement.
   *
   * @param {string} query
   * @returns {ReturnType<typeof makePrepared>}
   */
  prepare(query) {
    return makePrepared(query, []);
  },

  /**
   * D1Database.batch() — runs each statement's all() and collects results.
   *
   * NOTE(review): this delegates to all() (the read path), so batched
   * INSERT/DELETE statements are NOT applied to `tables` — confirm callers
   * only batch reads, or route writes through run().
   *
   * @param {Array<ReturnType<typeof makePrepared>>} statements
   * @returns {Promise<Array<{results: any[]}>>}
   */
  async batch(statements) {
    return Promise.all(statements.map((s) => s.all()));
  },
};
}
|
||||
@@ -17,6 +17,24 @@ binding = "KV"
|
||||
id = "REPLACE_ME"
|
||||
preview_id = "REPLACE_ME"
|
||||
|
||||
# D1 database for module persistent storage. Each module prefixes its tables
|
||||
# with `{moduleName}_` (e.g. `trading_trades`). Migrations are applied via
|
||||
# `npm run db:migrate` (chained into `npm run deploy`).
|
||||
# Create with: npx wrangler d1 create miti99bot-db
|
||||
# then replace REPLACE_ME_D1_UUID below with the returned database_id.
|
||||
[[d1_databases]]
|
||||
binding = "DB"
|
||||
database_name = "miti99bot-db"
|
||||
database_id = "REPLACE_ME_D1_UUID"
|
||||
|
||||
# Cron Triggers — union of all schedules declared by modules.
|
||||
# When adding a module with cron entries, append its schedule(s) here.
|
||||
# See docs/adding-a-module.md for the full module author workflow.
|
||||
# Local testing: curl "http://localhost:8787/__scheduled?cron=0+17+*+*+*"
|
||||
# (requires `wrangler dev --test-scheduled`)
|
||||
[triggers]
|
||||
crons = ["0 17 * * *"]
|
||||
|
||||
# Secrets (set via `wrangler secret put <name>`, NOT in this file):
|
||||
# TELEGRAM_BOT_TOKEN — bot token from @BotFather
|
||||
# TELEGRAM_WEBHOOK_SECRET — arbitrary high-entropy string, also set in .env.deploy
|
||||
|
||||
Reference in New Issue
Block a user