Files
miti99bot/tests/fakes/fake-ai.js
T
tiennm99 f6ab94ffb0 feat(twentyq): LLM-generated category + initial hint from bare keyword seeds
Previously seeds carried hand-curated {category, target, initialHint}.
Now SEEDS is a flat string[] of keywords — at round-start, the model
generates {category, initialHint} on the fly. Benefits:
- adding a seed is trivial (just append a word)
- every round gets a fresh cryptic opener (varies across plays of the
  same word)
- HINT STYLE rules apply to the opening hint too, so the initial clue
  isn't a definitional giveaway

Implementation:
- prompts.buildStartRoundPrompt(target) — with good/bad examples
- ai-client.generateRoundStart(env, target) — same JSON-in-content
  approach as judge(), with defensive fallbacks + redactSecret
- handlers.startFreshGame now async; surfaces roundstart errors via the
  existing UPSTREAM_FAIL path

Tests: 449 pass (5 new for generateRoundStart, 1 for roundstart error path).
2026-04-24 16:26:55 +07:00

40 lines
1.2 KiB
JavaScript

/**
* @file fake-ai — minimal stub for the Workers AI binding (env.AI).
*
* Real shape: `{ run(modelId, body) -> Promise<any> }`. Tests configure the
* mock via `mockJudgement(ai, { is_guess, answer, hint })` to return a
 * Workers-AI-style { response: "<json-line>" } payload that
* ai-client.extractText + parseJudgementJson consume.
*/
import { vi } from "vitest";
/** Build a fresh fake AI binding: `{ run }` where `run` is a vitest mock fn. */
export function makeFakeAi() {
  const run = vi.fn();
  return { run };
}
/**
 * Queue a judge-style payload on the next `ai.run` call: the `.response`
 * field carries the canonical one-line JSON `{ is_guess, answer, hint }`
 * that the judge's parser consumes.
 */
export function mockJudgement(ai, { is_guess = false, answer = "no", hint = "default hint" } = {}) {
  const body = JSON.stringify({ is_guess, answer, hint });
  ai.run.mockResolvedValueOnce({ response: body });
}
/**
 * Queue a round-start payload on the next `ai.run` call: `.response`
 * holds one-line JSON `{ category, initialHint }`.
 */
export function mockRoundStart(ai, { category = "object", initialHint = "cryptic clue" } = {}) {
  const payload = { category, initialHint };
  ai.run.mockResolvedValueOnce({ response: JSON.stringify(payload) });
}
/** Make the next `ai.run` call reject, simulating a Workers AI outage. */
export function mockFailure(ai, err = new Error("AI down")) {
  const failure = err;
  ai.run.mockRejectedValueOnce(failure);
}