refactor: Modularize proxy and add tool_use support

- Split monolithic messages.js (223 lines) into 5 focused modules
- Add full tool_use/tool_result round-trip translation
- Add x-api-key header authentication (Anthropic SDK default)
- Fix SSE Content-Type via Hono streamSSE helper
- Fix streaming usage tracking with stream_options.include_usage
- Add stop_reason mapping (end_turn, max_tokens, tool_use, stop_sequence)
- Forward stop_sequences to OpenAI stop parameter
- Handle system message as string or array of content blocks
- Use timing-safe XOR comparison for auth tokens
- Cache OpenAI client and model map across requests
- Sanitize error responses to prevent upstream detail leakage
- Use crypto.randomUUID() for unique message IDs
- Remove non-existent build/dev commands from vercel.json
This commit is contained in:
2026-04-05 11:47:06 +07:00
parent 415790a9f1
commit a1113e02aa
7 changed files with 432 additions and 202 deletions

31
src/openai-client.js Normal file
View File

@@ -0,0 +1,31 @@
import OpenAI from 'openai';
// Module-level caches so repeated requests reuse the same OpenAI client
// and parsed model map instead of rebuilding them on every request.
let cachedClient = null;
let cachedApiKey = null;
let cachedModelMap = null; // Map<string, string>: claude model -> openai model
let cachedModelMapRaw = null; // raw MODEL_MAP string the Map was parsed from

/**
 * Return a cached OpenAI client for the given environment, constructing a
 * new one only when the API key differs from the cached one (e.g. key
 * rotation between requests).
 * @param {{OPENAI_API_KEY: string}} env - request environment/bindings
 * @returns {OpenAI} shared client instance
 */
export function getOpenAIClient(env) {
  if (cachedClient && cachedApiKey === env.OPENAI_API_KEY) {
    return cachedClient;
  }
  cachedApiKey = env.OPENAI_API_KEY;
  cachedClient = new OpenAI({ apiKey: env.OPENAI_API_KEY });
  return cachedClient;
}

/**
 * Map a Claude model name to its OpenAI equivalent using env.MODEL_MAP, a
 * comma-separated list of `claudeModel:openaiModel` pairs. A pair without a
 * colon maps the name to itself; unknown models pass through unchanged.
 *
 * The parsed map is cached and rebuilt only when the raw MODEL_MAP string
 * changes. The cache is a `Map` rather than a plain object so that lookups
 * of keys like "constructor" or "toString" cannot accidentally resolve to
 * `Object.prototype` members and return a function instead of the name.
 *
 * @param {string} claudeModel - incoming Claude model identifier
 * @param {{MODEL_MAP?: string}} env - environment with optional MODEL_MAP
 * @returns {string} mapped OpenAI model, or `claudeModel` when unmapped
 */
export function mapModel(claudeModel, env) {
  const raw = env.MODEL_MAP || '';
  if (raw !== cachedModelMapRaw) {
    cachedModelMapRaw = raw;
    cachedModelMap = new Map(
      raw.split(',').filter(Boolean).map((p) => {
        const trimmed = p.trim();
        const idx = trimmed.indexOf(':');
        // `idx > 0` rejects a leading colon; a colon-less entry maps to itself.
        return idx > 0 ? [trimmed.slice(0, idx), trimmed.slice(idx + 1)] : [trimmed, trimmed];
      })
    );
  }
  // `||` (not `??`) preserves the original fallback: an empty mapped value
  // (e.g. "model:") also falls through to the incoming model name.
  return cachedModelMap.get(claudeModel) || claudeModel;
}