This commit is contained in:
decolua
2026-04-11 11:34:09 +07:00
parent ed17a8ffac
commit 875a1282ea
5 changed files with 102 additions and 34 deletions

View File

@@ -1,8 +1,62 @@
import { createHash, randomUUID } from "crypto";
import { BaseExecutor } from "./base.js";
import { CODEX_DEFAULT_INSTRUCTIONS } from "../config/codexInstructions.js";
import { PROVIDERS } from "../config/providers.js";
import { normalizeResponsesInput } from "../translator/helpers/responsesApiHelper.js";
// In-memory map: hash(first assistant content) → { sessionId, lastUsed }
const SESSION_TTL_MS = 60 * 60 * 1000; // 1 hour
const assistantSessionMap = new Map();
// Short, stable fingerprint of a text blob: the first 16 hex chars of its
// SHA-256 digest. Used as the map key for conversation session lookup.
function hashContent(text) {
  const digest = createHash("sha256").update(text).digest("hex");
  return digest.slice(0, 16);
}
// Generate a fresh session id.
// Uses crypto.randomUUID() for collision-resistant randomness — Math.random()
// is not unique enough for many concurrent sessions — while keeping the
// base36 timestamp prefix so ids stay roughly time-sortable for debugging.
function generateSessionId() {
  return `sess_${Date.now().toString(36)}_${randomUUID().slice(0, 8)}`;
}
// Extract the text content from an input item.
// Accepts either a plain string `content` or an array of content parts
// (each part may carry `text` or `output`); returns "" for anything else.
function extractItemText(item) {
  if (!item) return "";
  if (typeof item.content === "string") return item.content;
  if (Array.isArray(item.content)) {
    // Null-safe per part: client-supplied arrays may contain malformed
    // entries, and `c.text` on a null part would throw.
    return item.content
      .map(c => c?.text || c?.output || "")
      .filter(Boolean)
      .join("");
  }
  return "";
}
// Resolve a conversation-stable session_id from the first assistant message
// in the input history. The first assistant reply is identical across all
// follow-up turns of the same conversation, so its hash is a stable key.
// Turn 1 (no assistant message yet) always gets a fresh id.
function resolveConversationSessionId(input) {
  if (!Array.isArray(input) || input.length === 0) return generateSessionId();
  // Null-safe: client-supplied arrays may contain malformed entries,
  // and `item.role` on a null entry would throw.
  const firstAssistant = input.find(item => item?.role === "assistant");
  if (!firstAssistant) return generateSessionId(); // Turn 1: no assistant yet
  const text = extractItemText(firstAssistant);
  if (!text) return generateSessionId();
  const hash = hashContent(text);
  const entry = assistantSessionMap.get(hash);
  if (entry) {
    entry.lastUsed = Date.now(); // refresh TTL on reuse
    return entry.sessionId;
  }
  const sessionId = generateSessionId();
  assistantSessionMap.set(hash, { sessionId, lastUsed: Date.now() });
  return sessionId;
}
// Periodically evict session-map entries idle longer than SESSION_TTL_MS.
const sessionCleanupTimer = setInterval(() => {
  const now = Date.now();
  for (const [key, entry] of assistantSessionMap) {
    if (now - entry.lastUsed > SESSION_TTL_MS) assistantSessionMap.delete(key);
  }
}, 10 * 60 * 1000);
// Housekeeping only: don't let this timer keep the Node process alive on
// shutdown (optional-chained in case a non-Node environment lacks unref).
sessionCleanupTimer.unref?.();
/**
* Codex Executor - handles OpenAI Codex API (Responses API format)
* Automatically injects default instructions if missing
@@ -10,14 +64,16 @@ import { normalizeResponsesInput } from "../translator/helpers/responsesApiHelpe
export class CodexExecutor extends BaseExecutor {
constructor() {
// Register this executor under the "codex" key with its provider config.
super("codex", PROVIDERS.codex);
// Conversation session id; set per-request by transformRequest and read
// afterwards by buildHeaders (transformRequest runs first).
this._currentSessionId = null;
}
/**
* Override headers to add session_id per request
* Override headers to add session_id per conversation
* transformRequest runs BEFORE buildHeaders, sets this._currentSessionId
*/
buildHeaders(credentials, stream = true) {
const headers = super.buildHeaders(credentials, stream);
headers["session_id"] = `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
headers["session_id"] = this._currentSessionId || credentials?.connectionId || "default";
return headers;
}
@@ -25,6 +81,8 @@ export class CodexExecutor extends BaseExecutor {
* Transform request before sending - inject default instructions if missing
*/
transformRequest(model, body, stream, credentials) {
// Resolve conversation-stable session_id from input history
this._currentSessionId = resolveConversationSessionId(body.input);
// Convert string input to array format (Codex API requires input as array)
const normalized = normalizeResponsesInput(body.input);
if (normalized) body.input = normalized;

View File

@@ -3,9 +3,10 @@
// Anthropic tool_use.id must match: ^[a-zA-Z0-9_-]+$
const TOOL_ID_PATTERN = /^[a-zA-Z0-9_-]+$/;
// Generate a deterministic tool call ID from message/tool-call position plus
// the tool name (cache-friendly: identical conversations yield identical ids,
// unlike the old Date.now()/Math.random() scheme). Always valid for
// Anthropic, whose tool_use.id must match ^[a-zA-Z0-9_-]+$.
export function generateToolCallId(msgIndex = 0, tcIndex = 0, toolName = "") {
  // Strip any character Anthropic rejects from the tool-name suffix.
  const name = toolName ? `_${toolName.replace(/[^a-zA-Z0-9_-]/g, "")}` : "";
  return `call_msg${msgIndex}_tc${tcIndex}${name}`;
}
// Sanitize ID to match Anthropic pattern: keep only alphanumeric, underscore, hyphen
@@ -19,13 +20,15 @@ function sanitizeToolId(id) {
export function ensureToolCallIds(body) {
if (!body.messages || !Array.isArray(body.messages)) return body;
for (const msg of body.messages) {
for (let i = 0; i < body.messages.length; i++) {
const msg = body.messages[i];
if (msg.role === "assistant" && msg.tool_calls && Array.isArray(msg.tool_calls)) {
for (const tc of msg.tool_calls) {
for (let j = 0; j < msg.tool_calls.length; j++) {
const tc = msg.tool_calls[j];
// Validate or regenerate ID for Anthropic compatibility
if (!tc.id || !TOOL_ID_PATTERN.test(tc.id)) {
const sanitized = sanitizeToolId(tc.id);
tc.id = sanitized || generateToolCallId();
tc.id = sanitized || generateToolCallId(i, j, tc.function?.name);
}
if (!tc.type) {
tc.type = "function";
@@ -40,20 +43,21 @@ export function ensureToolCallIds(body) {
// Validate tool_call_id in tool messages (role: "tool")
if (msg.role === "tool" && msg.tool_call_id && !TOOL_ID_PATTERN.test(msg.tool_call_id)) {
const sanitized = sanitizeToolId(msg.tool_call_id);
msg.tool_call_id = sanitized || generateToolCallId();
msg.tool_call_id = sanitized || generateToolCallId(i, 0);
}
// Also validate tool_use blocks in content (Claude format)
if (Array.isArray(msg.content)) {
for (const block of msg.content) {
for (let k = 0; k < msg.content.length; k++) {
const block = msg.content[k];
if (block.type === "tool_use" && block.id && !TOOL_ID_PATTERN.test(block.id)) {
const sanitized = sanitizeToolId(block.id);
block.id = sanitized || generateToolCallId();
block.id = sanitized || generateToolCallId(i, k, block.name);
}
// Validate tool_use_id in tool_result blocks
if (block.type === "tool_result" && block.tool_use_id && !TOOL_ID_PATTERN.test(block.tool_use_id)) {
const sanitized = sanitizeToolId(block.tool_use_id);
block.tool_use_id = sanitized || generateToolCallId();
block.tool_use_id = sanitized || generateToolCallId(i, k);
}
}
}

View File

@@ -496,27 +496,21 @@ export function openaiResponsesToOpenAIResponse(chunk, state) {
if (responseUsage && typeof responseUsage === "object") {
const inputTokens = responseUsage.input_tokens || responseUsage.prompt_tokens || 0;
const outputTokens = responseUsage.output_tokens || responseUsage.completion_tokens || 0;
const cacheReadTokens = responseUsage.cache_read_input_tokens || 0;
const cacheCreationTokens = responseUsage.cache_creation_input_tokens || 0;
// prompt_tokens = input_tokens + cache_read + cache_creation (all prompt-side tokens)
const promptTokens = inputTokens + cacheReadTokens + cacheCreationTokens;
// OpenAI Responses API: input_tokens already includes cached_tokens
// Cache info is in input_tokens_details.cached_tokens
const cacheReadTokens = responseUsage.input_tokens_details?.cached_tokens || responseUsage.cache_read_input_tokens || 0;
state.usage = {
prompt_tokens: promptTokens,
prompt_tokens: inputTokens,
completion_tokens: outputTokens,
total_tokens: promptTokens + outputTokens
total_tokens: inputTokens + outputTokens
};
// Add prompt_tokens_details if cache tokens exist
if (cacheReadTokens > 0 || cacheCreationTokens > 0) {
state.usage.prompt_tokens_details = {};
if (cacheReadTokens > 0) {
state.usage.prompt_tokens_details.cached_tokens = cacheReadTokens;
}
if (cacheCreationTokens > 0) {
state.usage.prompt_tokens_details.cache_creation_tokens = cacheCreationTokens;
}
if (cacheReadTokens > 0) {
state.usage.prompt_tokens_details = {
cached_tokens: cacheReadTokens
};
}
}

View File

@@ -130,6 +130,14 @@ export function normalizeUsage(usage) {
assignNumber("cached_tokens", usage?.cached_tokens);
assignNumber("reasoning_tokens", usage?.reasoning_tokens);
// Preserve nested details objects for OpenAI format forwarding
if (usage?.prompt_tokens_details && typeof usage.prompt_tokens_details === "object") {
normalized.prompt_tokens_details = usage.prompt_tokens_details;
}
if (usage?.completion_tokens_details && typeof usage.completion_tokens_details === "object") {
normalized.completion_tokens_details = usage.completion_tokens_details;
}
if (Object.keys(normalized).length === 0) return null;
return normalized;
}
@@ -177,21 +185,25 @@ export function extractUsage(chunk) {
// OpenAI Responses API format (response.completed or response.done)
if ((chunk.type === "response.completed" || chunk.type === "response.done") && chunk.response?.usage && typeof chunk.response.usage === "object") {
const usage = chunk.response.usage;
const cachedTokens = usage.input_tokens_details?.cached_tokens;
return normalizeUsage({
prompt_tokens: usage.input_tokens || usage.prompt_tokens || 0,
completion_tokens: usage.output_tokens || usage.completion_tokens || 0,
cached_tokens: usage.input_tokens_details?.cached_tokens,
reasoning_tokens: usage.output_tokens_details?.reasoning_tokens
cached_tokens: cachedTokens,
reasoning_tokens: usage.output_tokens_details?.reasoning_tokens,
prompt_tokens_details: cachedTokens ? { cached_tokens: cachedTokens } : undefined
});
}
// OpenAI format
// OpenAI format (also covers DeepSeek which uses prompt_cache_hit_tokens)
if (chunk.usage && typeof chunk.usage === "object" && chunk.usage.prompt_tokens !== undefined) {
return normalizeUsage({
prompt_tokens: chunk.usage.prompt_tokens,
completion_tokens: chunk.usage.completion_tokens || 0,
cached_tokens: chunk.usage.prompt_tokens_details?.cached_tokens,
reasoning_tokens: chunk.usage.completion_tokens_details?.reasoning_tokens
cached_tokens: chunk.usage.prompt_tokens_details?.cached_tokens || chunk.usage.prompt_cache_hit_tokens,
reasoning_tokens: chunk.usage.completion_tokens_details?.reasoning_tokens,
prompt_tokens_details: chunk.usage.prompt_tokens_details,
completion_tokens_details: chunk.usage.completion_tokens_details
});
}
@@ -301,7 +313,7 @@ export function logUsage(provider, usage, model = null, connectionId = null, api
}
// Add cache info if present (unified from different formats)
const cacheRead = usage.cache_read_input_tokens || usage.cached_tokens;
const cacheRead = usage.cache_read_input_tokens || usage.cached_tokens || usage.prompt_tokens_details?.cached_tokens;
if (cacheRead) msg += ` | cache_read=${cacheRead}`;
const cacheCreation = usage.cache_creation_input_tokens;

View File

@@ -1,6 +1,6 @@
{
"name": "9router-app",
"version": "0.3.83",
"version": "0.3.85",
"description": "9Router web dashboard",
"private": true,
"scripts": {