add GPT 5.5 model

This commit is contained in:
decolua
2026-04-24 09:51:05 +07:00
parent f2e7a98ce0
commit 5abc9e5c74
4 changed files with 19 additions and 2 deletions

View File

@@ -16,6 +16,7 @@ export const PROVIDER_MODELS = {
{ id: "claude-haiku-4-5-20251001", name: "Claude 4.5 Haiku" }, { id: "claude-haiku-4-5-20251001", name: "Claude 4.5 Haiku" },
], ],
cx: [ // OpenAI Codex cx: [ // OpenAI Codex
{ id: "gpt-5.5", name: "GPT 5.5" },
{ id: "gpt-5.4", name: "GPT 5.4" }, { id: "gpt-5.4", name: "GPT 5.4" },
// GPT 5.3 Codex - all thinking levels // GPT 5.3 Codex - all thinking levels
{ id: "gpt-5.3-codex", name: "GPT 5.3 Codex" }, { id: "gpt-5.3-codex", name: "GPT 5.3 Codex" },

View File

@@ -227,7 +227,7 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
// True non-streaming response // True non-streaming response
if (!stream) { if (!stream) {
const result = await handleNonStreamingResponse({ ...sharedCtx, providerResponse, sourceFormat, targetFormat, reqLogger, trackDone, appendLog }); const result = await handleNonStreamingResponse({ ...sharedCtx, providerResponse, sourceFormat, targetFormat, reqLogger, toolNameMap, trackDone, appendLog });
streamController.handleComplete(); streamController.handleComplete();
return result; return result;
} }

View File

@@ -7,6 +7,7 @@ import { HTTP_STATUS } from "../../config/runtimeConfig.js";
import { parseSSEToOpenAIResponse } from "./sseToJsonHandler.js"; import { parseSSEToOpenAIResponse } from "./sseToJsonHandler.js";
import { buildRequestDetail, extractRequestConfig, extractUsageFromResponse, saveUsageStats } from "./requestDetail.js"; import { buildRequestDetail, extractRequestConfig, extractUsageFromResponse, saveUsageStats } from "./requestDetail.js";
import { appendRequestLog, saveRequestDetail } from "@/lib/usageDb.js"; import { appendRequestLog, saveRequestDetail } from "@/lib/usageDb.js";
import { decloakToolNames } from "../../utils/claudeCloaking.js";
/** /**
* Translate non-streaming response body from provider format → OpenAI format. * Translate non-streaming response body from provider format → OpenAI format.
@@ -127,7 +128,7 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
/** /**
* Handle non-streaming response from provider. * Handle non-streaming response from provider.
*/ */
export async function handleNonStreamingResponse({ providerResponse, provider, model, sourceFormat, targetFormat, body, stream, translatedBody, finalBody, requestStartTime, connectionId, apiKey, clientRawRequest, onRequestSuccess, reqLogger, trackDone, appendLog }) { export async function handleNonStreamingResponse({ providerResponse, provider, model, sourceFormat, targetFormat, body, stream, translatedBody, finalBody, requestStartTime, connectionId, apiKey, clientRawRequest, onRequestSuccess, reqLogger, toolNameMap, trackDone, appendLog }) {
trackDone(); trackDone();
const contentType = providerResponse.headers.get("content-type") || ""; const contentType = providerResponse.headers.get("content-type") || "";
let responseBody; let responseBody;
@@ -153,6 +154,9 @@ export async function handleNonStreamingResponse({ providerResponse, provider, m
reqLogger.logProviderResponse(providerResponse.status, providerResponse.statusText, providerResponse.headers, responseBody); reqLogger.logProviderResponse(providerResponse.status, providerResponse.statusText, providerResponse.headers, responseBody);
if (onRequestSuccess) await onRequestSuccess(); if (onRequestSuccess) await onRequestSuccess();
// Decloak tool_use names once on raw Claude body, before any translation (INPUT side)
responseBody = decloakToolNames(responseBody, toolNameMap);
const usage = extractUsageFromResponse(responseBody); const usage = extractUsageFromResponse(responseBody);
appendLog({ tokens: usage, status: "200 OK" }); appendLog({ tokens: usage, status: "200 OK" });
saveUsageStats({ provider, model, tokens: usage, connectionId, apiKey, endpoint: clientRawRequest?.endpoint }); saveUsageStats({ provider, model, tokens: usage, connectionId, apiKey, endpoint: clientRawRequest?.endpoint });

View File

@@ -66,6 +66,18 @@ export function cloakClaudeTools(body) {
}; };
} }
/**
 * Restore the original tool_use block names in a non-streaming Claude
 * response body (INPUT side), using the cloaked→original name mapping.
 *
 * @param {object} body - Parsed Claude response body; expected to carry a
 *   `content` array of blocks. Returned untouched when it does not.
 * @param {Map<string, string>} toolNameMap - Cloaked name → original name.
 * @returns {object} A new body object with renamed tool_use blocks, or the
 *   input `body` unchanged when there is nothing to decloak.
 */
export function decloakToolNames(body, toolNameMap) {
  // Nothing to do without mappings or a well-formed content array.
  if (!toolNameMap?.size || !Array.isArray(body?.content)) return body;
  const restored = [];
  for (const block of body.content) {
    if (block?.type === "tool_use" && toolNameMap.has(block.name)) {
      // Shallow-copy the block so the provider's response is never mutated.
      restored.push({ ...block, name: toolNameMap.get(block.name) });
    } else {
      restored.push(block);
    }
  }
  return { ...body, content: restored };
}
// CC decoy tools — Claude Code native tool names, marked unavailable // CC decoy tools — Claude Code native tool names, marked unavailable
const CC_DECOY_TOOLS = [ const CC_DECOY_TOOLS = [
{ name: "Task", description: "This tool is currently unavailable.", input_schema: { type: "object", properties: {} } }, { name: "Task", description: "This tool is currently unavailable.", input_schema: { type: "object", properties: {} } },