refactor: restructure translator from from-openai/to-openai to request/response folders

This commit is contained in:
decolua
2026-01-09 17:14:51 +07:00
parent 23181afb63
commit 18533505ef
19 changed files with 893 additions and 1034 deletions

4
.gitignore vendored
View File

@@ -55,3 +55,7 @@ RM.md
cursor/*
PUBLIC.md
scripts/*
Thanks.md
package.json
PUBLIC.en.md
PR/*

View File

@@ -70,6 +70,9 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
let translatedBody = body;
translatedBody = translateRequest(sourceFormat, targetFormat, model, body, stream, credentials, provider);
// Extract toolNameMap for response translation (Claude OAuth)
const toolNameMap = translatedBody._toolNameMap;
delete translatedBody._toolNameMap;
// Update model in body
translatedBody.model = model;
@@ -251,7 +254,7 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
// Create transform stream with logger for streaming response
let transformStream;
if (needsTranslation(targetFormat, sourceFormat)) {
transformStream = createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider, reqLogger);
transformStream = createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider, reqLogger, toolNameMap);
} else {
transformStream = createPassthroughStreamWithLogger(provider, reqLogger);
}

View File

@@ -1,348 +0,0 @@
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
// Create OpenAI chunk helper
// Build a minimal OpenAI chat.completion.chunk envelope around `delta`.
// `state.messageId` / `state.model` must already be initialized (done in
// the message_start branch of claudeToOpenAIResponse below).
function createChunk(state, delta, finishReason = null) {
  return {
    id: `chatcmpl-${state.messageId}`,
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: state.model,
    choices: [{
      index: 0,
      delta,
      finish_reason: finishReason
    }]
  };
}

// Convert one Claude SSE event into zero or more OpenAI stream chunks.
// `state` persists across all chunks of a single response and tracks the
// message id/model, the independent OpenAI tool_call index counter, the
// Claude-block-index -> tool_call map, and whether finish_reason was sent.
// Returns an array of OpenAI chunks, or null when the event produced none.
function claudeToOpenAIResponse(chunk, state) {
  if (!chunk) return null;
  const results = [];
  const event = chunk.type;
  switch (event) {
    case "message_start": {
      state.messageId = chunk.message?.id || `msg_${Date.now()}`;
      state.model = chunk.message?.model;
      state.toolCallIndex = 0; // Reset tool call counter for OpenAI format
      results.push(createChunk(state, { role: "assistant" }));
      break;
    }
    case "content_block_start": {
      const block = chunk.content_block;
      if (block?.type === "text") {
        state.textBlockStarted = true;
      } else if (block?.type === "thinking") {
        // Thinking is surfaced to OpenAI clients as inline <think> tags
        state.inThinkingBlock = true;
        state.currentBlockIndex = chunk.index;
        results.push(createChunk(state, { content: "<think>" }));
      } else if (block?.type === "tool_use") {
        // OpenAI format: tool_calls index must be independent and start from 0
        const toolCallIndex = state.toolCallIndex++;
        const toolCall = {
          index: toolCallIndex,
          id: block.id,
          type: "function",
          function: {
            name: block.name,
            arguments: ""
          }
        };
        // Map Claude content_block index to OpenAI tool_call index
        state.toolCalls.set(chunk.index, toolCall);
        results.push(createChunk(state, { tool_calls: [toolCall] }));
      }
      break;
    }
    case "content_block_delta": {
      const delta = chunk.delta;
      if (delta?.type === "text_delta" && delta.text) {
        results.push(createChunk(state, { content: delta.text }));
      } else if (delta?.type === "thinking_delta" && delta.thinking) {
        // Stream thinking content between the <think> ... </think> markers
        results.push(createChunk(state, { content: delta.thinking }));
      } else if (delta?.type === "input_json_delta" && delta.partial_json) {
        const toolCall = state.toolCalls.get(chunk.index);
        if (toolCall) {
          toolCall.function.arguments += delta.partial_json;
          // Include both index and id for better client compatibility
          results.push(createChunk(state, {
            tool_calls: [{
              index: toolCall.index,
              id: toolCall.id,
              function: { arguments: delta.partial_json }
            }]
          }));
        }
      }
      break;
    }
    case "content_block_stop": {
      if (state.inThinkingBlock && chunk.index === state.currentBlockIndex) {
        // Close the inline thinking marker
        results.push(createChunk(state, { content: "</think>" }));
        state.inThinkingBlock = false;
      }
      state.textBlockStarted = false;
      state.thinkingBlockStarted = false;
      break;
    }
    case "message_delta": {
      if (chunk.delta?.stop_reason) {
        state.finishReason = convertStopReason(chunk.delta.stop_reason);
        // Send the final chunk with finish_reason immediately
        results.push({
          id: `chatcmpl-${state.messageId}`,
          object: "chat.completion.chunk",
          created: Math.floor(Date.now() / 1000),
          model: state.model,
          choices: [{
            index: 0,
            delta: {},
            finish_reason: state.finishReason
          }]
        });
        state.finishReasonSent = true;
      }
      // Usage is now extracted in stream.js extractUsage()
      break;
    }
    case "message_stop": {
      // message_stop should guarantee a final chunk with finish_reason so the
      // client knows the response is complete — but only if message_delta did
      // not already send one.
      if (!state.finishReasonSent) {
        const finishReason = state.finishReason || (state.toolCalls?.size > 0 ? "tool_calls" : "stop");
        results.push({
          id: `chatcmpl-${state.messageId}`,
          object: "chat.completion.chunk",
          created: Math.floor(Date.now() / 1000),
          model: state.model,
          choices: [{
            index: 0,
            delta: {},
            finish_reason: finishReason
          }],
          // Attach usage only when the stream reported it
          ...(state.usage && {
            usage: {
              prompt_tokens: state.usage.input_tokens || 0,
              completion_tokens: state.usage.output_tokens || 0,
              total_tokens: (state.usage.input_tokens || 0) + (state.usage.output_tokens || 0)
            }
          })
        });
        state.finishReasonSent = true;
      }
      break;
    }
  }
  return results.length > 0 ? results : null;
}
// Helper: stop thinking block if started
// Emit a content_block_stop for an open thinking block; no-op otherwise.
function stopThinkingBlock(state, results) {
  if (state.thinkingBlockStarted) {
    results.push({ type: "content_block_stop", index: state.thinkingBlockIndex });
    state.thinkingBlockStarted = false;
  }
}
// Helper: stop text block if started
// Emit a content_block_stop for an open text block, guarding against
// double-close via the textBlockClosed latch.
function stopTextBlock(state, results) {
  if (state.textBlockStarted && !state.textBlockClosed) {
    state.textBlockClosed = true;
    results.push({ type: "content_block_stop", index: state.textBlockIndex });
    state.textBlockStarted = false;
  }
}
// Convert OpenAI stream chunk to Claude format
// Convert a single OpenAI stream chunk into the equivalent sequence of
// Claude SSE events. `state` persists across all chunks of one response and
// tracks which content blocks (thinking / text / tool_use) are currently
// open so that start/stop events are emitted exactly once per block.
// Returns an array of Claude events, or null when the chunk produced none.
function openaiToClaudeResponse(chunk, state) {
  // Only chunks carrying a first choice are meaningful
  if (!chunk || !chunk.choices?.[0]) return null;
  const results = [];
  const choice = chunk.choices[0];
  const delta = choice.delta;
  // First chunk - ALWAYS send message_start first
  if (!state.messageStartSent) {
    state.messageStartSent = true;
    state.messageId = chunk.id?.replace("chatcmpl-", "") || `msg_${Date.now()}`;
    // Guard against unusable ids (missing, truncated to "chat", or too
    // short); fall back to provider trace ids, then a timestamp
    if (!state.messageId || state.messageId === "chat" || state.messageId.length < 8) {
      state.messageId = chunk.extend_fields?.requestId ||
        chunk.extend_fields?.traceId ||
        `msg_${Date.now()}`;
    }
    state.model = chunk.model || "unknown";
    // Claude block indices are allocated sequentially from this counter
    state.nextBlockIndex = 0;
    results.push({
      type: "message_start",
      message: {
        id: state.messageId,
        type: "message",
        role: "assistant",
        model: state.model,
        content: [],
        stop_reason: null,
        stop_sequence: null,
        usage: { input_tokens: 0, output_tokens: 0 }
      }
    });
  }
  // Handle reasoning_content (thinking) - GLM, DeepSeek, etc.
  const reasoningContent = delta?.reasoning_content || delta?.reasoning;
  if (reasoningContent) {
    // Stop text block before thinking
    stopTextBlock(state, results);
    // Start thinking block if needed
    if (!state.thinkingBlockStarted) {
      state.thinkingBlockIndex = state.nextBlockIndex++;
      state.thinkingBlockStarted = true;
      results.push({
        type: "content_block_start",
        index: state.thinkingBlockIndex,
        content_block: { type: "thinking", thinking: "" }
      });
    }
    // Send thinking delta
    results.push({
      type: "content_block_delta",
      index: state.thinkingBlockIndex,
      delta: { type: "thinking_delta", thinking: reasoningContent }
    });
  }
  // Handle regular content
  if (delta?.content) {
    // Stop thinking block before text
    stopThinkingBlock(state, results);
    // Start text block if needed
    if (!state.textBlockStarted) {
      state.textBlockIndex = state.nextBlockIndex++;
      state.textBlockStarted = true;
      state.textBlockClosed = false;
      results.push({
        type: "content_block_start",
        index: state.textBlockIndex,
        content_block: { type: "text", text: "" }
      });
    }
    // Send text delta
    results.push({
      type: "content_block_delta",
      index: state.textBlockIndex,
      delta: { type: "text_delta", text: delta.content }
    });
  }
  // Tool calls: an entry with an id opens a new tool_use block; entries with
  // only arguments stream into the block previously opened for that index
  if (delta?.tool_calls) {
    for (const tc of delta.tool_calls) {
      const idx = tc.index ?? 0;
      if (tc.id) {
        // Stop thinking and text blocks before tool use
        stopThinkingBlock(state, results);
        stopTextBlock(state, results);
        // New tool call
        const toolBlockIndex = state.nextBlockIndex++;
        state.toolCalls.set(idx, { id: tc.id, name: tc.function?.name || "", blockIndex: toolBlockIndex });
        results.push({
          type: "content_block_start",
          index: toolBlockIndex,
          content_block: {
            type: "tool_use",
            id: tc.id,
            name: tc.function?.name || "",
            input: {}
          }
        });
      }
      if (tc.function?.arguments) {
        const toolInfo = state.toolCalls.get(idx);
        if (toolInfo) {
          results.push({
            type: "content_block_delta",
            index: toolInfo.blockIndex,
            delta: { type: "input_json_delta", partial_json: tc.function.arguments }
          });
        }
      }
    }
  }
  // Finish: close every open block, then emit message_delta + message_stop
  if (choice.finish_reason) {
    // Stop all open blocks
    stopThinkingBlock(state, results);
    stopTextBlock(state, results);
    // Close tool call blocks
    for (const [, toolInfo] of state.toolCalls) {
      results.push({
        type: "content_block_stop",
        index: toolInfo.blockIndex
      });
    }
    // NOTE(review): output_tokens is reported as 0 here — presumably real
    // usage is attached elsewhere in the pipeline; verify against callers
    results.push({
      type: "message_delta",
      delta: { stop_reason: convertFinishReason(choice.finish_reason) },
      usage: { output_tokens: 0 }
    });
    results.push({ type: "message_stop" });
  }
  return results.length > 0 ? results : null;
}
// Convert Claude stop_reason to OpenAI finish_reason
// Map a Claude stop_reason onto the OpenAI finish_reason vocabulary.
// Unknown reasons default to "stop".
function convertStopReason(reason) {
  const table = new Map([
    ["end_turn", "stop"],
    ["max_tokens", "length"],
    ["tool_use", "tool_calls"],
    ["stop_sequence", "stop"]
  ]);
  return table.get(reason) ?? "stop";
}
// Convert OpenAI finish_reason to Claude stop_reason
// Map an OpenAI finish_reason onto the Claude stop_reason vocabulary.
// Unknown reasons default to "end_turn".
function convertFinishReason(reason) {
  const table = new Map([
    ["stop", "end_turn"],
    ["length", "max_tokens"],
    ["tool_calls", "tool_use"]
  ]);
  return table.get(reason) ?? "end_turn";
}
// Register
register(FORMATS.CLAUDE, FORMATS.OPENAI, null, claudeToOpenAIResponse);
register(FORMATS.OPENAI, FORMATS.CLAUDE, null, openaiToClaudeResponse);

View File

@@ -1,469 +0,0 @@
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
import { DEFAULT_THINKING_GEMINI_SIGNATURE } from "../../config/defaultThinkingSignature.js";
import {
UNSUPPORTED_SCHEMA_CONSTRAINTS,
DEFAULT_SAFETY_SETTINGS,
convertOpenAIContentToParts,
extractTextContent,
tryParseJSON,
generateRequestId,
generateSessionId,
generateProjectId,
cleanJSONSchemaForAntigravity
} from "../helpers/geminiHelper.js";
// ============================================
// REQUEST TRANSLATORS: OpenAI -> Gemini/GeminiCLI/Antigravity
// ============================================
// Core: Convert OpenAI request to Gemini format (base for all variants)
// Core: convert an OpenAI chat request into the Gemini request shape.
// Shared base for all Gemini variants (standard, CLI, Antigravity).
// Maps generation parameters, converts the message history (system /
// user / assistant / tool-call turns), and translates tool declarations.
// `stream` is accepted for signature parity but not used here.
function openaiToGeminiBase(model, body, stream) {
  const result = {
    model: model,
    contents: [],
    generationConfig: {},
    safetySettings: DEFAULT_SAFETY_SETTINGS
  };
  // Generation config: copy only explicitly-set parameters
  if (body.temperature !== undefined) {
    result.generationConfig.temperature = body.temperature;
  }
  if (body.top_p !== undefined) {
    result.generationConfig.topP = body.top_p;
  }
  if (body.top_k !== undefined) {
    result.generationConfig.topK = body.top_k;
  }
  if (body.max_tokens !== undefined) {
    result.generationConfig.maxOutputTokens = body.max_tokens;
  }
  // Pass 1: build tool_call_id -> function name map from assistant turns
  const tcID2Name = {};
  if (body.messages && Array.isArray(body.messages)) {
    for (const msg of body.messages) {
      if (msg.role === "assistant" && msg.tool_calls) {
        for (const tc of msg.tool_calls) {
          if (tc.type === "function" && tc.id && tc.function?.name) {
            tcID2Name[tc.id] = tc.function.name;
          }
        }
      }
    }
  }
  // Pass 2: cache tool responses by tool_call_id so they can be re-attached
  // right after the assistant turn that requested them
  const toolResponses = {};
  if (body.messages && Array.isArray(body.messages)) {
    for (const msg of body.messages) {
      if (msg.role === "tool" && msg.tool_call_id) {
        toolResponses[msg.tool_call_id] = msg.content;
      }
    }
  }
  // Pass 3: convert messages in order
  if (body.messages && Array.isArray(body.messages)) {
    for (let i = 0; i < body.messages.length; i++) {
      const msg = body.messages[i];
      const role = msg.role;
      const content = msg.content;
      if (role === "system" && body.messages.length > 1) {
        // System prompt becomes systemInstruction (last one wins if repeated)
        result.systemInstruction = {
          role: "user",
          parts: [{ text: typeof content === "string" ? content : extractTextContent(content) }]
        };
      } else if (role === "user" || (role === "system" && body.messages.length === 1)) {
        // A lone system message is treated as the user turn
        const parts = convertOpenAIContentToParts(content);
        if (parts.length > 0) {
          result.contents.push({ role: "user", parts });
        }
      } else if (role === "assistant") {
        const parts = [];
        if (content) {
          const text = typeof content === "string" ? content : extractTextContent(content);
          if (text) {
            parts.push({ text });
          }
        }
        if (msg.tool_calls && Array.isArray(msg.tool_calls)) {
          const toolCallIds = [];
          for (const tc of msg.tool_calls) {
            if (tc.type !== "function") continue;
            const args = tryParseJSON(tc.function?.arguments || "{}");
            parts.push({
              thoughtSignature: DEFAULT_THINKING_GEMINI_SIGNATURE,
              functionCall: {
                id: tc.id,
                name: tc.function.name,
                args: args
              }
            });
            toolCallIds.push(tc.id);
          }
          if (parts.length > 0) {
            result.contents.push({ role: "model", parts });
          }
          // Append function responses - extract name from tool_call_id format "ToolName-timestamp-index"
          const toolParts = [];
          for (const fid of toolCallIds) {
            // Try to get name from tcID2Name map first, then extract from id format
            let name = tcID2Name[fid];
            if (!name) {
              // Extract name from id format: "ToolName-timestamp-index"
              const idParts = fid.split("-");
              if (idParts.length > 2) {
                name = idParts.slice(0, -2).join("-");
              } else {
                name = fid;
              }
            }
            // Missing responses fall back to an empty JSON object
            let resp = toolResponses[fid] || "{}";
            let parsedResp = tryParseJSON(resp);
            if (parsedResp === null) {
              parsedResp = { result: resp };
            } else if (typeof parsedResp !== "object") {
              parsedResp = { result: parsedResp };
            }
            // NOTE(review): parsedResp is wrapped again under `result` even
            // when it already is an object — confirm the backend expects
            // this double nesting
            toolParts.push({
              functionResponse: {
                id: fid,
                name: name,
                response: { result: parsedResp }
              }
            });
          }
          if (toolParts.length > 0) {
            result.contents.push({ role: "user", parts: toolParts });
          }
        } else if (parts.length > 0) {
          // Plain assistant text turn (no tool calls)
          result.contents.push({ role: "model", parts });
        }
      }
      // role === "tool" messages are skipped here: they were captured in
      // pass 2 and re-attached after their assistant turn above
    }
  }
  // Convert tools: OpenAI function tools -> Gemini functionDeclarations
  if (body.tools && Array.isArray(body.tools) && body.tools.length > 0) {
    const functionDeclarations = [];
    for (const t of body.tools) {
      if (t.type === "function" && t.function) {
        const fn = t.function;
        functionDeclarations.push({
          name: fn.name,
          description: fn.description || "",
          parameters: fn.parameters || { type: "object", properties: {} }
        });
      }
    }
    if (functionDeclarations.length > 0) {
      result.tools = [{ functionDeclarations }];
    }
  }
  return result;
}
// OpenAI -> Gemini (standard API)
// Translate an OpenAI chat request into the standard Gemini API shape.
// Thin alias over the shared base converter; kept as a separate entry
// point so each target format registers its own translator.
function openaiToGemini(model, body, stream) {
  const geminiRequest = openaiToGeminiBase(model, body, stream);
  return geminiRequest;
}
// OpenAI -> Gemini CLI (Cloud Code Assist)
// Translate an OpenAI request into the Gemini CLI (Cloud Code Assist)
// shape: the shared Gemini base plus thinking configuration and
// tool-schema cleanup.
function openaiToGeminiCLI(model, body, stream) {
  const out = openaiToGeminiBase(model, body, stream);
  const targetsClaude = model.toLowerCase().includes("claude");
  // OpenAI-style reasoning_effort -> thinking budget (default: medium)
  if (body.reasoning_effort) {
    const budgets = { low: 1024, medium: 8192, high: 32768 };
    out.generationConfig.thinkingConfig = {
      thinkingBudget: budgets[body.reasoning_effort] || 8192,
      include_thoughts: true
    };
  }
  // Claude-style thinking config wins when both are present
  if (body.thinking?.type === "enabled" && body.thinking.budget_tokens) {
    out.generationConfig.thinkingConfig = {
      thinkingBudget: body.thinking.budget_tokens,
      include_thoughts: true
    };
  }
  // Schema cleanup per backend:
  // - Claude models: keep "parameters" (backend converts for us)
  // - Gemini native: move schema to "parametersJsonSchema"
  const declarations = out.tools?.[0]?.functionDeclarations;
  if (declarations) {
    for (const decl of declarations) {
      if (!decl.parameters) continue;
      const cleaned = cleanJSONSchemaForAntigravity(decl.parameters);
      if (targetsClaude) {
        decl.parameters = cleaned;
      } else {
        decl.parametersJsonSchema = cleaned;
        delete decl.parameters;
      }
    }
  }
  return out;
}
// Wrap Gemini CLI format in Cloud Code wrapper
// Wrap a Gemini CLI request body in the Cloud Code Assist envelope.
// Uses the real project id from credentials when available; otherwise a
// randomly generated one.
function wrapInCloudCodeEnvelope(model, geminiCLI, credentials = null) {
  const { contents, systemInstruction, generationConfig, safetySettings, tools } = geminiCLI;
  return {
    project: credentials?.projectId || generateProjectId(),
    model: model,
    userAgent: "gemini-cli",
    requestId: generateRequestId(),
    request: {
      sessionId: generateSessionId(),
      contents,
      systemInstruction,
      generationConfig,
      safetySettings,
      tools,
    }
  };
}
// OpenAI -> Antigravity (Sandbox Cloud Code with wrapper)
// OpenAI -> Antigravity: the Gemini CLI translation wrapped in the
// Cloud Code envelope (sandbox Cloud Code Assist).
function openaiToAntigravity(model, body, stream, credentials = null) {
  return wrapInCloudCodeEnvelope(model, openaiToGeminiCLI(model, body, stream), credentials);
}
// ============================================
// RESPONSE TRANSLATORS: Gemini/GeminiCLI/Antigravity -> OpenAI
// ============================================
// Core: Convert Gemini response chunk to OpenAI format
// Build a minimal OpenAI chat.completion.chunk envelope around `delta`.
function geminiChunk(state, delta, finishReason = null) {
  return {
    id: `chatcmpl-${state.messageId}`,
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: state.model,
    choices: [{
      index: 0,
      delta,
      finish_reason: finishReason
    }]
  };
}

// Map Gemini finishReason values onto the OpenAI finish_reason vocabulary
// (consistent with the Claude-side convertStopReason). Unknown reasons
// fall back to their lowercased form.
function convertGeminiFinishReason(reason) {
  switch (reason) {
    case "STOP": return "stop";
    case "MAX_TOKENS": return "length";
    case "SAFETY":
    case "RECITATION": return "content_filter";
    default: return String(reason).toLowerCase();
  }
}

// Emit one OpenAI tool_call chunk for a Gemini functionCall part.
// Tool-call ids encode "name-timestamp-index" so the request translator
// can recover the function name from the id alone.
function emitFunctionCall(part, state, results) {
  const fcName = part.functionCall.name;
  const fcArgs = part.functionCall.args || {};
  const toolCallIndex = state.functionIndex++;
  const toolCall = {
    id: `${fcName}-${Date.now()}-${toolCallIndex}`,
    index: toolCallIndex,
    type: "function",
    function: {
      name: fcName,
      arguments: JSON.stringify(fcArgs)
    }
  };
  state.toolCalls.set(toolCallIndex, toolCall);
  results.push(geminiChunk(state, { tool_calls: [toolCall] }));
}

// Core: convert one Gemini response chunk (or its Antigravity-wrapped
// form) into zero or more OpenAI stream chunks. `state` persists across
// chunks and tracks message id/model, the tool_call counter/map, the
// final finish_reason and accumulated usage. Returns an array of OpenAI
// chunks, or null when the chunk produced none.
function geminiToOpenAIResponse(chunk, state) {
  if (!chunk) return null;
  // Handle Antigravity wrapper
  const response = chunk.response || chunk;
  if (!response || !response.candidates?.[0]) return null;
  const results = [];
  const candidate = response.candidates[0];
  const content = candidate.content;
  // First chunk: initialize per-response state and emit the role chunk
  if (!state.messageId) {
    state.messageId = response.responseId || `msg_${Date.now()}`;
    state.model = response.modelVersion || "gemini";
    state.functionIndex = 0;
    results.push(geminiChunk(state, { role: "assistant" }));
  }
  // Process parts
  if (content?.parts) {
    for (const part of content.parts) {
      const hasThoughtSig = part.thoughtSignature || part.thought_signature;
      // Thinking-mode part: text marked as a thought is surfaced as
      // reasoning_content, otherwise as regular content; a functionCall on
      // the same part is emitted too. Signature parts skip inline-data
      // handling entirely.
      if (hasThoughtSig) {
        if (part.text !== undefined && part.text !== "") {
          results.push(geminiChunk(state, part.thought === true
            ? { reasoning_content: part.text }
            : { content: part.text }));
        }
        if (part.functionCall) {
          emitFunctionCall(part, state, results);
        }
        continue;
      }
      // Text content (non-thinking) - skip empty text
      if (part.text !== undefined && part.text !== "") {
        results.push(geminiChunk(state, { content: part.text }));
      }
      // Function call
      if (part.functionCall) {
        emitFunctionCall(part, state, results);
      }
      // Inline data (images) -> data-URL image chunk
      const inlineData = part.inlineData || part.inline_data;
      if (inlineData?.data) {
        const mimeType = inlineData.mimeType || inlineData.mime_type || "image/png";
        results.push(geminiChunk(state, {
          images: [{
            type: "image_url",
            image_url: { url: `data:${mimeType};base64,${inlineData.data}` }
          }]
        }));
      }
    }
  }
  // Finish reason: mapped to the OpenAI vocabulary; a plain stop becomes
  // tool_calls when tool calls were emitted during this response
  if (candidate.finishReason) {
    let finishReason = convertGeminiFinishReason(candidate.finishReason);
    if (finishReason === "stop" && state.toolCalls.size > 0) {
      finishReason = "tool_calls";
    }
    results.push(geminiChunk(state, {}, finishReason));
    state.finishReason = finishReason;
  }
  // Usage metadata
  const usage = response.usageMetadata || chunk.usageMetadata;
  if (usage) {
    // NOTE(review): thinking tokens are folded into prompt_tokens here —
    // confirm this matches the intended accounting
    const promptTokens = (usage.promptTokenCount || 0) + (usage.thoughtsTokenCount || 0);
    state.usage = {
      prompt_tokens: promptTokens,
      completion_tokens: usage.candidatesTokenCount || 0,
      total_tokens: usage.totalTokenCount || 0
    };
    if (usage.thoughtsTokenCount > 0) {
      state.usage.completion_tokens_details = {
        reasoning_tokens: usage.thoughtsTokenCount
      };
    }
  }
  return results.length > 0 ? results : null;
}
// ============================================
// REGISTER ALL TRANSLATORS
// ============================================
// Request: OpenAI -> Gemini variants
register(FORMATS.OPENAI, FORMATS.GEMINI, openaiToGemini, null);
register(FORMATS.OPENAI, FORMATS.GEMINI_CLI, (model, body, stream, credentials) => wrapInCloudCodeEnvelope(model, openaiToGeminiCLI(model, body, stream), credentials), null);
register(FORMATS.OPENAI, FORMATS.ANTIGRAVITY, openaiToAntigravity, null);
// Response: Gemini variants -> OpenAI (all use same handler)
register(FORMATS.GEMINI, FORMATS.OPENAI, null, geminiToOpenAIResponse);
register(FORMATS.GEMINI_CLI, FORMATS.OPENAI, null, geminiToOpenAIResponse);
register(FORMATS.ANTIGRAVITY, FORMATS.OPENAI, null, geminiToOpenAIResponse);

View File

@@ -51,7 +51,7 @@ export function translateRequest(sourceFormat, targetFormat, model, body, stream
if (targetFormat !== FORMATS.OPENAI) {
const fromOpenAI = requestRegistry.get(`${FORMATS.OPENAI}:${targetFormat}`);
if (fromOpenAI) {
result = fromOpenAI(model, result, stream, credentials);
result = fromOpenAI(model, result, stream);
}
}
}
@@ -157,11 +157,16 @@ export function initState(sourceFormat) {
// Initialize all translators
export async function initTranslators() {
await import("./to-openai/claude.js");
await import("./to-openai/gemini.js");
await import("./to-openai/openai.js");
await import("./to-openai/openai-responses.js");
await import("./from-openai/claude.js");
await import("./from-openai/gemini.js");
await import("./from-openai/openai-responses.js");
// Request translators
await import("./request/claude-to-openai.js");
await import("./request/openai-to-claude.js");
await import("./request/gemini-to-openai.js");
await import("./request/openai-to-gemini.js");
await import("./request/openai-responses.js");
// Response translators
await import("./response/claude-to-openai.js");
await import("./response/openai-to-claude.js");
await import("./response/gemini-to-openai.js");
await import("./response/openai-responses.js");
}

View File

@@ -3,7 +3,7 @@ import { FORMATS } from "../formats.js";
import { adjustMaxTokens } from "../helpers/maxTokensHelper.js";
// Convert Claude request to OpenAI format
function claudeToOpenAI(model, body, stream) {
function claudeToOpenAIRequest(model, body, stream) {
const result = {
model: model,
messages: [],
@@ -81,7 +81,6 @@ function fixMissingToolResponses(messages) {
const toolCallIds = msg.tool_calls.map(tc => tc.id);
// Collect all tool response IDs that IMMEDIATELY follow this assistant message
// Stop at any non-tool message (user or assistant)
const respondedIds = new Set();
let insertPosition = i + 1;
for (let j = i + 1; j < messages.length; j++) {
@@ -90,7 +89,6 @@ function fixMissingToolResponses(messages) {
respondedIds.add(nextMsg.tool_call_id);
insertPosition = j + 1;
} else {
// Stop at any non-tool message (user or assistant)
break;
}
}
@@ -104,9 +102,7 @@ function fixMissingToolResponses(messages) {
tool_call_id: id,
content: "[No response received]"
}));
// Insert missing responses at the correct position
messages.splice(insertPosition, 0, ...missingResponses);
// Adjust index to skip inserted messages
i = insertPosition + missingResponses.length - 1;
}
}
@@ -157,12 +153,10 @@ function convertClaudeMessage(msg) {
break;
case "tool_result":
// Extract actual content from tool_result
let resultContent = "";
if (typeof block.content === "string") {
resultContent = block.content;
} else if (Array.isArray(block.content)) {
// Claude tool_result content can be array of text blocks
resultContent = block.content
.filter(c => c.type === "text")
.map(c => c.text)
@@ -182,7 +176,6 @@ function convertClaudeMessage(msg) {
// If has tool results, return array of tool messages
if (toolResults.length > 0) {
// Also include text parts as user message if any
if (parts.length > 0) {
const textContent = parts.length === 1 && parts[0].type === "text"
? parts[0].text
@@ -212,7 +205,7 @@ function convertClaudeMessage(msg) {
};
}
// Empty content array - return empty string content to keep message in conversation
// Empty content array
if (msg.content.length === 0) {
return { role, content: "" };
}
@@ -235,5 +228,5 @@ function convertToolChoice(choice) {
}
// Register
register(FORMATS.CLAUDE, FORMATS.OPENAI, claudeToOpenAI, null);
register(FORMATS.CLAUDE, FORMATS.OPENAI, claudeToOpenAIRequest, null);

View File

@@ -3,7 +3,7 @@ import { FORMATS } from "../formats.js";
import { adjustMaxTokens } from "../helpers/maxTokensHelper.js";
// Convert Gemini request to OpenAI format
function geminiToOpenAI(model, body, stream) {
function geminiToOpenAIRequest(model, body, stream) {
const result = {
model: model,
messages: [],
@@ -14,7 +14,6 @@ function geminiToOpenAI(model, body, stream) {
if (body.generationConfig) {
const config = body.generationConfig;
if (config.maxOutputTokens) {
// Create temporary body object for adjustMaxTokens
const tempBody = { max_tokens: config.maxOutputTokens, tools: body.tools };
result.max_tokens = adjustMaxTokens(tempBody);
}
@@ -81,12 +80,10 @@ function convertGeminiContent(content) {
const toolCalls = [];
for (const part of content.parts) {
// Text
if (part.text !== undefined) {
parts.push({ type: "text", text: part.text });
}
// Image
if (part.inlineData) {
parts.push({
type: "image_url",
@@ -96,7 +93,6 @@ function convertGeminiContent(content) {
});
}
// Function call
if (part.functionCall) {
toolCalls.push({
id: `call_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`,
@@ -108,7 +104,6 @@ function convertGeminiContent(content) {
});
}
// Function response - use id if available, fallback to name
if (part.functionResponse) {
return {
role: "tool",
@@ -118,7 +113,6 @@ function convertGeminiContent(content) {
}
}
// Has tool calls
if (toolCalls.length > 0) {
const result = { role: "assistant" };
if (parts.length > 0) {
@@ -128,7 +122,6 @@ function convertGeminiContent(content) {
return result;
}
// Regular message
if (parts.length > 0) {
return {
role,
@@ -149,6 +142,6 @@ function extractGeminiText(content) {
}
// Register
register(FORMATS.GEMINI, FORMATS.OPENAI, geminiToOpenAI, null);
register(FORMATS.GEMINI_CLI, FORMATS.OPENAI, geminiToOpenAI, null);
register(FORMATS.GEMINI, FORMATS.OPENAI, geminiToOpenAIRequest, null);
register(FORMATS.GEMINI_CLI, FORMATS.OPENAI, geminiToOpenAIRequest, null);

View File

@@ -10,7 +10,7 @@ import { FORMATS } from "../formats.js";
/**
* Convert OpenAI Responses API request to OpenAI Chat Completions format
*/
function translateRequest(model, body, stream, credentials) {
function openaiResponsesToOpenAIRequest(model, body, stream, credentials) {
if (!body.input) return body;
const result = { ...body };
@@ -81,7 +81,7 @@ function translateRequest(model, body, stream, credentials) {
}
pendingToolResults = [];
}
// Add tool result immediately (not pending)
// Add tool result immediately
result.messages.push({
role: "tool",
tool_call_id: item.call_id,
@@ -104,14 +104,10 @@ function translateRequest(model, body, stream, credentials) {
}
}
// Tools are already in OpenAI format, just keep them
// Responses API tools: { type: "function", name, description, parameters }
// OpenAI tools: { type: "function", function: { name, description, parameters } }
// Convert tools format
if (body.tools && Array.isArray(body.tools)) {
result.tools = body.tools.map(tool => {
// Already has function wrapper
if (tool.function) return tool;
// Responses API format: flatten to OpenAI format
return {
type: "function",
function: {
@@ -135,6 +131,6 @@ function translateRequest(model, body, stream, credentials) {
return result;
}
// Register translator
register(FORMATS.OPENAI_RESPONSES, FORMATS.OPENAI, translateRequest, null);
// Register
register(FORMATS.OPENAI_RESPONSES, FORMATS.OPENAI, openaiResponsesToOpenAIRequest, null);

View File

@@ -4,7 +4,9 @@ import { CLAUDE_SYSTEM_PROMPT } from "../../config/constants.js";
import { adjustMaxTokens } from "../helpers/maxTokensHelper.js";
// Convert OpenAI request to Claude format
function openaiToClaude(model, body, stream) {
function openaiToClaudeRequest(model, body, stream) {
// Tool name mapping for Claude OAuth (capitalizedName → originalName)
const toolNameMap = new Map();
const result = {
model: model,
max_tokens: adjustMaxTokens(body),
@@ -27,10 +29,10 @@ function openaiToClaude(model, body, stream) {
systemParts.push(typeof msg.content === "string" ? msg.content : extractTextContent(msg.content));
}
}
// Filter out system messages for separate processing
const nonSystemMessages = body.messages.filter(m => m.role !== "system");
// Process messages with merging logic
// CRITICAL: tool_result must be in separate message immediately after tool_use
let currentRole = undefined;
@@ -54,15 +56,12 @@ function openaiToClaude(model, body, stream) {
const toolResultBlocks = blocks.filter(b => b.type === "tool_result");
const otherBlocks = blocks.filter(b => b.type !== "tool_result");
// Flush current message first
flushCurrentMessage();
// Add tool_result as separate user message
if (toolResultBlocks.length > 0) {
result.messages.push({ role: "user", content: toolResultBlocks });
}
// Add other blocks to current parts for next message
if (otherBlocks.length > 0) {
currentRole = newRole;
currentParts.push(...otherBlocks);
@@ -83,8 +82,8 @@ function openaiToClaude(model, body, stream) {
}
flushCurrentMessage();
// Add cache_control to last assistant message (like worker.old)
// Add cache_control to last assistant message
for (let i = result.messages.length - 1; i >= 0; i--) {
const message = result.messages[i];
if (message.role === "assistant" && Array.isArray(message.content) && message.content.length > 0) {
@@ -99,7 +98,7 @@ function openaiToClaude(model, body, stream) {
// System with Claude Code prompt and cache_control
const claudeCodePrompt = { type: "text", text: CLAUDE_SYSTEM_PROMPT };
if (systemParts.length > 0) {
const systemText = systemParts.join("\n");
result.system = [
@@ -113,21 +112,27 @@ function openaiToClaude(model, body, stream) {
// Tools - convert from OpenAI format to Claude format
if (body.tools && Array.isArray(body.tools)) {
result.tools = body.tools.map(tool => {
// Handle both OpenAI format {type: "function", function: {...}} and direct format
const toolData = tool.type === "function" && tool.function ? tool.function : tool;
const originalName = toolData.name;
// Claude requires capitalized tool names
const toolName = originalName.charAt(0).toUpperCase() + originalName.slice(1);
// Store mapping for response translation
if (toolName !== originalName) {
toolNameMap.set(toolName, originalName);
}
return {
name: toolData.name,
name: toolName,
description: toolData.description || "",
input_schema: toolData.parameters || toolData.input_schema || { type: "object", properties: {}, required: [] }
};
});
// Add cache control to last tool (like worker.old)
if (result.tools.length > 0) {
result.tools[result.tools.length - 1].cache_control = { type: "ephemeral", ttl: "1h" };
}
// console.log("[CLAUDE TOOLS DEBUG] Converted tools:", result.tools.map(t => t.name));
}
// Tool choice
@@ -135,71 +140,15 @@ function openaiToClaude(model, body, stream) {
result.tool_choice = convertOpenAIToolChoice(body.tool_choice);
}
return result;
}
// Convert OpenAI request to Gemini format
// Convert an OpenAI request to the Gemini request format.
// NOTE(review): `toolNameMap` is read from the enclosing module scope — confirm
// it is defined and reset per request, otherwise mappings leak across requests.
// Fix: the `_toolNameMap` attach and the `return` were nested inside the tools
// branch (merge/diff residue), leaving the function unbalanced and the return
// unreachable when `body.tools` was absent; both now sit at function level.
function openaiToGemini(model, body, stream) {
  const result = {
    contents: [],
    generationConfig: {}
  };
  // Generation config (max_tokens uses a truthy check, so 0 is deliberately ignored)
  if (body.max_tokens) {
    result.generationConfig.maxOutputTokens = body.max_tokens;
  }
  if (body.temperature !== undefined) {
    result.generationConfig.temperature = body.temperature;
  }
  if (body.top_p !== undefined) {
    result.generationConfig.topP = body.top_p;
  }
  // Messages: system -> systemInstruction, tool -> functionResponse, rest -> contents
  if (body.messages && Array.isArray(body.messages)) {
    for (const msg of body.messages) {
      if (msg.role === "system") {
        result.systemInstruction = {
          parts: [{ text: typeof msg.content === "string" ? msg.content : extractTextContent(msg.content) }]
        };
      } else if (msg.role === "tool") {
        result.contents.push({
          role: "function",
          parts: [{
            functionResponse: {
              name: msg.tool_call_id,
              response: tryParseJSON(msg.content)
            }
          }]
        });
      } else {
        const converted = convertOpenAIToGeminiContent(msg);
        if (converted) {
          result.contents.push(converted);
        }
      }
    }
  }
  // Tools: keep only well-formed function tools
  if (body.tools && Array.isArray(body.tools)) {
    const validTools = body.tools.filter(tool => tool && tool.function && tool.function.name);
    if (validTools.length > 0) {
      result.tools = [{
        functionDeclarations: validTools.map(tool => ({
          name: tool.function.name,
          description: tool.function.description || "",
          parameters: tool.function.parameters || { type: "object", properties: {} }
        }))
      }];
    }
  }
  // Attach toolNameMap to result for response translation
  if (toolNameMap.size > 0) {
    result._toolNameMap = toolNameMap;
  }
  return result;
}
// Get content blocks from single message
function getContentBlocksFromMessage(msg) {
const blocks = [];
@@ -240,7 +189,6 @@ function getContentBlocksFromMessage(msg) {
}
}
} else if (msg.role === "assistant") {
// Handle Anthropic format: content is array with tool_use blocks
if (Array.isArray(msg.content)) {
for (const part of msg.content) {
if (part.type === "text" && part.text) {
@@ -256,7 +204,6 @@ function getContentBlocksFromMessage(msg) {
}
}
// Handle OpenAI format: tool_calls array
if (msg.tool_calls && Array.isArray(msg.tool_calls)) {
for (const tc of msg.tool_calls) {
if (tc.type === "function") {
@@ -274,68 +221,9 @@ function getContentBlocksFromMessage(msg) {
return blocks;
}
// Convert single OpenAI message to Claude format (for backward compatibility)
// Convert a single OpenAI message into a Claude message (legacy path).
// Returns null when the message carries no convertible content.
function convertOpenAIMessage(msg) {
  const content = convertOpenAIMessageContent(msg);
  if (content.length === 0) {
    return null;
  }
  return {
    role: msg.role === "assistant" ? "assistant" : "user",
    content
  };
}
// Convert OpenAI message to Gemini content
// Convert one OpenAI chat message into a Gemini content entry.
// Assistant messages map to role "model", everything else to "user".
// Returns null when nothing convertible is found.
function convertOpenAIToGeminiContent(msg) {
  const parts = [];

  // Only inline base64 data URIs are supported; other image URLs are dropped.
  const pushDataUriImage = (url) => {
    const match = url.match(/^data:([^;]+);base64,(.+)$/);
    if (match) {
      parts.push({ inlineData: { mimeType: match[1], data: match[2] } });
    }
  };

  if (typeof msg.content === "string") {
    if (msg.content) {
      parts.push({ text: msg.content });
    }
  } else if (Array.isArray(msg.content)) {
    for (const part of msg.content) {
      if (part.type === "text") {
        parts.push({ text: part.text });
      } else if (part.type === "image_url" && part.image_url.url.startsWith("data:")) {
        pushDataUriImage(part.image_url.url);
      }
    }
  }

  // OpenAI tool_calls become Gemini functionCall parts.
  if (Array.isArray(msg.tool_calls)) {
    for (const call of msg.tool_calls) {
      parts.push({
        functionCall: {
          name: call.function.name,
          args: tryParseJSON(call.function.arguments)
        }
      });
    }
  }

  if (parts.length === 0) {
    return null;
  }
  return { role: msg.role === "assistant" ? "model" : "user", parts };
}
// Convert OpenAI tool choice to Claude format
function convertOpenAIToolChoice(choice) {
if (!choice) return { type: "auto" };
// Passthrough if already Claude format
if (typeof choice === "object" && choice.type) return choice;
if (choice === "auto" || choice === "none") return { type: "auto" };
if (choice === "required") return { type: "any" };
@@ -365,8 +253,5 @@ function tryParseJSON(str) {
}
// Register
register(FORMATS.OPENAI, FORMATS.CLAUDE, openaiToClaude, null);
register(FORMATS.OPENAI, FORMATS.GEMINI, openaiToGemini, null);
register(FORMATS.OPENAI, FORMATS.GEMINI_CLI, openaiToGemini, null);
register(FORMATS.OPENAI, FORMATS.CLAUDE, openaiToClaudeRequest, null);

View File

@@ -0,0 +1,247 @@
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
import { DEFAULT_THINKING_GEMINI_SIGNATURE } from "../../config/defaultThinkingSignature.js";
import {
DEFAULT_SAFETY_SETTINGS,
convertOpenAIContentToParts,
extractTextContent,
tryParseJSON,
generateRequestId,
generateSessionId,
generateProjectId,
cleanJSONSchemaForAntigravity
} from "../helpers/geminiHelper.js";
// Core: Convert OpenAI request to Gemini format (base for all variants)
// Core: Convert an OpenAI chat-completions request into the Gemini request
// shape shared by all variants (standard, CLI, Antigravity).
// - `stream` is accepted for translator-signature parity but not used here.
// - Assistant tool_calls become `functionCall` parts, and the matching `tool`
//   messages are re-attached as `functionResponse` parts in a user turn placed
//   directly after the model turn that issued them.
function openaiToGeminiBase(model, body, stream) {
  const result = {
    model: model,
    contents: [],
    generationConfig: {},
    safetySettings: DEFAULT_SAFETY_SETTINGS
  };
  // Generation config: copy only the knobs the caller actually set
  // (undefined means "omit"; note max_tokens: 0 would be forwarded as-is).
  if (body.temperature !== undefined) {
    result.generationConfig.temperature = body.temperature;
  }
  if (body.top_p !== undefined) {
    result.generationConfig.topP = body.top_p;
  }
  if (body.top_k !== undefined) {
    result.generationConfig.topK = body.top_k;
  }
  if (body.max_tokens !== undefined) {
    result.generationConfig.maxOutputTokens = body.max_tokens;
  }
  // Build tool_call_id -> function-name map from assistant tool_calls;
  // used below to name the functionResponse parts.
  const tcID2Name = {};
  if (body.messages && Array.isArray(body.messages)) {
    for (const msg of body.messages) {
      if (msg.role === "assistant" && msg.tool_calls) {
        for (const tc of msg.tool_calls) {
          if (tc.type === "function" && tc.id && tc.function?.name) {
            tcID2Name[tc.id] = tc.function.name;
          }
        }
      }
    }
  }
  // Cache tool results by tool_call_id so they can be re-attached right after
  // the assistant turn that requested them (the `tool` messages themselves are
  // not emitted as standalone contents).
  const toolResponses = {};
  if (body.messages && Array.isArray(body.messages)) {
    for (const msg of body.messages) {
      if (msg.role === "tool" && msg.tool_call_id) {
        toolResponses[msg.tool_call_id] = msg.content;
      }
    }
  }
  // Convert messages into Gemini contents.
  if (body.messages && Array.isArray(body.messages)) {
    for (let i = 0; i < body.messages.length; i++) {
      const msg = body.messages[i];
      const role = msg.role;
      const content = msg.content;
      if (role === "system" && body.messages.length > 1) {
        // System prompt becomes systemInstruction (only when other messages exist).
        result.systemInstruction = {
          role: "user",
          parts: [{ text: typeof content === "string" ? content : extractTextContent(content) }]
        };
      } else if (role === "user" || (role === "system" && body.messages.length === 1)) {
        // A lone system message is treated as the user turn.
        const parts = convertOpenAIContentToParts(content);
        if (parts.length > 0) {
          result.contents.push({ role: "user", parts });
        }
      } else if (role === "assistant") {
        const parts = [];
        if (content) {
          const text = typeof content === "string" ? content : extractTextContent(content);
          if (text) {
            parts.push({ text });
          }
        }
        if (msg.tool_calls && Array.isArray(msg.tool_calls)) {
          const toolCallIds = [];
          for (const tc of msg.tool_calls) {
            if (tc.type !== "function") continue;
            const args = tryParseJSON(tc.function?.arguments || "{}");
            // Each functionCall part carries the default thought signature
            // imported above — presumably required by the thinking pipeline;
            // TODO confirm against the upstream API.
            parts.push({
              thoughtSignature: DEFAULT_THINKING_GEMINI_SIGNATURE,
              functionCall: {
                id: tc.id,
                name: tc.function.name,
                args: args
              }
            });
            toolCallIds.push(tc.id);
          }
          if (parts.length > 0) {
            result.contents.push({ role: "model", parts });
          }
          // Append the cached function responses as a user turn right after.
          const toolParts = [];
          for (const fid of toolCallIds) {
            let name = tcID2Name[fid];
            if (!name) {
              // Fallback: recover the tool name from ids shaped like
              // "<name>-<timestamp>-<index>" (the id format produced by the
              // Gemini response translator); otherwise use the id verbatim.
              const idParts = fid.split("-");
              if (idParts.length > 2) {
                name = idParts.slice(0, -2).join("-");
              } else {
                name = fid;
              }
            }
            let resp = toolResponses[fid] || "{}";
            let parsedResp = tryParseJSON(resp);
            if (parsedResp === null) {
              // Non-JSON tool output is wrapped as { result: <raw string> }.
              parsedResp = { result: resp };
            } else if (typeof parsedResp !== "object") {
              // JSON scalars get the same wrapping so `response` is an object.
              parsedResp = { result: parsedResp };
            }
            toolParts.push({
              functionResponse: {
                id: fid,
                name: name,
                response: { result: parsedResp }
              }
            });
          }
          if (toolParts.length > 0) {
            result.contents.push({ role: "user", parts: toolParts });
          }
        } else if (parts.length > 0) {
          result.contents.push({ role: "model", parts });
        }
      }
    }
  }
  // Convert OpenAI function tools into one functionDeclarations group.
  if (body.tools && Array.isArray(body.tools) && body.tools.length > 0) {
    const functionDeclarations = [];
    for (const t of body.tools) {
      if (t.type === "function" && t.function) {
        const fn = t.function;
        functionDeclarations.push({
          name: fn.name,
          description: fn.description || "",
          parameters: fn.parameters || { type: "object", properties: {} }
        });
      }
    }
    if (functionDeclarations.length > 0) {
      result.tools = [{ functionDeclarations }];
    }
  }
  return result;
}
// OpenAI -> Gemini (standard API)
// OpenAI -> Gemini (standard API): the base translation is used unchanged.
function openaiToGeminiRequest(model, body, stream) {
  const request = openaiToGeminiBase(model, body, stream);
  return request;
}
// OpenAI -> Gemini CLI (Cloud Code Assist)
// OpenAI -> Gemini CLI (Cloud Code Assist): base conversion plus thinking
// budget mapping and tool-schema cleanup.
function openaiToGeminiCLIRequest(model, body, stream) {
  const gemini = openaiToGeminiBase(model, body, stream);
  const isClaude = model.toLowerCase().includes("claude");

  // OpenAI-style reasoning_effort maps to a fixed thinking budget.
  if (body.reasoning_effort) {
    const EFFORT_BUDGETS = { low: 1024, medium: 8192, high: 32768 };
    gemini.generationConfig.thinkingConfig = {
      thinkingBudget: EFFORT_BUDGETS[body.reasoning_effort] || 8192,
      include_thoughts: true
    };
  }

  // A Claude-style explicit thinking config wins when both are present.
  if (body.thinking?.type === "enabled" && body.thinking.budget_tokens) {
    gemini.generationConfig.thinkingConfig = {
      thinkingBudget: body.thinking.budget_tokens,
      include_thoughts: true
    };
  }

  // Sanitize tool parameter schemas. Claude models keep `parameters`;
  // other models get the cleaned schema under `parametersJsonSchema` instead.
  const declarations = gemini.tools?.[0]?.functionDeclarations ?? [];
  for (const decl of declarations) {
    if (!decl.parameters) continue;
    const cleanedSchema = cleanJSONSchemaForAntigravity(decl.parameters);
    if (isClaude) {
      decl.parameters = cleanedSchema;
    } else {
      decl.parametersJsonSchema = cleanedSchema;
      delete decl.parameters;
    }
  }
  return gemini;
}
// Wrap Gemini CLI format in Cloud Code wrapper
// Wrap a Gemini CLI-format request body in the Cloud Code envelope.
// Uses the credential's projectId when present, otherwise generates one.
function wrapInCloudCodeEnvelope(model, geminiCLI, credentials = null) {
  const projectId = credentials?.projectId || generateProjectId();
  const request = {
    sessionId: generateSessionId(),
    contents: geminiCLI.contents,
    systemInstruction: geminiCLI.systemInstruction,
    generationConfig: geminiCLI.generationConfig,
    safetySettings: geminiCLI.safetySettings,
    tools: geminiCLI.tools,
  };
  return {
    project: projectId,
    model,
    userAgent: "gemini-cli",
    requestId: generateRequestId(),
    request
  };
}
// OpenAI -> Antigravity (Sandbox Cloud Code with wrapper)
// OpenAI -> Antigravity: a CLI-shaped request wrapped in the Cloud Code envelope.
function openaiToAntigravityRequest(model, body, stream, credentials = null) {
  return wrapInCloudCodeEnvelope(model, openaiToGeminiCLIRequest(model, body, stream), credentials);
}
// Register
register(FORMATS.OPENAI, FORMATS.GEMINI, openaiToGeminiRequest, null);
register(FORMATS.OPENAI, FORMATS.GEMINI_CLI, (model, body, stream, credentials) => wrapInCloudCodeEnvelope(model, openaiToGeminiCLIRequest(model, body, stream), credentials), null);
register(FORMATS.OPENAI, FORMATS.ANTIGRAVITY, openaiToAntigravityRequest, null);

View File

@@ -0,0 +1,157 @@
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
// Create OpenAI chunk helper
// Build a single OpenAI chat.completion.chunk envelope around `delta`.
// `state` supplies the stream-wide messageId and model name.
function createChunk(state, delta, finishReason = null) {
  const choice = { index: 0, delta, finish_reason: finishReason };
  return {
    id: `chatcmpl-${state.messageId}`,
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: state.model,
    choices: [choice]
  };
}
// Convert Claude stream chunk to OpenAI format
// Convert a Claude SSE event into zero or more OpenAI chat.completion chunks.
// `state` persists across events of one response: messageId/model, tool-call
// bookkeeping (`toolCalls` Map keyed by Claude content-block index), thinking
// block tracking, finish/usage flags. Returns an array of chunks or null.
// Fix: removed a leftover debug console.log from the hot streaming path, and
// message_delta now reuses createChunk instead of a hand-built envelope.
function claudeToOpenAIResponse(chunk, state) {
  if (!chunk) return null;
  const results = [];
  const event = chunk.type;
  switch (event) {
    case "message_start": {
      state.messageId = chunk.message?.id || `msg_${Date.now()}`;
      state.model = chunk.message?.model;
      state.toolCallIndex = 0;
      results.push(createChunk(state, { role: "assistant" }));
      break;
    }
    case "content_block_start": {
      const block = chunk.content_block;
      if (block?.type === "text") {
        state.textBlockStarted = true;
      } else if (block?.type === "thinking") {
        // Thinking is surfaced inline, wrapped in <think> ... </think> tags.
        state.inThinkingBlock = true;
        state.currentBlockIndex = chunk.index;
        results.push(createChunk(state, { content: "<think>" }));
      } else if (block?.type === "tool_use") {
        const toolCallIndex = state.toolCallIndex++;
        // Restore original tool name from mapping (Claude OAuth)
        const toolName = state.toolNameMap?.get(block.name) || block.name;
        const toolCall = {
          index: toolCallIndex,
          id: block.id,
          type: "function",
          function: {
            name: toolName,
            arguments: ""
          }
        };
        // Keyed by the Claude block index so input_json_delta can find it.
        state.toolCalls.set(chunk.index, toolCall);
        results.push(createChunk(state, { tool_calls: [toolCall] }));
      }
      break;
    }
    case "content_block_delta": {
      const delta = chunk.delta;
      if (delta?.type === "text_delta" && delta.text) {
        results.push(createChunk(state, { content: delta.text }));
      } else if (delta?.type === "thinking_delta" && delta.thinking) {
        results.push(createChunk(state, { content: delta.thinking }));
      } else if (delta?.type === "input_json_delta" && delta.partial_json) {
        const toolCall = state.toolCalls.get(chunk.index);
        if (toolCall) {
          // Accumulate locally (for finish-reason logic) and stream the delta.
          toolCall.function.arguments += delta.partial_json;
          results.push(createChunk(state, {
            tool_calls: [{
              index: toolCall.index,
              id: toolCall.id,
              function: { arguments: delta.partial_json }
            }]
          }));
        }
      }
      break;
    }
    case "content_block_stop": {
      if (state.inThinkingBlock && chunk.index === state.currentBlockIndex) {
        results.push(createChunk(state, { content: "</think>" }));
        state.inThinkingBlock = false;
      }
      state.textBlockStarted = false;
      state.thinkingBlockStarted = false;
      break;
    }
    case "message_delta": {
      if (chunk.delta?.stop_reason) {
        state.finishReason = convertStopReason(chunk.delta.stop_reason);
        results.push(createChunk(state, {}, state.finishReason));
        state.finishReasonSent = true;
      }
      break;
    }
    case "message_stop": {
      // Only emit a final chunk if message_delta did not already carry one.
      if (!state.finishReasonSent) {
        const finishReason = state.finishReason || (state.toolCalls?.size > 0 ? "tool_calls" : "stop");
        results.push({
          id: `chatcmpl-${state.messageId}`,
          object: "chat.completion.chunk",
          created: Math.floor(Date.now() / 1000),
          model: state.model,
          choices: [{
            index: 0,
            delta: {},
            finish_reason: finishReason
          }],
          // Attach usage when the stream reported it.
          ...(state.usage && {
            usage: {
              prompt_tokens: state.usage.input_tokens || 0,
              completion_tokens: state.usage.output_tokens || 0,
              total_tokens: (state.usage.input_tokens || 0) + (state.usage.output_tokens || 0)
            }
          })
        });
        state.finishReasonSent = true;
      }
      break;
    }
  }
  return results.length > 0 ? results : null;
}
// Convert Claude stop_reason to OpenAI finish_reason
// Map Claude stop_reason values onto OpenAI finish_reason values.
// Unknown values fall back to "stop".
function convertStopReason(reason) {
  const table = new Map([
    ["end_turn", "stop"],
    ["max_tokens", "length"],
    ["tool_use", "tool_calls"],
    ["stop_sequence", "stop"]
  ]);
  return table.get(reason) ?? "stop";
}
// Register
register(FORMATS.CLAUDE, FORMATS.OPENAI, null, claudeToOpenAIResponse);

View File

@@ -0,0 +1,207 @@
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
// Convert Gemini response chunk to OpenAI format
// Convert a Gemini (or Antigravity-wrapped) streaming chunk into OpenAI
// chat.completion.chunk objects.
//
// `state` is shared across chunks of one response and must provide a
// `toolCalls` Map; messageId/model/functionIndex are initialized on the first
// chunk, and finishReason/usage are recorded as they arrive.
// Returns an array of OpenAI chunks, or null when nothing is emitted.
// Fix: the chunk envelope and functionCall handling were hand-copied six
// times; deduplicated into local helpers with identical output.
function geminiToOpenAIResponse(chunk, state) {
  if (!chunk) return null;
  // Antigravity wraps the Gemini payload in { response: ... }
  const response = chunk.response || chunk;
  if (!response || !response.candidates?.[0]) return null;
  const results = [];
  const candidate = response.candidates[0];
  const content = candidate.content;

  // Shared envelope for every emitted chunk.
  const makeChunk = (delta, finishReason = null) => ({
    id: `chatcmpl-${state.messageId}`,
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: state.model,
    choices: [{ index: 0, delta, finish_reason: finishReason }]
  });

  // Emit a tool_call delta for a functionCall part and record it in state.
  const pushFunctionCall = (functionCall) => {
    const fcName = functionCall.name;
    const fcArgs = functionCall.args || {};
    const toolCallIndex = state.functionIndex++;
    const toolCall = {
      id: `${fcName}-${Date.now()}-${toolCallIndex}`,
      index: toolCallIndex,
      type: "function",
      function: {
        name: fcName,
        arguments: JSON.stringify(fcArgs)
      }
    };
    state.toolCalls.set(toolCallIndex, toolCall);
    results.push(makeChunk({ tool_calls: [toolCall] }));
  };

  // First chunk: open the assistant message.
  if (!state.messageId) {
    state.messageId = response.responseId || `msg_${Date.now()}`;
    state.model = response.modelVersion || "gemini";
    state.functionIndex = 0;
    results.push(makeChunk({ role: "assistant" }));
  }

  // Process parts.
  if (content?.parts) {
    for (const part of content.parts) {
      const hasThoughtSig = part.thoughtSignature || part.thought_signature;
      const isThought = part.thought === true;
      // Thinking-mode parts: text goes out as reasoning_content when the part
      // is flagged as a thought, otherwise as normal content.
      if (hasThoughtSig) {
        if (part.text !== undefined && part.text !== "") {
          results.push(makeChunk(isThought
            ? { reasoning_content: part.text }
            : { content: part.text }));
        }
        if (part.functionCall) {
          pushFunctionCall(part.functionCall);
        }
        continue;
      }
      // Text content (non-thinking).
      if (part.text !== undefined && part.text !== "") {
        results.push(makeChunk({ content: part.text }));
      }
      // Function call.
      if (part.functionCall) {
        pushFunctionCall(part.functionCall);
      }
      // Inline data (images) -> data-URI image_url delta.
      const inlineData = part.inlineData || part.inline_data;
      if (inlineData?.data) {
        const mimeType = inlineData.mimeType || inlineData.mime_type || "image/png";
        results.push(makeChunk({
          images: [{
            type: "image_url",
            image_url: { url: `data:${mimeType};base64,${inlineData.data}` }
          }]
        }));
      }
    }
  }

  // Finish reason: a plain "stop" is reported as tool_calls when any tool
  // call was emitted during this response.
  if (candidate.finishReason) {
    let finishReason = candidate.finishReason.toLowerCase();
    if (finishReason === "stop" && state.toolCalls.size > 0) {
      finishReason = "tool_calls";
    }
    results.push(makeChunk({}, finishReason));
    state.finishReason = finishReason;
  }

  // Usage metadata: thought tokens are folded into prompt_tokens and also
  // surfaced under completion_tokens_details.reasoning_tokens.
  const usage = response.usageMetadata || chunk.usageMetadata;
  if (usage) {
    state.usage = {
      prompt_tokens: (usage.promptTokenCount || 0) + (usage.thoughtsTokenCount || 0),
      completion_tokens: usage.candidatesTokenCount || 0,
      total_tokens: usage.totalTokenCount || 0
    };
    if (usage.thoughtsTokenCount > 0) {
      state.usage.completion_tokens_details = {
        reasoning_tokens: usage.thoughtsTokenCount
      };
    }
  }
  return results.length > 0 ? results : null;
}
// Register
register(FORMATS.GEMINI, FORMATS.OPENAI, null, geminiToOpenAIResponse);
register(FORMATS.GEMINI_CLI, FORMATS.OPENAI, null, geminiToOpenAIResponse);
register(FORMATS.ANTIGRAVITY, FORMATS.OPENAI, null, geminiToOpenAIResponse);

View File

@@ -9,9 +9,8 @@ import { FORMATS } from "../formats.js";
* Translate OpenAI chunk to Responses API events
* @returns {Array} Array of events with { event, data } structure
*/
function translateResponse(chunk, state) {
function openaiToOpenAIResponsesResponse(chunk, state) {
if (!chunk) {
// Flush remaining events
return flushEvents(state);
}
@@ -356,6 +355,6 @@ function flushEvents(state) {
return events;
}
// Register translator
register(FORMATS.OPENAI, FORMATS.OPENAI_RESPONSES, null, translateResponse);
// Register
register(FORMATS.OPENAI, FORMATS.OPENAI_RESPONSES, null, openaiToOpenAIResponsesResponse);

View File

@@ -0,0 +1,174 @@
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
// Helper: stop thinking block if started
// Emit a content_block_stop for the currently open thinking block, if any.
function stopThinkingBlock(state, results) {
  if (state.thinkingBlockStarted) {
    results.push({ type: "content_block_stop", index: state.thinkingBlockIndex });
    state.thinkingBlockStarted = false;
  }
}
// Helper: stop text block if started
// Emit a content_block_stop for the currently open text block, if any.
// The textBlockClosed latch keeps this idempotent per block.
function stopTextBlock(state, results) {
  if (!state.textBlockStarted || state.textBlockClosed) {
    return;
  }
  state.textBlockClosed = true;
  state.textBlockStarted = false;
  results.push({ type: "content_block_stop", index: state.textBlockIndex });
}
// Convert OpenAI stream chunk to Claude format
// Convert one OpenAI streaming chunk into Claude SSE events.
// `state` persists across chunks of a single response: message bookkeeping
// (messageStartSent/messageId/model), open-block flags and indices, and a
// `toolCalls` Map keyed by the OpenAI tool_call index. Returns an array of
// Claude event objects, or null when the chunk yields nothing.
function openaiToClaudeResponse(chunk, state) {
  if (!chunk || !chunk.choices?.[0]) return null;
  const results = [];
  const choice = chunk.choices[0];
  const delta = choice.delta;
  // First chunk - ALWAYS send message_start first
  if (!state.messageStartSent) {
    state.messageStartSent = true;
    state.messageId = chunk.id?.replace("chatcmpl-", "") || `msg_${Date.now()}`;
    // Reject degenerate ids (e.g. bare "chat" or too-short fragments) and
    // fall back to provider trace ids when available.
    if (!state.messageId || state.messageId === "chat" || state.messageId.length < 8) {
      state.messageId = chunk.extend_fields?.requestId ||
      chunk.extend_fields?.traceId ||
      `msg_${Date.now()}`;
    }
    state.model = chunk.model || "unknown";
    // Claude content blocks are indexed; indices are handed out sequentially.
    state.nextBlockIndex = 0;
    results.push({
      type: "message_start",
      message: {
        id: state.messageId,
        type: "message",
        role: "assistant",
        model: state.model,
        content: [],
        stop_reason: null,
        stop_sequence: null,
        usage: { input_tokens: 0, output_tokens: 0 } // placeholder counts
      }
    });
  }
  // Handle reasoning_content (thinking) - GLM, DeepSeek, etc.
  const reasoningContent = delta?.reasoning_content || delta?.reasoning;
  if (reasoningContent) {
    // Thinking and text are mutually exclusive blocks; close the other first.
    stopTextBlock(state, results);
    if (!state.thinkingBlockStarted) {
      state.thinkingBlockIndex = state.nextBlockIndex++;
      state.thinkingBlockStarted = true;
      results.push({
        type: "content_block_start",
        index: state.thinkingBlockIndex,
        content_block: { type: "thinking", thinking: "" }
      });
    }
    results.push({
      type: "content_block_delta",
      index: state.thinkingBlockIndex,
      delta: { type: "thinking_delta", thinking: reasoningContent }
    });
  }
  // Handle regular content
  if (delta?.content) {
    stopThinkingBlock(state, results);
    if (!state.textBlockStarted) {
      state.textBlockIndex = state.nextBlockIndex++;
      state.textBlockStarted = true;
      state.textBlockClosed = false;
      results.push({
        type: "content_block_start",
        index: state.textBlockIndex,
        content_block: { type: "text", text: "" }
      });
    }
    results.push({
      type: "content_block_delta",
      index: state.textBlockIndex,
      delta: { type: "text_delta", text: delta.content }
    });
  }
  // Tool calls: a delta carrying an id opens a new tool_use block; argument
  // fragments then stream as input_json_delta on that block.
  if (delta?.tool_calls) {
    for (const tc of delta.tool_calls) {
      const idx = tc.index ?? 0;
      if (tc.id) {
        stopThinkingBlock(state, results);
        stopTextBlock(state, results);
        const toolBlockIndex = state.nextBlockIndex++;
        state.toolCalls.set(idx, { id: tc.id, name: tc.function?.name || "", blockIndex: toolBlockIndex });
        results.push({
          type: "content_block_start",
          index: toolBlockIndex,
          content_block: {
            type: "tool_use",
            id: tc.id,
            name: tc.function?.name || "",
            input: {}
          }
        });
      }
      if (tc.function?.arguments) {
        const toolInfo = state.toolCalls.get(idx);
        if (toolInfo) {
          results.push({
            type: "content_block_delta",
            index: toolInfo.blockIndex,
            delta: { type: "input_json_delta", partial_json: tc.function.arguments }
          });
        }
      }
    }
  }
  // Finish: close any open blocks, then emit message_delta + message_stop.
  if (choice.finish_reason) {
    stopThinkingBlock(state, results);
    stopTextBlock(state, results);
    for (const [, toolInfo] of state.toolCalls) {
      results.push({
        type: "content_block_stop",
        index: toolInfo.blockIndex
      });
    }
    results.push({
      type: "message_delta",
      delta: { stop_reason: convertFinishReason(choice.finish_reason) },
      usage: { output_tokens: 0 } // placeholder count
    });
    results.push({ type: "message_stop" });
  }
  return results.length > 0 ? results : null;
}
// Convert OpenAI finish_reason to Claude stop_reason
// Map OpenAI finish_reason values onto Claude stop_reason values.
// Unknown values fall back to "end_turn".
function convertFinishReason(reason) {
  const table = new Map([
    ["stop", "end_turn"],
    ["length", "max_tokens"],
    ["tool_calls", "tool_use"]
  ]);
  return table.get(reason) ?? "end_turn";
}
// Register
register(FORMATS.OPENAI, FORMATS.CLAUDE, null, openaiToClaudeResponse);

View File

@@ -33,12 +33,12 @@ export function handleBypassRequest(body, model) {
if (firstText === "Warmup") shouldBypass = true;
// Check count pattern: [{"role":"user","content":"count"}]
if (!shouldBypass &&
messages.length === 1 &&
messages[0]?.role === "user" &&
firstText === "count") {
shouldBypass = true;
}
// if (!shouldBypass &&
// messages.length === 1 &&
// messages[0]?.role === "user" &&
// firstText === "count") {
// shouldBypass = true;
// }
// Check skip patterns
if (!shouldBypass && SKIP_PATTERNS?.length) {
@@ -54,8 +54,10 @@ export function handleBypassRequest(body, model) {
// Create bypass response using translator
if (stream) {
console.log("createStreamingResponse", sourceFormat, model);
return createStreamingResponse(sourceFormat, model);
} else {
console.log("createNonStreamingResponse", sourceFormat, model);
return createNonStreamingResponse(sourceFormat, model);
}
}

View File

@@ -1,9 +1,9 @@
import { translateResponse, initState } from "../translator/index.js";
import { FORMATS } from "../translator/formats.js";
// Get HH:MM:SS timestamp
// Get an HH:MM:SS timestamp for log lines.
// Fix: merged diff residue left both the old HH:MM return and the new
// HH:MM:SS return in the body; the old one executed first, so seconds were
// never shown and the new return was dead code.
function getTimeString() {
  return new Date().toLocaleTimeString("en-US", { hour12: false, hour: "2-digit", minute: "2-digit", second: "2-digit" });
}
// Extract usage from any format (Claude, OpenAI, Gemini)
@@ -128,7 +128,8 @@ export function createSSEStream(options = {}) {
targetFormat,
sourceFormat,
provider = null,
reqLogger = null
reqLogger = null,
toolNameMap = null
} = options;
const decoder = new TextDecoder();
@@ -137,7 +138,7 @@ export function createSSEStream(options = {}) {
let usage = null;
// State for translate mode
const state = mode === STREAM_MODE.TRANSLATE ? { ...initState(sourceFormat), provider } : null;
const state = mode === STREAM_MODE.TRANSLATE ? { ...initState(sourceFormat), provider, toolNameMap } : null;
return new TransformStream({
transform(chunk, controller) {
@@ -258,13 +259,14 @@ export function createSSEStream(options = {}) {
}
// Convenience functions for backward compatibility
export function createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider = null, reqLogger = null) {
export function createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider = null, reqLogger = null, toolNameMap = null) {
return createSSEStream({
mode: STREAM_MODE.TRANSLATE,
targetFormat,
sourceFormat,
provider,
reqLogger
reqLogger,
toolNameMap
});
}

View File

@@ -1,8 +1,8 @@
// Stream handler with disconnect detection - shared for all providers
// Get HH:MM:SS timestamp
// Get an HH:MM:SS timestamp for log lines.
// Fix: merged diff residue left both the old HH:MM return and the new
// HH:MM:SS return in the body; the old one executed first, so seconds were
// never shown and the new return was dead code.
function getTimeString() {
  return new Date().toLocaleTimeString("en-US", { hour12: false, hour: "2-digit", minute: "2-digit", second: "2-digit" });
}
/**

View File

@@ -4,19 +4,9 @@ import { v4 as uuidv4 } from "uuid";
import path from "path";
import os from "os";
import fs from "fs";
import { fileURLToPath } from "url";
// Get app name - fixed constant to avoid Windows path issues in standalone build
// Get the application name.
// Fixed constant: resolving package.json relative to import.meta.url breaks in
// standalone builds (Windows path handling), so the name is hard-coded.
// Fix: merged diff residue retained the old package.json lookup whose
// try/catch always returned first, making the intended constant return
// unreachable and keeping the runtime fs/path dependency this change removes.
function getAppName() {
  return "9router";
}
// Get user data directory based on platform

View File

@@ -31,12 +31,24 @@ export async function handleChat(request, clientRawRequest = null) {
};
}
// Log request endpoint and model
const url = new URL(request.url);
const modelStr = body.model;
// Count messages (support both messages[] and input[] formats)
const msgCount = body.messages?.length || body.input?.length || 0;
const toolCount = body.tools?.length || 0;
log.request("POST", `${body.model} | ${msgCount} msgs${toolCount ? ` | ${toolCount} tools` : ""}`);
log.request("POST", `${url.pathname} | ${modelStr} | ${msgCount} msgs${toolCount ? ` | ${toolCount} tools` : ""}`);
// Log API key (masked)
const apiKey = request.headers.get("Authorization");
if (apiKey) {
const masked = log.maskKey(apiKey.replace("Bearer ", ""));
log.debug("AUTH", `API Key: ${masked}`);
} else {
log.debug("AUTH", "No API key provided (local mode)");
}
const modelStr = body.model;
if (!modelStr) {
log.warn("CHAT", "Missing model");
return errorResponse(400, "Missing model");
@@ -70,6 +82,13 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null) {
const { provider, model } = modelInfo;
// Log model routing (alias → actual model)
if (modelStr !== `${provider}/${model}`) {
log.info("ROUTING", `${modelStr}${provider}/${model}`);
} else {
log.info("ROUTING", `Provider: ${provider}, Model: ${model}`);
}
// Try with available accounts (fallback on errors)
let excludeConnectionId = null;
let lastError = null;
@@ -78,6 +97,7 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null) {
const credentials = await getProviderCredentials(provider, excludeConnectionId);
if (!credentials) {
if (!excludeConnectionId) {
log.error("AUTH", `No credentials for provider: ${provider}`);
return errorResponse(400, `No credentials for provider: ${provider}`);
}
log.warn("CHAT", "No more accounts available", { provider });
@@ -87,7 +107,9 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null) {
);
}
log.debug("CHAT", `Using account ${credentials.connectionId} for ${provider}`);
// Log account selection
const accountId = credentials.connectionId.slice(0, 8);
log.info("AUTH", `Using ${provider} account: ${accountId}...`);
const refreshedCredentials = await checkAndRefreshToken(provider, credentials);
@@ -118,11 +140,8 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null) {
const { shouldFallback, cooldownMs } = checkFallbackError(result.status, result.error);
if (shouldFallback) {
log.warn("CHAT", "Account unavailable, trying next", {
provider,
connectionId: credentials.connectionId,
status: result.status
});
const accountId = credentials.connectionId.slice(0, 8);
log.warn("AUTH", `Account ${accountId}... unavailable (status: ${result.status}), trying fallback`);
await markAccountUnavailable(credentials.connectionId, cooldownMs, result.error?.slice(0, 100), result.status, provider);
excludeConnectionId = credentials.connectionId;
lastError = result.error;