Files
9router/open-sse/translator/request/openai-responses.js
2026-01-15 18:29:47 +07:00

234 lines
6.6 KiB
JavaScript

/**
* Translator: OpenAI Responses API → OpenAI Chat Completions
*
* Responses API uses: { input: [...], instructions: "..." }
* Chat API uses: { messages: [...] }
*/
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
/**
* Convert OpenAI Responses API request to OpenAI Chat Completions format
*/
/**
 * Convert an OpenAI Responses API request body to OpenAI Chat Completions format.
 *
 * Responses API: { input: [...items], instructions: "...", max_output_tokens }
 * Chat API:      { messages: [...], max_tokens }
 *
 * @param {string} model - Target model id (unused; body.model passes through via spread).
 * @param {object} body - Responses API request body.
 * @param {boolean} stream - Streaming flag (unused; body.stream passes through via spread).
 * @param {object} credentials - Provider credentials (unused here).
 * @returns {object} Chat Completions request body.
 */
function openaiResponsesToOpenAIRequest(model, body, stream, credentials) {
  // No input items → nothing to translate; pass the body through untouched.
  if (!body.input) return body;
  const result = { ...body };
  result.messages = [];
  // Responses "instructions" becomes a leading system message.
  if (body.instructions) {
    result.messages.push({ role: "system", content: body.instructions });
  }
  // Consecutive function_call items are grouped into one assistant message
  // with tool_calls (the Chat API shape). Any message or tool-result item
  // ends the current group.
  let currentAssistantMsg = null;
  const flushAssistant = () => {
    if (currentAssistantMsg) {
      result.messages.push(currentAssistantMsg);
      currentAssistantMsg = null;
    }
  };
  for (const item of body.input) {
    if (item.type === "message") {
      flushAssistant();
      // Convert content parts: input_text / output_text → text.
      const content = Array.isArray(item.content)
        ? item.content.map((c) => {
            if (c.type === "input_text" || c.type === "output_text") {
              return { type: "text", text: c.text };
            }
            return c;
          })
        : item.content;
      result.messages.push({ role: item.role, content });
    } else if (item.type === "function_call") {
      // Start or extend the pending assistant tool_calls message.
      if (!currentAssistantMsg) {
        currentAssistantMsg = { role: "assistant", content: null, tool_calls: [] };
      }
      currentAssistantMsg.tool_calls.push({
        id: item.call_id,
        type: "function",
        function: {
          name: item.name,
          arguments: item.arguments,
        },
      });
    } else if (item.type === "function_call_output") {
      // The tool result must follow the assistant message that requested it.
      flushAssistant();
      result.messages.push({
        role: "tool",
        tool_call_id: item.call_id,
        content: typeof item.output === "string" ? item.output : JSON.stringify(item.output),
      });
    }
    // "reasoning" items are display-only and are intentionally skipped.
  }
  // Flush a trailing assistant tool_calls message, if any.
  flushAssistant();
  // Convert Responses-style flat tools into Chat-style nested tools.
  if (body.tools && Array.isArray(body.tools)) {
    result.tools = body.tools.map((tool) => {
      if (tool.function) return tool; // already in Chat shape
      return {
        type: "function",
        function: {
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters,
          strict: tool.strict,
        },
      };
    });
  }
  // Bug fix: Responses "max_output_tokens" previously leaked through the
  // spread unchanged; Chat Completions expects "max_tokens".
  if (result.max_output_tokens !== undefined) {
    result.max_tokens = result.max_tokens ?? result.max_output_tokens;
    delete result.max_output_tokens;
  }
  // Strip Responses-API-specific fields that Chat Completions rejects.
  delete result.input;
  delete result.instructions;
  delete result.include;
  delete result.prompt_cache_key;
  delete result.store;
  delete result.reasoning;
  return result;
}
/**
* Convert OpenAI Chat Completions to OpenAI Responses API format
*/
/**
 * Convert an OpenAI Chat Completions request body to OpenAI Responses API format.
 *
 * Chat API:      { messages: [...], max_tokens }
 * Responses API: { input: [...items], instructions: "...", max_output_tokens }
 *
 * @param {string} model - Model id to place in the outgoing body.
 * @param {object} body - Chat Completions request body.
 * @param {boolean} stream - Streaming flag (currently ignored; stream is forced
 *   to true for the SSE pipeline — NOTE(review): confirm this is intentional).
 * @param {object} credentials - Provider credentials (unused here).
 * @returns {object} Responses API request body.
 */
function openaiToOpenAIResponsesRequest(model, body, stream, credentials) {
  const result = {
    model,
    input: [],
    stream: true,
    store: false,
  };
  // Collapse string-or-parts content into plain text (used for instructions).
  const extractText = (content) => {
    if (typeof content === "string") return content;
    if (Array.isArray(content)) {
      return content
        .filter((c) => c && c.type === "text" && typeof c.text === "string")
        .map((c) => c.text)
        .join("\n");
    }
    return "";
  };
  // The first system message becomes "instructions"; later ones are dropped.
  let hasSystemMessage = false;
  const messages = body.messages || [];
  for (const msg of messages) {
    if (msg.role === "system") {
      if (!hasSystemMessage) {
        // Bug fix: array-form system content was previously discarded as "".
        result.instructions = extractText(msg.content);
        hasSystemMessage = true;
      }
      continue; // System messages never appear in input items.
    }
    // User/assistant messages become "message" input items.
    if (msg.role === "user" || msg.role === "assistant") {
      const contentType = msg.role === "user" ? "input_text" : "output_text";
      const content = typeof msg.content === "string"
        ? [{ type: contentType, text: msg.content }]
        : Array.isArray(msg.content)
          ? msg.content.map((c) => {
              if (c.type === "text") return { type: contentType, text: c.text };
              // Images are not forwarded; substitute a text placeholder.
              if (c.type === "image_url") return { type: contentType, text: "[Image content]" };
              return c;
            })
          : [];
      // Bug fix: an assistant message whose only payload is tool_calls has
      // null content; emitting an empty-content message item is invalid, so
      // skip the message and emit only the function_call items below.
      if (content.length > 0 || !msg.tool_calls) {
        result.input.push({
          type: "message",
          role: msg.role,
          content,
        });
      }
    }
    // Assistant tool calls become "function_call" input items.
    if (msg.role === "assistant" && msg.tool_calls) {
      for (const tc of msg.tool_calls) {
        result.input.push({
          type: "function_call",
          call_id: tc.id,
          name: tc.function?.name || "",
          arguments: tc.function?.arguments || "{}",
        });
      }
    }
    // Tool results become "function_call_output" input items.
    if (msg.role === "tool") {
      result.input.push({
        type: "function_call_output",
        call_id: msg.tool_call_id,
        output: msg.content,
      });
    }
  }
  // No system message → leave instructions empty (filled by executor later).
  if (!hasSystemMessage) {
    result.instructions = "";
  }
  // Convert Chat-style nested tools into Responses-style flat tools.
  if (body.tools && Array.isArray(body.tools)) {
    result.tools = body.tools.map((tool) => {
      if (tool.type === "function") {
        return {
          type: "function",
          name: tool.function.name,
          description: tool.function.description,
          parameters: tool.function.parameters,
          strict: tool.function.strict,
        };
      }
      return tool;
    });
  }
  // Pass through sampling fields.
  if (body.temperature !== undefined) result.temperature = body.temperature;
  // Bug fix: the Responses API parameter is "max_output_tokens", not
  // "max_tokens" (which it rejects).
  if (body.max_tokens !== undefined) result.max_output_tokens = body.max_tokens;
  if (body.max_output_tokens !== undefined) result.max_output_tokens = body.max_output_tokens;
  if (body.top_p !== undefined) result.top_p = body.top_p;
  return result;
}
// Register both directions
// Responses → Chat: request translator only; the fourth argument (presumably a
// response translator — TODO confirm against register()'s signature) is null.
register(FORMATS.OPENAI_RESPONSES, FORMATS.OPENAI, openaiResponsesToOpenAIRequest, null);
// Chat → Responses: request translator only, same null fourth argument.
register(FORMATS.OPENAI, FORMATS.OPENAI_RESPONSES, openaiToOpenAIResponsesRequest, null);