/**
 * CommandCode → OpenAI response translator
 *
 * CommandCode upstream emits NDJSON-style AI SDK v5 stream events:
 * {"type":"start"} {"type":"start-step", ...}
 * {"type":"reasoning-start","id":"..."} {"type":"reasoning-delta","text":"..."}
 * {"type":"text-start","id":"..."} {"type":"text-delta","text":"..."}
 * {"type":"tool-input-start","toolCallId","toolName"}
 * {"type":"tool-input-delta","toolCallId","inputTextDelta"}
 * {"type":"tool-call","toolCallId","toolName","input"}
 * {"type":"finish-step","finishReason","usage": {...}, ...}
 * {"type":"finish",...}
 *
 * Each upstream "event" arrives as one JSON object per line — we receive it as a string chunk
 * already split per line by the upstream SSE/JSON-line reader in 9router.
 */
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
/**
 * Lazily initialize the per-response translation state on first use.
 * A `responseId` already being present means the state was set up by an
 * earlier event, so repeated calls are no-ops.
 *
 * @param {object} state - mutable accumulator shared across stream events
 * @param {string} [model] - model name from the event; used only if the
 *   state does not already carry one ("commandcode" as last resort)
 */
function ensureState(state, model) {
  if (state.responseId) return;

  state.responseId = `chatcmpl-${Date.now()}`;
  state.created = Math.floor(Date.now() / 1000);
  state.model = state.model || model || "commandcode";
  Object.assign(state, {
    chunkIndex: 0,          // how many role/content deltas we have emitted
    toolIndex: 0,           // next OpenAI tool_calls[].index to hand out
    toolIndexById: new Map(), // upstream toolCallId → OpenAI tool index
    openTools: new Set(),
    openText: false,
    finishReason: null,     // captured from finish-step, consumed at finish
    usage: null,            // captured from finish-step, consumed at finish
  });
}
/**
 * Wrap a single delta in an OpenAI `chat.completion.chunk` envelope,
 * reusing the id/created/model captured in the translation state.
 *
 * @param {object} state - initialized state (see ensureState)
 * @param {object} delta - the `choices[0].delta` payload
 * @param {string|null} [finishReason] - `finish_reason` for the chunk
 * @returns {object} OpenAI-shaped streaming chunk
 */
function makeChunk(state, delta, finishReason = null) {
  const { responseId, created, model } = state;
  return {
    id: responseId,
    object: "chat.completion.chunk",
    created,
    model,
    choices: [{ index: 0, delta, finish_reason: finishReason }],
  };
}
/**
 * Translate an upstream finish reason into the OpenAI vocabulary.
 * Unknown (truthy) reasons pass through unchanged; missing/empty reasons
 * fall back to "stop". Upstream "error" is deliberately reported as
 * "stop" (the error case emits its own explanatory content chunk).
 *
 * @param {string|null|undefined} reason - upstream finishReason
 * @returns {string} OpenAI finish_reason value
 */
function mapFinishReason(reason) {
  // Map (not plain object) so hostile keys like "toString" can't collide
  // with prototype members.
  const translation = new Map([
    ["stop", "stop"],
    ["length", "length"],
    ["tool-calls", "tool_calls"],
    ["tool_use", "tool_calls"],
    ["content-filter", "content_filter"],
    ["error", "stop"],
  ]);
  return translation.get(reason) ?? (reason || "stop");
}
/**
 * Parse one raw upstream line into an event object.
 * Tolerates "data: {...}" SSE framing; returns null for blank lines,
 * the "[DONE]" sentinel, and unparsable JSON (all silently dropped).
 *
 * @param {string} raw - one line from the upstream reader
 * @returns {unknown|null} parsed JSON value, or null when there is nothing to process
 */
function parseEventLine(raw) {
  const line = raw.trim();
  if (!line) return null;
  const payload = line.startsWith("data:") ? line.slice(5).trim() : line;
  if (!payload || payload === "[DONE]") return null;
  try {
    return JSON.parse(payload);
  } catch {
    return null;
  }
}

/**
 * Translate one CommandCode stream event into OpenAI chat.completion.chunk(s).
 *
 * @param {string|object} chunk - an upstream line (JSON string) or an
 *   already-parsed event; already-OpenAI chunks are forwarded untouched
 * @param {object} state - mutable per-response state shared across calls
 * @returns {object|object[]|null} pass-through chunk, array of emitted
 *   chunks, or null when the event carries nothing client-visible
 */
export function convertCommandCodeToOpenAI(chunk, state) {
  if (!chunk) return null;

  // Already-OpenAI chunk: pass through untouched.
  if (typeof chunk === "object" && chunk.object === "chat.completion.chunk") {
    return chunk;
  }

  const event = typeof chunk === "string" ? parseEventLine(chunk) : chunk;
  if (!event || typeof event !== "object" || !event.type) return null;

  ensureState(state, event.model);

  const emitted = [];
  // The very first delta of a response must carry the assistant role.
  const isFirstDelta = () => state.chunkIndex === 0;

  switch (event.type) {
    case "text-delta": {
      const piece = event.text || event.delta || "";
      if (!piece) break;
      const delta = isFirstDelta()
        ? { role: "assistant", content: piece }
        : { content: piece };
      state.chunkIndex += 1;
      state.openText = true;
      emitted.push(makeChunk(state, delta));
      break;
    }

    case "reasoning-delta": {
      const piece = event.text || "";
      if (!piece) break;
      // Map reasoning to OpenAI "reasoning_content" field (used by
      // deepseek-reasoner-style clients).
      const delta = isFirstDelta()
        ? { role: "assistant", reasoning_content: piece }
        : { reasoning_content: piece };
      state.chunkIndex += 1;
      emitted.push(makeChunk(state, delta));
      break;
    }

    case "tool-input-start": {
      // Announce the tool call (name + empty arguments); deltas follow.
      const callId = event.toolCallId || `call_${Date.now()}_${state.toolIndex}`;
      let slot = state.toolIndexById.get(callId);
      if (slot == null) {
        slot = state.toolIndex++;
        state.toolIndexById.set(callId, slot);
      }
      state.openTools.add(callId);
      const delta = {
        ...(isFirstDelta() ? { role: "assistant" } : {}),
        tool_calls: [{
          index: slot,
          id: callId,
          type: "function",
          function: { name: event.toolName || "", arguments: "" },
        }],
      };
      state.chunkIndex += 1;
      emitted.push(makeChunk(state, delta));
      break;
    }

    case "tool-input-delta": {
      // Argument fragment for a tool call we already announced; fragments
      // for unknown ids are dropped.
      const slot = state.toolIndexById.get(event.toolCallId);
      if (slot == null) break;
      emitted.push(makeChunk(state, {
        tool_calls: [{
          index: slot,
          function: { arguments: event.inputTextDelta || event.delta || "" },
        }],
      }));
      break;
    }

    case "tool-call": {
      // Final consolidated tool call — only emit if we never saw tool-input-* deltas.
      const callId = event.toolCallId;
      if (state.toolIndexById.has(callId)) break;
      const slot = state.toolIndex++;
      state.toolIndexById.set(callId, slot);
      const args = typeof event.input === "string"
        ? event.input
        : JSON.stringify(event.input ?? {});
      const delta = {
        ...(isFirstDelta() ? { role: "assistant" } : {}),
        tool_calls: [{
          index: slot,
          id: callId,
          type: "function",
          function: { name: event.toolName || "", arguments: args },
        }],
      };
      state.chunkIndex += 1;
      emitted.push(makeChunk(state, delta));
      break;
    }

    case "finish-step": {
      // Emit nothing; stash the reason/usage for the terminal "finish" event.
      state.finishReason = mapFinishReason(event.finishReason);
      if (event.usage) state.usage = event.usage;
      break;
    }

    case "finish": {
      const finishReason = state.finishReason || mapFinishReason(event.finishReason || "stop");
      const finalChunk = makeChunk(state, {}, finishReason);
      const usage = event.totalUsage || state.usage;
      if (usage) {
        const promptTokens = usage.inputTokens ?? 0;
        const completionTokens = usage.outputTokens ?? 0;
        finalChunk.usage = {
          prompt_tokens: promptTokens,
          completion_tokens: completionTokens,
          total_tokens: usage.totalTokens ?? (promptTokens + completionTokens),
        };
      }
      emitted.push(finalChunk);
      break;
    }

    case "error": {
      // Surface the upstream error as visible text, then close the stream.
      state.finishReason = "stop";
      emitted.push(makeChunk(state, { content: `\n\n[CommandCode error: ${event.error || event.message || "unknown"}]` }));
      emitted.push(makeChunk(state, {}, "stop"));
      break;
    }

    // Silently ignore: start, start-step, reasoning-start, reasoning-end, text-start, text-end,
    // provider-metadata, message-metadata, etc. They carry no client-visible content.
    default:
      break;
  }

  return emitted.length ? emitted : null;
}
// Register this one-way translator: COMMANDCODE-format responses → OPENAI-format
// chunks. NOTE(review): the third argument is presumably the slot for a request
// converter (none provided here) — confirm against register() in ../index.js.
register(FORMATS.COMMANDCODE, FORMATS.OPENAI, null, convertCommandCodeToOpenAI);