feat: enhance CommandCode integration with improved message handling

This commit is contained in:
decolua
2026-05-07 23:02:07 +07:00
parent b72a443bd3
commit ad661c1286
5 changed files with 446 additions and 23 deletions

View File

@@ -1,17 +1,32 @@
/**
 * OpenAI → CommandCode request translator
 *
 * Upstream `/alpha/generate` schema (verified live with curl 2026-05-07):
 * - params.system: STRING at top level (Anthropic-style; system messages NOT allowed in messages[])
 * - params.messages[*].role ∈ {"user","assistant","tool"}
 * - params.messages[*].content: Array of content blocks (NEVER a string)
 * - tool_use blocks (assistant): {type:"tool-call", toolCallId, toolName, input}
 * - tool_result blocks (role=user): {type:"tool-result", toolCallId, toolName, output}
 * - tools[*]: Anthropic plain {name, description, input_schema}
 */
import { register } from "../index.js"; import { register } from "../index.js";
import { FORMATS } from "../formats.js"; import { FORMATS } from "../formats.js";
import { randomUUID } from "crypto"; import { randomUUID } from "crypto";
/**
 * Collapse OpenAI-style message content into one plain string.
 *
 * Accepts null/undefined (→ ""), a plain string (returned as-is), or an
 * array of parts where each part is either a string or an object carrying
 * a string `text` field; recognized parts are joined with newlines and
 * anything else is silently dropped. Other values are coerced via String().
 *
 * @param {*} content - OpenAI message `content` in any of its shapes.
 * @returns {string} flattened text.
 */
function flattenText(content) {
  if (content == null) return "";
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return String(content);
  const pieces = content.reduce((acc, part) => {
    if (typeof part === "string") {
      acc.push(part);
    } else if (part && typeof part === "object" && typeof part.text === "string") {
      acc.push(part.text);
    }
    return acc;
  }, []);
  return pieces.join("\n");
}
function toContentBlocks(content) { function toContentBlocks(content) {
if (content == null) return [{ type: "text", text: "" }]; if (content == null) return [{ type: "text", text: "" }];
if (typeof content === "string") return [{ type: "text", text: content }]; if (typeof content === "string") return [{ type: "text", text: content }];
@@ -24,8 +39,6 @@ function toContentBlocks(content) {
if (part.type === "text" && typeof part.text === "string") { if (part.type === "text" && typeof part.text === "string") {
blocks.push({ type: "text", text: part.text }); blocks.push({ type: "text", text: part.text });
} else if (part.type === "image_url" || part.type === "image") { } else if (part.type === "image_url" || part.type === "image") {
// CommandCode currently rejects multimodal blocks via this gateway;
// collapse to a textual placeholder so the request still validates.
blocks.push({ type: "text", text: "[image omitted]" }); blocks.push({ type: "text", text: "[image omitted]" });
} else if (typeof part.text === "string") { } else if (typeof part.text === "string") {
blocks.push({ type: "text", text: part.text }); blocks.push({ type: "text", text: part.text });
@@ -37,25 +50,101 @@ function toContentBlocks(content) {
return [{ type: "text", text: String(content) }]; return [{ type: "text", text: String(content) }];
} }
/**
 * Parse a JSON string defensively.
 *
 * null/undefined yield an empty object; non-string inputs are passed
 * through untouched (they may already be parsed objects); malformed JSON
 * degrades to an empty object instead of throwing.
 *
 * @param {*} s - candidate JSON string (or already-parsed value).
 * @returns {*} parsed value, the original non-string input, or {}.
 */
function safeParseJson(s) {
  if (s == null) return {};
  if (typeof s === "string") {
    try {
      return JSON.parse(s);
    } catch {
      return {};
    }
  }
  return s;
}
/**
 * Translate OpenAI chat messages into CommandCode message blocks.
 *
 * - "system" messages are hoisted out of the list and returned as a single
 *   string (joined with blank lines), matching the upstream Anthropic-style
 *   `params.system` field.
 * - "tool" messages become role:"tool" with a single tool-result block
 *   ({toolCallId, toolName, output:{type:"text", value}}).
 * - "assistant" messages emit an optional text block plus one tool-call
 *   block per entry in `tool_calls` (arguments parsed leniently); an empty
 *   assistant message still gets one empty text block.
 * - everything else is forwarded as role:"user" with content blocks.
 *
 * @param {Array<object>} [messages] - OpenAI-style chat messages.
 * @returns {{messages: Array<object>, system: string}}
 */
function convertMessages(messages = []) {
  const converted = [];
  const systemParts = [];

  for (const msg of messages) {
    if (!msg) continue;
    switch (msg.role) {
      case "system": {
        const text = flattenText(msg.content);
        if (text) systemParts.push(text);
        break;
      }
      case "tool": {
        const value = typeof msg.content === "string" ? msg.content : flattenText(msg.content);
        converted.push({
          role: "tool",
          content: [{
            type: "tool-result",
            toolCallId: msg.tool_call_id || "",
            toolName: msg.name || "",
            output: { type: "text", value },
          }],
        });
        break;
      }
      case "assistant": {
        const blocks = [];
        const text = flattenText(msg.content);
        if (text) blocks.push({ type: "text", text });
        for (const tc of Array.isArray(msg.tool_calls) ? msg.tool_calls : []) {
          const fn = tc.function || {};
          blocks.push({
            type: "tool-call",
            toolCallId: tc.id || "",
            toolName: fn.name || "",
            input: safeParseJson(fn.arguments),
          });
        }
        // Upstream rejects string/empty content: always send at least one block.
        converted.push({ role: "assistant", content: blocks.length ? blocks : [{ type: "text", text: "" }] });
        break;
      }
      default:
        converted.push({ role: "user", content: toContentBlocks(msg.content) });
    }
  }

  return { messages: converted, system: systemParts.join("\n\n") };
}
/**
 * Convert OpenAI tool declarations to Anthropic-style plain tools.
 *
 * OpenAI `{type:"function", function:{name, description, parameters}}`
 * becomes `{name, description, input_schema}`; entries already in the
 * Anthropic shape (name + input_schema/parameters) pass through
 * normalized. Unrecognized entries are dropped.
 *
 * @param {Array<object>|undefined} tools - OpenAI-style tools array.
 * @returns {Array<object>|undefined} converted tools, or undefined when empty.
 */
function convertTools(tools) {
  if (!Array.isArray(tools) || tools.length === 0) return undefined;
  const converted = tools
    .map((tool) => {
      if (!tool) return null;
      if (tool.type === "function" && tool.function) {
        const { name, description, parameters } = tool.function;
        // Upstream requires a schema; default to a bare object schema.
        return { name, description, input_schema: parameters || { type: "object" } };
      }
      if (tool.name && (tool.input_schema || tool.parameters)) {
        return {
          name: tool.name,
          description: tool.description,
          input_schema: tool.input_schema || tool.parameters,
        };
      }
      return null;
    })
    .filter((t) => t != null);
  return converted.length ? converted : undefined;
}
export function openaiToCommandCode(model, body, stream /* , credentials */) { export function openaiToCommandCode(model, body, stream /* , credentials */) {
const { messages, system } = convertMessages(body.messages);
const params = { const params = {
model, model,
messages: convertMessages(body.messages), messages,
stream: stream !== false, stream: stream !== false,
max_tokens: body.max_tokens ?? body.max_output_tokens ?? 64000, max_tokens: body.max_tokens ?? body.max_output_tokens ?? 64000,
temperature: body.temperature ?? 0.3, temperature: body.temperature ?? 0.3,
}; };
if (Array.isArray(body.tools) && body.tools.length > 0) { if (system) params.system = system;
params.tools = body.tools;
} const tools = convertTools(body.tools);
if (tools) params.tools = tools;
if (body.top_p != null) params.top_p = body.top_p; if (body.top_p != null) params.top_p = body.top_p;
const today = new Date().toISOString().slice(0, 10); const today = new Date().toISOString().slice(0, 10);

View File

@@ -5,8 +5,9 @@
* {"type":"start"} {"type":"start-step", ...} * {"type":"start"} {"type":"start-step", ...}
* {"type":"reasoning-start","id":"..."} {"type":"reasoning-delta","text":"..."} * {"type":"reasoning-start","id":"..."} {"type":"reasoning-delta","text":"..."}
* {"type":"text-start","id":"..."} {"type":"text-delta","text":"..."} * {"type":"text-start","id":"..."} {"type":"text-delta","text":"..."}
* {"type":"tool-input-start","toolCallId","toolName"} * {"type":"tool-input-start","id","toolName"}
* {"type":"tool-input-delta","toolCallId","inputTextDelta"} * {"type":"tool-input-delta","id","delta"}
* {"type":"tool-input-end","id"}
* {"type":"tool-call","toolCallId","toolName","input"} * {"type":"tool-call","toolCallId","toolName","input"}
* {"type":"finish-step","finishReason","usage": {...}, ...} * {"type":"finish-step","finishReason","usage": {...}, ...}
* {"type":"finish",...} * {"type":"finish",...}
@@ -104,7 +105,7 @@ export function convertCommandCodeToOpenAI(chunk, state) {
break; break;
} }
case "tool-input-start": { case "tool-input-start": {
const id = event.toolCallId || `call_${Date.now()}_${state.toolIndex}`; const id = event.id || event.toolCallId || `call_${Date.now()}_${state.toolIndex}`;
let idx = state.toolIndexById.get(id); let idx = state.toolIndexById.get(id);
if (idx == null) { if (idx == null) {
idx = state.toolIndex++; idx = state.toolIndex++;
@@ -125,13 +126,13 @@ export function convertCommandCodeToOpenAI(chunk, state) {
break; break;
} }
case "tool-input-delta": { case "tool-input-delta": {
const id = event.toolCallId; const id = event.id || event.toolCallId;
const idx = state.toolIndexById.get(id); const idx = state.toolIndexById.get(id);
if (idx == null) break; if (idx == null) break;
const delta = { const delta = {
tool_calls: [{ tool_calls: [{
index: idx, index: idx,
function: { arguments: event.inputTextDelta || event.delta || "" }, function: { arguments: event.delta || event.inputTextDelta || "" },
}], }],
}; };
out.push(makeChunk(state, delta)); out.push(makeChunk(state, delta));
@@ -178,7 +179,9 @@ export function convertCommandCodeToOpenAI(chunk, state) {
} }
case "error": { case "error": {
state.finishReason = "stop"; state.finishReason = "stop";
out.push(makeChunk(state, { content: `\n\n[CommandCode error: ${event.error || event.message || "unknown"}]` })); const errVal = event.error ?? event.message ?? "unknown";
const errStr = typeof errVal === "string" ? errVal : JSON.stringify(errVal);
out.push(makeChunk(state, { content: `\n\n[CommandCode error: ${errStr}]` }));
out.push(makeChunk(state, {}, "stop")); out.push(makeChunk(state, {}, "stop"));
break; break;
} }

View File

@@ -2,7 +2,8 @@ import { NextResponse } from "next/server";
import { getProviderNodeById } from "@/models"; import { getProviderNodeById } from "@/models";
import { isOpenAICompatibleProvider, isAnthropicCompatibleProvider, isCustomEmbeddingProvider, AI_PROVIDERS } from "@/shared/constants/providers"; import { isOpenAICompatibleProvider, isAnthropicCompatibleProvider, isCustomEmbeddingProvider, AI_PROVIDERS } from "@/shared/constants/providers";
import { getDefaultModel } from "open-sse/config/providerModels.js"; import { getDefaultModel } from "open-sse/config/providerModels.js";
import { resolveOllamaLocalHost } from "open-sse/config/providers.js"; import { resolveOllamaLocalHost, PROVIDERS } from "open-sse/config/providers.js";
import { openaiToCommandCode } from "open-sse/translator/request/openai-to-commandcode.js";
import { PROVIDER_ENDPOINTS } from "@/shared/constants/config"; import { PROVIDER_ENDPOINTS } from "@/shared/constants/config";
// Probe a webSearch/webFetch provider using its searchConfig/fetchConfig. // Probe a webSearch/webFetch provider using its searchConfig/fetchConfig.
@@ -413,6 +414,28 @@ export async function POST(request) {
break; break;
} }
// Probe CommandCode credentials with a minimal one-token, non-streaming
// generate request built through the shared request translator, so the
// probe exercises the exact envelope real traffic uses.
case "commandcode": {
const cfg = PROVIDERS.commandcode;
const model = getDefaultModel("commandcode");
const payload = openaiToCommandCode(model, {
messages: [{ role: "user", content: "ping" }],
max_tokens: 1,
stream: false,
}, false);
const res = await fetch(cfg.baseUrl, {
method: "POST",
headers: {
"Content-Type": "application/json",
// cfg.headers presumably carries provider-static headers — TODO confirm shape.
...(cfg.headers || {}),
// Fresh session id per probe; upstream appears to expect one per request.
"x-session-id": crypto.randomUUID(),
"Authorization": `Bearer ${apiKey}`,
},
body: JSON.stringify(payload),
});
// Only explicit auth failures invalidate the key; other statuses
// (400/429/5xx) still prove the credential was accepted.
isValid = res.status !== 401 && res.status !== 403;
break;
}
case "deepgram": { case "deepgram": {
const res = await fetch("https://api.deepgram.com/v1/projects", { const res = await fetch("https://api.deepgram.com/v1/projects", {
headers: { "Authorization": `Token ${apiKey}` }, headers: { "Authorization": `Token ${apiKey}` },

View File

@@ -0,0 +1,127 @@
/**
* Unit tests for open-sse/translator/response/commandcode-to-openai.js
*
* Verified live against upstream stream (curl, 2026-05-07):
* - tool-input-start: { id, toolName } (id, NOT toolCallId)
* - tool-input-delta: { id, delta } (id, NOT toolCallId; delta, NOT inputTextDelta)
* - tool-input-end: { id }
* - tool-call (final): { toolCallId, toolName, input }
*/
import { describe, it, expect } from "vitest";
import { convertCommandCodeToOpenAI } from "../../open-sse/translator/response/commandcode-to-openai.js";
/**
 * Drive the stream converter with a sequence of upstream events.
 *
 * Each event is JSON-serialized (mirroring the SSE wire format) and pushed
 * through convertCommandCodeToOpenAI with a shared mutable state object.
 *
 * @param {Array<object>} events - upstream CommandCode events, in order.
 * @returns {{state: object, chunks: Array<object>}} converter state and all emitted chunks.
 */
function feed(events) {
  const state = {};
  const chunks = [];
  events.forEach((event) => {
    const produced = convertCommandCodeToOpenAI(JSON.stringify(event), state);
    if (produced) chunks.push(...produced);
  });
  return { state, chunks };
}
describe("commandcode-to-openai — text-delta", () => {
  it("emits assistant role on first delta then content-only", () => {
    const { chunks } = feed([
      { type: "text-delta", text: "Hello" },
      { type: "text-delta", text: " world" },
    ]);
    const [first, second] = chunks;
    // The assistant role is announced exactly once, on the opening chunk.
    expect(first.choices[0].delta.role).toBe("assistant");
    expect(first.choices[0].delta.content).toBe("Hello");
    expect(second.choices[0].delta.role).toBeUndefined();
    expect(second.choices[0].delta.content).toBe(" world");
  });
});
describe("commandcode-to-openai — reasoning-delta", () => {
  it("maps reasoning-delta to reasoning_content delta", () => {
    const { chunks } = feed([{ type: "reasoning-delta", text: "thinking..." }]);
    // Reasoning text surfaces under the non-standard reasoning_content field.
    const delta = chunks[0].choices[0].delta;
    expect(delta.reasoning_content).toBe("thinking...");
  });
});
describe("commandcode-to-openai — tool-input-* with id field (live schema)", () => {
  it("registers tool index using event.id (NOT toolCallId)", () => {
    const { chunks } = feed([
      { type: "tool-input-start", id: "call_X", toolName: "Bash" },
      { type: "tool-input-delta", id: "call_X", delta: "{\"cmd" },
      { type: "tool-input-delta", id: "call_X", delta: "\":\"ls\"}" },
    ]);
    const toolCallAt = (i) => chunks[i].choices[0].delta.tool_calls[0];
    // The opening chunk carries the call id and tool name.
    expect(toolCallAt(0).id).toBe("call_X");
    expect(toolCallAt(0).function.name).toBe("Bash");
    // Argument fragments stream through the subsequent chunks.
    expect(toolCallAt(1).function.arguments).toBe("{\"cmd");
    expect(toolCallAt(2).function.arguments).toBe("\":\"ls\"}");
  });

  it("ignores tool-input-delta when id is unknown (no prior start)", () => {
    const { chunks } = feed([
      { type: "tool-input-delta", id: "unknown", delta: "x" },
    ]);
    expect(chunks.length).toBe(0);
  });
});
describe("commandcode-to-openai — final tool-call event", () => {
  it("does NOT re-emit tool_calls when tool-input-* deltas already fired", () => {
    const { chunks } = feed([
      { type: "tool-input-start", id: "call_Y", toolName: "Write" },
      { type: "tool-input-delta", id: "call_Y", delta: "{\"file\":\"a\"}" },
      { type: "tool-call", toolCallId: "call_Y", toolName: "Write", input: { file: "a" } },
    ]);
    // start + delta only; the trailing tool-call must not duplicate output.
    expect(chunks.length).toBe(2);
  });

  it("emits a consolidated tool_calls when only the final tool-call event arrives", () => {
    const { chunks } = feed([
      { type: "tool-call", toolCallId: "call_Z", toolName: "Read", input: { path: "/x" } },
    ]);
    expect(chunks.length).toBe(1);
    const [call] = chunks[0].choices[0].delta.tool_calls;
    expect(call.id).toBe("call_Z");
    expect(call.function.name).toBe("Read");
    expect(call.function.arguments).toBe(JSON.stringify({ path: "/x" }));
  });
});
describe("commandcode-to-openai — finish", () => {
  it("emits a final chunk with finish_reason=tool_calls when finishReason is tool-calls", () => {
    const { chunks } = feed([
      { type: "tool-input-start", id: "call_F", toolName: "Bash" },
      { type: "tool-input-delta", id: "call_F", delta: "{}" },
      { type: "finish-step", finishReason: "tool-calls" },
      { type: "finish" },
    ]);
    // Upstream "tool-calls" maps to OpenAI "tool_calls".
    const final = chunks.at(-1);
    expect(final.choices[0].finish_reason).toBe("tool_calls");
  });

  it("includes usage on the final chunk when totalUsage provided", () => {
    // feed() serializes each event, so sharing one literal is safe.
    const usage = { inputTokens: 10, outputTokens: 5, totalTokens: 15 };
    const { chunks } = feed([
      { type: "text-delta", text: "hi" },
      { type: "finish-step", finishReason: "stop", usage },
      { type: "finish", totalUsage: usage },
    ]);
    expect(chunks.at(-1).usage).toEqual({ prompt_tokens: 10, completion_tokens: 5, total_tokens: 15 });
  });
});
describe("commandcode-to-openai — error event", () => {
  it("stringifies object errors so client sees readable message", () => {
    const { chunks } = feed([
      { type: "error", error: { type: "server_error", message: "Boom" } },
    ]);
    const content = chunks[0].choices[0].delta.content;
    // Object errors must be JSON-stringified, never coerced to "[object Object]".
    expect(content).toContain("Boom");
    expect(content).not.toContain("[object Object]");
  });
});

View File

@@ -0,0 +1,181 @@
/**
* Unit tests for open-sse/translator/request/openai-to-commandcode.js
*
* Verified live against upstream `/alpha/generate` (curl, 2026-05-07):
* - params.system: STRING at top level (Anthropic-style; "system" role NOT in messages[])
* - params.messages[*].role ∈ {"user","assistant","tool"}
* - params.messages[*].content: Array<content_block> (NEVER string)
* - tools[*]: Anthropic plain {name, description, input_schema}
*/
import { describe, it, expect } from "vitest";
import { openaiToCommandCode } from "../../open-sse/translator/request/openai-to-commandcode.js";
const MODEL = "moonshotai/Kimi-K2.6";
describe("openaiToCommandCode — basic envelope", () => {
  it("returns the expected top-level envelope shape", () => {
    const out = openaiToCommandCode(MODEL, {
      messages: [{ role: "user", content: "hi" }],
    }, true);
    // All four envelope keys must be present at the top level.
    for (const key of ["threadId", "memory", "config", "params"]) {
      expect(out).toHaveProperty(key);
    }
    expect(out.params.model).toBe(MODEL);
    expect(out.params.stream).toBe(true);
  });
});
describe("openaiToCommandCode — system handling", () => {
  // Shorthand: translate a message list with default options.
  const translate = (messages) => openaiToCommandCode(MODEL, { messages }, true);

  it("hoists system messages to params.system (string), not messages[]", () => {
    const { params } = translate([
      { role: "system", content: "You are concise." },
      { role: "user", content: "hi" },
    ]);
    expect(typeof params.system).toBe("string");
    expect(params.system).toBe("You are concise.");
    expect(params.messages.map((m) => m.role)).not.toContain("system");
  });

  it("joins multiple system messages with blank line", () => {
    const { params } = translate([
      { role: "system", content: "A" },
      { role: "system", content: "B" },
      { role: "user", content: "hi" },
    ]);
    expect(params.system).toBe("A\n\nB");
  });

  it("omits params.system when no system messages", () => {
    const { params } = translate([{ role: "user", content: "hi" }]);
    expect(params.system).toBeUndefined();
  });
});
describe("openaiToCommandCode — content shape", () => {
  it("MUST always emit content as Array (never string) for user", () => {
    const out = openaiToCommandCode(MODEL, {
      messages: [{ role: "user", content: "hello" }],
    }, true);
    const [userMsg] = out.params.messages;
    expect(Array.isArray(userMsg.content)).toBe(true);
    expect(userMsg.content[0]).toEqual({ type: "text", text: "hello" });
  });

  it("MUST always emit content as Array for assistant", () => {
    const out = openaiToCommandCode(MODEL, {
      messages: [
        { role: "user", content: "a" },
        { role: "assistant", content: "b" },
      ],
    }, true);
    const assistantMsg = out.params.messages[1];
    expect(Array.isArray(assistantMsg.content)).toBe(true);
    expect(assistantMsg.content[0]).toEqual({ type: "text", text: "b" });
  });
});
describe("openaiToCommandCode — tool role / tool-result (AI SDK)", () => {
  it("converts role:\"tool\" to role:\"tool\" with tool-result block; output is {type:\"text\",value}", () => {
    const out = openaiToCommandCode(MODEL, {
      messages: [
        { role: "user", content: "run X" },
        {
          role: "assistant",
          content: null,
          tool_calls: [
            { id: "call_1", type: "function", function: { name: "do_x", arguments: "{\"a\":1}" } },
          ],
        },
        { role: "tool", tool_call_id: "call_1", name: "do_x", content: "RESULT_OK" },
      ],
    }, true);
    // The tool reply is the final converted message.
    const toolMsg = out.params.messages.at(-1);
    expect(toolMsg.role).toBe("tool");
    const [block] = toolMsg.content;
    expect(block.type).toBe("tool-result");
    expect(block.toolCallId).toBe("call_1");
    expect(block.toolName).toBe("do_x");
    expect(block.output).toEqual({ type: "text", value: "RESULT_OK" });
  });
});
describe("openaiToCommandCode — assistant tool_calls / tool-call", () => {
  it("converts assistant.tool_calls[] into content blocks of type tool-call", () => {
    const out = openaiToCommandCode(MODEL, {
      messages: [
        { role: "user", content: "go" },
        {
          role: "assistant",
          content: null,
          tool_calls: [
            { id: "call_42", type: "function", function: { name: "search", arguments: "{\"q\":\"hi\"}" } },
          ],
        },
      ],
    }, true);
    const assistant = out.params.messages[1];
    expect(assistant.role).toBe("assistant");
    const callBlock = assistant.content.find((b) => b.type === "tool-call");
    expect(callBlock).toBeDefined();
    expect(callBlock.toolCallId).toBe("call_42");
    expect(callBlock.toolName).toBe("search");
    // String arguments are parsed into a plain object.
    expect(callBlock.input).toEqual({ q: "hi" });
  });
});
describe("openaiToCommandCode — tools schema conversion", () => {
it("converts OpenAI {type:\"function\", function:{...}} to Anthropic plain {name, input_schema}", () => {
const out = openaiToCommandCode(MODEL, {
messages: [{ role: "user", content: "hi" }],
tools: [
{
type: "function",
function: {
name: "weather",
description: "Get weather",
parameters: { type: "object", properties: { city: { type: "string" } }, required: ["city"] },
},
},
],
}, true);
const t = out.params.tools[0];
expect(t.name).toBe("weather");
expect(t.input_schema).toBeDefined();
expect(t.input_schema.type).toBe("object");
expect(t.function).toBeUndefined();
expect(t.parameters).toBeUndefined();
});
it("preserves description on converted tool", () => {
const out = openaiToCommandCode(MODEL, {
messages: [{ role: "user", content: "hi" }],
tools: [
{ type: "function", function: { name: "ping", description: "Ping the server", parameters: { type: "object" } } },
],
}, true);
expect(out.params.tools[0].description).toBe("Ping the server");
});
it("does not include tools field when input has none", () => {
const out = openaiToCommandCode(MODEL, {
messages: [{ role: "user", content: "hi" }],
}, true);
expect(out.params.tools).toBeUndefined();
});
});