feat(ollama): Add Ollama provider support with models and configuration, including API endpoints and UI updates.

This commit is contained in:
decolua
2026-03-12 15:24:02 +07:00
parent a224f68e5c
commit 32e3980a13
10 changed files with 158 additions and 28 deletions

View File

@@ -0,0 +1,118 @@
import { describe, it, expect } from "vitest";
import { FORMATS } from "../../open-sse/translator/formats.js";
import { translateRequest } from "../../open-sse/translator/index.js";
import { claudeToOpenAIRequest } from "../../open-sse/translator/request/claude-to-openai.js";
import { filterToOpenAIFormat } from "../../open-sse/translator/helpers/openaiHelper.js";
import { parseSSELine } from "../../open-sse/utils/streamHelpers.js";
// Unit tests for Claude -> OpenAI request normalization and for SSE/NDJSON
// stream-line parsing used by the Ollama provider path.
describe("request normalization", () => {
  // Build a Claude-style text content array from plain strings.
  const textBlocks = (...texts) => texts.map((text) => ({ type: "text", text }));

  // Deep-copy a JSON-safe fixture so translators cannot mutate the original.
  const deepCopy = (value) => JSON.parse(JSON.stringify(value));

  it("claudeToOpenAIRequest flattens text-only content arrays into string", () => {
    const payload = {
      messages: [{ role: "user", content: textBlocks("hi", "there") }],
    };

    const translated = claudeToOpenAIRequest("gpt-oss:120b", payload, true);

    // Text-only arrays collapse to a single newline-joined string.
    expect(translated.messages[0].content).toBe("hi\nthere");
  });

  it("claudeToOpenAIRequest preserves multimodal arrays", () => {
    const imageBlock = {
      type: "image",
      source: { type: "base64", media_type: "image/png", data: "ZmFrZQ==" },
    };
    const payload = {
      messages: [
        { role: "user", content: [...textBlocks("describe"), imageBlock] },
      ],
    };

    const translated = claudeToOpenAIRequest("gpt-4o", payload, true);

    // Mixed text + image content must stay an array, not be flattened.
    expect(Array.isArray(translated.messages[0].content)).toBe(true);
  });

  it("filterToOpenAIFormat flattens text-only arrays to string", () => {
    const payload = {
      messages: [{ role: "user", content: textBlocks("a", "b") }],
    };

    const filtered = filterToOpenAIFormat(deepCopy(payload));

    expect(filtered.messages[0].content).toBe("a\nb");
  });

  it("translateRequest keeps /v1/messages Claude->OpenAI text payloads string-safe", () => {
    const payload = {
      model: "ollama/gpt-oss:120b",
      system: textBlocks("You are helpful."),
      messages: [{ role: "user", content: textBlocks("hello", "world") }],
      stream: true,
    };

    const translated = translateRequest(
      FORMATS.CLAUDE,
      FORMATS.OPENAI,
      "gpt-oss:120b",
      deepCopy(payload),
      true,
      null,
      "ollama",
    );

    const userMessage = translated.messages.find((m) => m.role === "user");

    // End-to-end translation must also produce a plain string body.
    expect(typeof userMessage.content).toBe("string");
    expect(userMessage.content).toBe("hello\nworld");
  });

  it("parseSSELine supports provider raw NDJSON stream lines", () => {
    // Ollama streams bare NDJSON objects with no "data: " prefix.
    const chunk = {
      model: "gpt-oss:120b",
      message: { role: "assistant", content: "hello" },
      done: false,
    };

    expect(parseSSELine(JSON.stringify(chunk))).toEqual(chunk);
  });

  it("parseSSELine still supports SSE data lines", () => {
    const parsed = parseSSELine('data: {"choices":[{"delta":{"content":"hi"}}]}');
    expect(parsed.choices[0].delta.content).toBe("hi");
  });
});