feat: Add support for local Ollama provider (Ollama Local)

This commit is contained in:
decolua
2026-03-13 10:22:59 +07:00
parent 754a24d52a
commit 399adca63d
7 changed files with 14 additions and 0 deletions

View File

@@ -294,4 +294,8 @@ export const PROVIDERS = {
baseUrl: "https://ollama.com/api/chat",
format: "ollama"
},
"ollama-local": {
baseUrl: "http://localhost:11434/api/chat",
format: "ollama"
},
};

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.3 KiB

View File

@@ -163,6 +163,7 @@ const PROVIDER_MODELS_CONFIG = {
siliconflow: createOpenAIModelsConfig("https://api.siliconflow.cn/v1/models"),
hyperbolic: createOpenAIModelsConfig("https://api.hyperbolic.xyz/v1/models"),
ollama: createOpenAIModelsConfig("https://ollama.com/api/tags"),
"ollama-local": createOpenAIModelsConfig("http://localhost:11434/api/tags"),
nanobanana: createOpenAIModelsConfig("https://api.nanobananaapi.ai/v1/models"),
chutes: createOpenAIModelsConfig("https://llm.chutes.ai/v1/models"),
nvidia: createOpenAIModelsConfig("https://integrate.api.nvidia.com/v1/models"),

View File

@@ -464,6 +464,11 @@ async function testApiKeyConnection(connection, effectiveProxy = null) {
const res = await fetch("https://ollama.com/api/tags", { headers: { Authorization: `Bearer ${connection.apiKey}` } });
return { valid: res.ok, error: res.ok ? null : "Invalid API key" };
}
case "ollama-local": {
// No auth required for local Ollama
const res = await fetch("http://localhost:11434/api/tags");
return { valid: res.ok, error: res.ok ? null : "Ollama not running on localhost:11434" };
}
case "deepgram": {
const res = await fetchWithConnectionProxy("https://api.deepgram.com/v1/projects", { headers: { Authorization: `Token ${connection.apiKey}` } }, effectiveProxy);
return { valid: res.ok, error: res.ok ? null : "Invalid API key" };

View File

@@ -164,6 +164,7 @@ export async function POST(request) {
case "siliconflow":
case "hyperbolic":
case "ollama":
case "ollama-local":
case "assemblyai":
case "nanobanana":
case "chutes":
@@ -182,6 +183,7 @@ export async function POST(request) {
siliconflow: "https://api.siliconflow.cn/v1/models",
hyperbolic: "https://api.hyperbolic.xyz/v1/models",
ollama: "https://ollama.com/api/tags",
"ollama-local": "http://localhost:11434/api/tags",
assemblyai: "https://api.assemblyai.com/v1/account",
nanobanana: "https://api.nanobananaapi.ai/v1/models",
chutes: "https://llm.chutes.ai/v1/models",

View File

@@ -48,6 +48,7 @@ export const PROVIDER_ENDPOINTS = {
anthropic: "https://api.anthropic.com/v1/messages",
gemini: "https://generativelanguage.googleapis.com/v1beta/models",
ollama: "https://ollama.com/api/chat",
"ollama-local": "http://localhost:11434/api/chat",
};
// Re-export from providers.js for backward compatibility

View File

@@ -50,6 +50,7 @@ export const APIKEY_PROVIDERS = {
nanobanana: { id: "nanobanana", alias: "nb", name: "NanoBanana", icon: "image", color: "#FFD700", textIcon: "NB", website: "https://nanobananaapi.ai" },
chutes: { id: "chutes", alias: "ch", name: "Chutes AI", icon: "water_drop", color: "#ffffffff", textIcon: "CH", website: "https://chutes.ai" },
ollama: { id: "ollama", alias: "ollama", name: "Ollama Cloud", icon: "cloud", color: "#ffffffff", textIcon: "OL", website: "https://ollama.com" },
"ollama-local": { id: "ollama-local", alias: "ollama-local", name: "Ollama Local", icon: "cloud", color: "#ffffffff", textIcon: "OL", website: "https://ollama.com" },
};
export const OPENAI_COMPATIBLE_PREFIX = "openai-compatible-";