diff --git a/open-sse/executors/base.js b/open-sse/executors/base.js
index 39f89bf2..8cb46721 100644
--- a/open-sse/executors/base.js
+++ b/open-sse/executors/base.js
@@ -1,5 +1,4 @@
 import { HTTP_STATUS, RETRY_CONFIG, DEFAULT_RETRY_CONFIG, resolveRetryEntry } from "../config/runtimeConfig.js";
-import { resolveOllamaLocalHost } from "../config/providers.js";
 import { proxyAwareFetch } from "../utils/proxyFetch.js";
 
 /**
@@ -36,9 +35,6 @@ export class BaseExecutor {
       const normalized = baseUrl.replace(/\/$/, "");
       return `${normalized}/messages`;
     }
-    if (this.provider === "ollama-local") {
-      return `${resolveOllamaLocalHost(credentials)}/api/chat`;
-    }
     const baseUrls = this.getBaseUrls();
     return baseUrls[urlIndex] || baseUrls[0] || this.config.baseUrl;
   }
diff --git a/open-sse/executors/index.js b/open-sse/executors/index.js
index 655fb3c0..9479f4a5 100644
--- a/open-sse/executors/index.js
+++ b/open-sse/executors/index.js
@@ -13,6 +13,7 @@ import { OpenCodeExecutor } from "./opencode.js";
 import { OpenCodeGoExecutor } from "./opencode-go.js";
 import { GrokWebExecutor } from "./grok-web.js";
 import { PerplexityWebExecutor } from "./perplexity-web.js";
+import { OllamaLocalExecutor } from "./ollama-local.js";
 import { DefaultExecutor } from "./default.js";
 
 const executors = {
@@ -33,6 +34,7 @@ const executors = {
   "opencode-go": new OpenCodeGoExecutor(),
   "grok-web": new GrokWebExecutor(),
   "perplexity-web": new PerplexityWebExecutor(),
+  "ollama-local": new OllamaLocalExecutor(),
 };
 
 const defaultCache = new Map();
@@ -64,3 +66,4 @@ export { OpenCodeExecutor } from "./opencode.js";
 export { OpenCodeGoExecutor } from "./opencode-go.js";
 export { GrokWebExecutor } from "./grok-web.js";
 export { PerplexityWebExecutor } from "./perplexity-web.js";
+export { OllamaLocalExecutor } from "./ollama-local.js";
diff --git a/open-sse/executors/ollama-local.js b/open-sse/executors/ollama-local.js
new file mode 100644
index 00000000..83de7b87
--- /dev/null
+++ b/open-sse/executors/ollama-local.js
@@ -0,0 +1,14 @@
+import { DefaultExecutor } from "./default.js";
+import { resolveOllamaLocalHost } from "../config/providers.js";
+
+export class OllamaLocalExecutor extends DefaultExecutor {
+  constructor() {
+    super("ollama-local");
+  }
+
+  buildUrl(model, stream, urlIndex = 0, credentials = null) {
+    return `${resolveOllamaLocalHost(credentials)}/api/chat`;
+  }
+}
+
+export default OllamaLocalExecutor;
diff --git a/src/app/(dashboard)/dashboard/providers/page.js b/src/app/(dashboard)/dashboard/providers/page.js
index 0940cb91..bc5fed48 100644
--- a/src/app/(dashboard)/dashboard/providers/page.js
+++ b/src/app/(dashboard)/dashboard/providers/page.js
@@ -292,6 +292,58 @@ export default function ProvidersPage() {
         )}
+        {/* Custom Providers (OpenAI/Anthropic Compatible) — dynamic */}
-            No compatible providers added yet
-            Use the buttons above to add OpenAI or Anthropic compatible
-            endpoints
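Note on the new executor: `resolveOllamaLocalHost` lives in `open-sse/config/providers.js` and is not touched by this diff. The sketch below is an assumption of its shape, shown only to illustrate how `OllamaLocalExecutor.buildUrl` composes the request URL; the `baseUrl` credential field and `OLLAMA_HOST` fallback are hypothetical, while the `/api/chat` path and Ollama's default local port (11434) are the parts confirmed by the diff and by Ollama's documented defaults.

```js
// Minimal sketch, assuming a credential-supplied host takes precedence.
// This is NOT the actual config/providers.js implementation.
export function resolveOllamaLocalHost(credentials) {
  const host =
    credentials?.baseUrl ||          // hypothetical credential field
    process.env.OLLAMA_HOST ||       // hypothetical env override
    "http://localhost:11434";        // Ollama's default local API address
  return host.replace(/\/$/, "");    // drop trailing slash; buildUrl appends /api/chat
}
```

Under that assumption, `OllamaLocalExecutor.buildUrl()` would resolve to `http://localhost:11434/api/chat` when no credentials are supplied.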