feat: add OllamaLocalExecutor and update provider handling

- Introduced OllamaLocalExecutor to handle requests for the "ollama-local" provider.
- Removed the direct URL construction for "ollama-local" from BaseExecutor.
- Updated index.js to include the new OllamaLocalExecutor in the executors mapping.
- Enhanced the ProvidersPage component to support dynamic addition of OpenAI- and Anthropic-compatible providers.
Author: decolua
Date: 2026-05-07 16:42:36 +07:00
Parent: 050e56f20b
Commit: 0d61a1d546

8 changed files with 127 additions and 94 deletions

View File

@@ -1,5 +1,4 @@
 import { HTTP_STATUS, RETRY_CONFIG, DEFAULT_RETRY_CONFIG, resolveRetryEntry } from "../config/runtimeConfig.js";
-import { resolveOllamaLocalHost } from "../config/providers.js";
 import { proxyAwareFetch } from "../utils/proxyFetch.js";
 
 /**
@@ -36,9 +35,6 @@ export class BaseExecutor {
       const normalized = baseUrl.replace(/\/$/, "");
       return `${normalized}/messages`;
     }
-    if (this.provider === "ollama-local") {
-      return `${resolveOllamaLocalHost(credentials)}/api/chat`;
-    }
     const baseUrls = this.getBaseUrls();
     return baseUrls[urlIndex] || baseUrls[0] || this.config.baseUrl;
   }
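
The removed branch (and the new executor further below) delegate host resolution to resolveOllamaLocalHost from ../config/providers.js, which is not part of this diff. A minimal sketch of what such a resolver might look like, assuming the credentials object may carry a host override (the `host` field name is an assumption) and falling back to Ollama's default local address:

// Hypothetical sketch of resolveOllamaLocalHost; the real implementation in
// ../config/providers.js is not shown in this diff, and the `host` field on
// credentials is an assumption.
export function resolveOllamaLocalHost(credentials) {
  // Prefer an explicit host from the stored credentials, if any; otherwise
  // fall back to Ollama's default local endpoint (port 11434).
  const host = credentials?.host || "http://127.0.0.1:11434";
  // Strip a trailing slash so callers can safely append paths like "/api/chat".
  return host.replace(/\/$/, "");
}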

View File

@@ -13,6 +13,7 @@ import { OpenCodeExecutor } from "./opencode.js";
 import { OpenCodeGoExecutor } from "./opencode-go.js";
 import { GrokWebExecutor } from "./grok-web.js";
 import { PerplexityWebExecutor } from "./perplexity-web.js";
+import { OllamaLocalExecutor } from "./ollama-local.js";
 import { DefaultExecutor } from "./default.js";
 
 const executors = {
@@ -33,6 +34,7 @@ const executors = {
   "opencode-go": new OpenCodeGoExecutor(),
   "grok-web": new GrokWebExecutor(),
   "perplexity-web": new PerplexityWebExecutor(),
+  "ollama-local": new OllamaLocalExecutor(),
 };
 
 const defaultCache = new Map();
@@ -64,3 +66,4 @@ export { OpenCodeExecutor } from "./opencode.js";
 export { OpenCodeGoExecutor } from "./opencode-go.js";
 export { GrokWebExecutor } from "./grok-web.js";
 export { PerplexityWebExecutor } from "./perplexity-web.js";
+export { OllamaLocalExecutor } from "./ollama-local.js";
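
The executors map is keyed by provider id, and the defaultCache seen above suggests unknown providers fall back to a memoized DefaultExecutor. The resolution helper itself is outside the hunks shown; a plausible sketch, with the function name getExecutor assumed, is:

// Hypothetical resolution helper; the actual function in index.js is outside
// the hunks shown. It assumes DefaultExecutor's constructor takes a provider
// id, as seen in OllamaLocalExecutor's `super("ollama-local")` call below.
export function getExecutor(provider) {
  // Known providers use their dedicated executor instance.
  if (executors[provider]) return executors[provider];
  // Unknown providers get a DefaultExecutor, memoized per id in defaultCache.
  if (!defaultCache.has(provider)) {
    defaultCache.set(provider, new DefaultExecutor(provider));
  }
  return defaultCache.get(provider);
}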

View File

@@ -0,0 +1,14 @@
+import { DefaultExecutor } from "./default.js";
+import { resolveOllamaLocalHost } from "../config/providers.js";
+
+export class OllamaLocalExecutor extends DefaultExecutor {
+  constructor() {
+    super("ollama-local");
+  }
+
+  buildUrl(model, stream, urlIndex = 0, credentials = null) {
+    return `${resolveOllamaLocalHost(credentials)}/api/chat`;
+  }
+}
+
+export default OllamaLocalExecutor;
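
With the executor registered, a request for the "ollama-local" provider routes through OllamaLocalExecutor.buildUrl, which ignores the rotating urlIndex used by hosted providers and always targets the local Ollama chat endpoint. A usage sketch; the resulting URL assumes resolveOllamaLocalHost falls back to Ollama's default http://127.0.0.1:11434 when credentials are null:

import { OllamaLocalExecutor } from "./ollama-local.js";

const executor = new OllamaLocalExecutor();
// model and stream are accepted for signature compatibility with the base
// class but do not affect the URL here.
const url = executor.buildUrl("llama3", true);
// => "http://127.0.0.1:11434/api/chat", assuming the default-host fallback.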