Mirror of https://github.com/decolua/9router.git (synced 2026-05-08 12:01:28 +00:00)
Add a shared resolveOllamaLocalHost() helper and wire it through the executor and the models/validate/test routes, so users can point ollama-local at a remote Ollama instance instead of being locked to localhost:11434. Also expose the host as an "Ollama Host URL" field in AddApiKeyModal (empty = default localhost:11434), making the option reachable from the dashboard without hand-editing db.json.

Co-authored-by: anuragg-saxenaa <anuragg.saxenaa@gmail.com>
Made-with: Cursor
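For reference, below is a minimal sketch of what the shared helper might look like. The actual implementation lives in ../config/providers.js (imported by the executor file below) and may differ; the providerSpecificData.ollamaHost field name and the trailing-slash handling are assumptions for illustration, not confirmed by this file.

// Hypothetical sketch only; the real helper is exported from ../config/providers.js.
// Assumes the stored credential may carry a user-supplied host under
// providerSpecificData.ollamaHost (field name is an assumption).
const DEFAULT_OLLAMA_HOST = "http://localhost:11434";

export function resolveOllamaLocalHost(credentials) {
  const host = credentials?.providerSpecificData?.ollamaHost;
  if (!host) return DEFAULT_OLLAMA_HOST; // empty field falls back to localhost:11434
  return host.replace(/\/$/, ""); // drop a trailing slash so "/api/chat" joins cleanly
}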
158 lines · 5.3 KiB · JavaScript
import { HTTP_STATUS, RETRY_CONFIG, DEFAULT_RETRY_CONFIG } from "../config/runtimeConfig.js";
import { resolveOllamaLocalHost } from "../config/providers.js";
import { proxyAwareFetch } from "../utils/proxyFetch.js";

/**
 * BaseExecutor - Base class for provider executors
 */
export class BaseExecutor {
  constructor(provider, config) {
    this.provider = provider;
    this.config = config;
    this.noAuth = config?.noAuth || false;
  }

  getProvider() {
    return this.provider;
  }

  getBaseUrls() {
    return this.config.baseUrls || (this.config.baseUrl ? [this.config.baseUrl] : []);
  }

  getFallbackCount() {
    return this.getBaseUrls().length || 1;
  }

  buildUrl(model, stream, urlIndex = 0, credentials = null) {
    if (this.provider?.startsWith?.("openai-compatible-")) {
      const baseUrl = credentials?.providerSpecificData?.baseUrl || "https://api.openai.com/v1";
      const normalized = baseUrl.replace(/\/$/, "");
      const path = this.provider.includes("responses") ? "/responses" : "/chat/completions";
      return `${normalized}${path}`;
    }
    if (this.provider?.startsWith?.("anthropic-compatible-")) {
      const baseUrl = credentials?.providerSpecificData?.baseUrl || "https://api.anthropic.com/v1";
      const normalized = baseUrl.replace(/\/$/, "");
      return `${normalized}/messages`;
    }
    if (this.provider === "ollama-local") {
      return `${resolveOllamaLocalHost(credentials)}/api/chat`;
    }
    const baseUrls = this.getBaseUrls();
    return baseUrls[urlIndex] || baseUrls[0] || this.config.baseUrl;
  }

  buildHeaders(credentials, stream = true) {
    const headers = {
      "Content-Type": "application/json",
      ...this.config.headers
    };

    if (this.provider?.startsWith?.("anthropic-compatible-")) {
      // Anthropic-compatible providers use x-api-key header
      if (credentials.apiKey) {
        headers["x-api-key"] = credentials.apiKey;
      } else if (credentials.accessToken) {
        headers["Authorization"] = `Bearer ${credentials.accessToken}`;
      }
      if (!headers["anthropic-version"]) {
        headers["anthropic-version"] = "2023-06-01";
      }
    } else {
      // Standard Bearer token auth for other providers
      if (credentials.accessToken) {
        headers["Authorization"] = `Bearer ${credentials.accessToken}`;
      } else if (credentials.apiKey) {
        headers["Authorization"] = `Bearer ${credentials.apiKey}`;
      }
    }

    if (stream) {
      headers["Accept"] = "text/event-stream";
    }

    return headers;
  }

  // Override in subclass for provider-specific transformations
  transformRequest(model, body, stream, credentials) {
    return body;
  }

  shouldRetry(status, urlIndex) {
    return status === HTTP_STATUS.RATE_LIMITED && urlIndex + 1 < this.getFallbackCount();
  }

  // Override in subclass for provider-specific refresh
  async refreshCredentials(credentials, log) {
    return null;
  }

  needsRefresh(credentials) {
    if (!credentials.expiresAt) return false;
    const expiresAtMs = new Date(credentials.expiresAt).getTime();
    return expiresAtMs - Date.now() < 5 * 60 * 1000;
  }

  parseError(response, bodyText) {
    return { status: response.status, message: bodyText || `HTTP ${response.status}` };
  }

  async execute({ model, body, stream, credentials, signal, log, proxyOptions = null }) {
    const fallbackCount = this.getFallbackCount();
    let lastError = null;
    let lastStatus = 0;
    const retryAttemptsByUrl = {};

    // Merge default retry config with provider-specific config
    const retryConfig = { ...DEFAULT_RETRY_CONFIG, ...this.config.retry };

    for (let urlIndex = 0; urlIndex < fallbackCount; urlIndex++) {
      const url = this.buildUrl(model, stream, urlIndex, credentials);
      const transformedBody = this.transformRequest(model, body, stream, credentials);
      const headers = this.buildHeaders(credentials, stream);

      if (!retryAttemptsByUrl[urlIndex]) retryAttemptsByUrl[urlIndex] = 0;

      try {
        const response = await proxyAwareFetch(url, {
          method: "POST",
          headers,
          body: JSON.stringify(transformedBody),
          signal
        }, proxyOptions);

        // Retry based on status code config
        const maxRetries = retryConfig[response.status] || 0;
        if (maxRetries > 0 && retryAttemptsByUrl[urlIndex] < maxRetries) {
          retryAttemptsByUrl[urlIndex]++;
          log?.debug?.("RETRY", `${response.status} retry ${retryAttemptsByUrl[urlIndex]}/${maxRetries} after ${RETRY_CONFIG.delayMs / 1000}s`);
          await new Promise(resolve => setTimeout(resolve, RETRY_CONFIG.delayMs));
          urlIndex--;
          continue;
        }

        if (this.shouldRetry(response.status, urlIndex)) {
          log?.debug?.("RETRY", `${response.status} on ${url}, trying fallback ${urlIndex + 1}`);
          lastStatus = response.status;
          continue;
        }

        return { response, url, headers, transformedBody };
      } catch (error) {
        lastError = error;
        if (urlIndex + 1 < fallbackCount) {
          log?.debug?.("RETRY", `Error on ${url}, trying fallback ${urlIndex + 1}`);
          continue;
        }
        throw error;
      }
    }

    throw lastError || new Error(`All ${fallbackCount} URLs failed with status ${lastStatus}`);
  }
}

export default BaseExecutor;
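As a usage illustration, a provider executor would typically extend this class and override the hooks marked "Override in subclass". The sketch below is hypothetical: the provider name, URLs, and request transformation are invented for illustration and do not correspond to an executor in this repository.

// Hypothetical example subclass, for illustration only.
class ExampleExecutor extends BaseExecutor {
  constructor() {
    super("example-provider", {
      baseUrls: ["https://primary.example.com/v1/chat", "https://backup.example.com/v1/chat"],
      retry: { 429: 2 } // retry rate-limited responses up to twice per URL
    });
  }

  // Provider-specific request shaping before the POST is sent
  transformRequest(model, body, stream, credentials) {
    return { ...body, model, stream };
  }
}

// const executor = new ExampleExecutor();
// const { response } = await executor.execute({ model: "example-model", body, stream: true, credentials, signal, log });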