fix(ollama-local): support custom host URL for remote Ollama servers (closes #578)

Add a shared resolveOllamaLocalHost() helper and wire it through the
executor and the models, validate, and test routes, so users can point
ollama-local at a remote Ollama instance instead of being locked to
localhost:11434.

Also expose the host as an "Ollama Host URL" field in AddApiKeyModal
(empty = default localhost:11434), making the option reachable from the
dashboard without hand-editing db.json.

Co-authored-by: anuragg-saxenaa <anuragg.saxenaa@gmail.com>
Made-with: Cursor
This commit is contained in:
anuragg-saxenaa
2026-04-22 10:42:34 +07:00
committed by decolua
parent 94ab0d715d
commit 4638cf0e81
6 changed files with 77 additions and 31 deletions

View File

@@ -339,3 +339,10 @@ export const PROVIDERS = {
noAuth: true
},
};
export const OLLAMA_LOCAL_DEFAULT_HOST = "http://localhost:11434";

/**
 * Resolve the base host URL for an "ollama-local" provider connection.
 *
 * Reads the optional user-supplied host from
 * `credentials.providerSpecificData.baseUrl`; a missing, empty, or
 * whitespace-only value falls back to OLLAMA_LOCAL_DEFAULT_HOST.
 *
 * @param {{ providerSpecificData?: { baseUrl?: string } } | null | undefined} credentials
 *   Provider connection record; may be absent entirely.
 * @returns {string} Host URL with all trailing slashes removed, so callers
 *   can safely append paths such as `/api/chat` or `/api/tags`.
 */
export function resolveOllamaLocalHost(credentials) {
  const raw = credentials?.providerSpecificData?.baseUrl?.trim();
  // Strip ALL trailing slashes (not just one) so an input like
  // "http://host:11434//" doesn't produce a "//api/..." double-slash path.
  return (raw || OLLAMA_LOCAL_DEFAULT_HOST).replace(/\/+$/, "");
}

View File

@@ -1,4 +1,5 @@
import { HTTP_STATUS, RETRY_CONFIG, DEFAULT_RETRY_CONFIG } from "../config/runtimeConfig.js";
import { resolveOllamaLocalHost } from "../config/providers.js";
import { proxyAwareFetch } from "../utils/proxyFetch.js";
/**
@@ -35,6 +36,9 @@ export class BaseExecutor {
const normalized = baseUrl.replace(/\/$/, "");
return `${normalized}/messages`;
}
if (this.provider === "ollama-local") {
return `${resolveOllamaLocalHost(credentials)}/api/chat`;
}
const baseUrls = this.getBaseUrls();
return baseUrls[urlIndex] || baseUrls[0] || this.config.baseUrl;
}

View File

@@ -6,24 +6,33 @@ import { Button, Badge, Input, Modal, Select } from "@/shared/components";
export default function AddApiKeyModal({ isOpen, provider, providerName, isCompatible, isAnthropic, proxyPools, onSave, onClose }) {
const NONE_PROXY_POOL_VALUE = "__none__";
const isOllamaLocal = provider === "ollama-local";
const [formData, setFormData] = useState({
name: "",
apiKey: "",
priority: 1,
proxyPoolId: NONE_PROXY_POOL_VALUE,
ollamaHostUrl: "",
});
const [validating, setValidating] = useState(false);
const [validationResult, setValidationResult] = useState(null);
const [saving, setSaving] = useState(false);
const buildProviderSpecificData = () => {
if (isOllamaLocal && formData.ollamaHostUrl.trim()) {
return { baseUrl: formData.ollamaHostUrl.trim() };
}
return undefined;
};
const handleValidate = async () => {
setValidating(true);
try {
const res = await fetch("/api/providers/validate", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ provider, apiKey: formData.apiKey }),
body: JSON.stringify({ provider, apiKey: formData.apiKey, providerSpecificData: buildProviderSpecificData() }),
});
const data = await res.json();
setValidationResult(data.valid ? "success" : "failed");
@@ -35,7 +44,12 @@ export default function AddApiKeyModal({ isOpen, provider, providerName, isCompa
};
const handleSubmit = async () => {
if (!provider || !formData.apiKey) return;
if (!provider) return;
if (!isOllamaLocal && !formData.apiKey) return;
if (!isOllamaLocal) {
// Non-ollama providers require a name
if (!formData.name) return;
}
setSaving(true);
try {
@@ -46,7 +60,7 @@ export default function AddApiKeyModal({ isOpen, provider, providerName, isCompa
const res = await fetch("/api/providers/validate", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ provider, apiKey: formData.apiKey }),
body: JSON.stringify({ provider, apiKey: formData.apiKey, providerSpecificData: buildProviderSpecificData() }),
});
const data = await res.json();
isValid = !!data.valid;
@@ -58,12 +72,12 @@ export default function AddApiKeyModal({ isOpen, provider, providerName, isCompa
}
await onSave({
name: formData.name,
name: formData.name || (isOllamaLocal ? "Ollama Local" : ""),
apiKey: formData.apiKey,
priority: formData.priority,
proxyPoolId: formData.proxyPoolId === NONE_PROXY_POOL_VALUE ? null : formData.proxyPoolId,
testStatus: isValid ? "active" : "unknown",
providerSpecificData: undefined
providerSpecificData: buildProviderSpecificData()
});
} finally {
setSaving(false);
@@ -79,8 +93,25 @@ export default function AddApiKeyModal({ isOpen, provider, providerName, isCompa
label="Name"
value={formData.name}
onChange={(e) => setFormData({ ...formData, name: e.target.value })}
placeholder="Production Key"
placeholder={isOllamaLocal ? "Ollama Local" : "Production Key"}
/>
{isOllamaLocal && (
<div className="flex gap-2">
<Input
label="Ollama Host URL"
value={formData.ollamaHostUrl}
onChange={(e) => setFormData({ ...formData, ollamaHostUrl: e.target.value })}
placeholder="http://localhost:11434"
className="flex-1"
/>
<div className="pt-6">
<Button onClick={handleValidate} disabled={validating || saving} variant="secondary">
{validating ? "Checking..." : "Check"}
</Button>
</div>
</div>
)}
{!isOllamaLocal && (
<div className="flex gap-2">
<Input
label="API Key"
@@ -95,6 +126,12 @@ export default function AddApiKeyModal({ isOpen, provider, providerName, isCompa
</Button>
</div>
</div>
)}
{isOllamaLocal && (
<p className="text-xs text-text-muted">
Leave blank to use <code>http://localhost:11434</code>. For remote Ollama, enter the full host URL (e.g. <code>http://192.168.1.10:11434</code>).
</p>
)}
{validationResult && (
<Badge variant={validationResult === "success" ? "success" : "error"}>
{validationResult === "success" ? "Valid" : "Invalid"}
@@ -137,7 +174,7 @@ export default function AddApiKeyModal({ isOpen, provider, providerName, isCompa
</p>
<div className="flex gap-2">
<Button onClick={handleSubmit} fullWidth disabled={!formData.name || !formData.apiKey || saving}>
<Button onClick={handleSubmit} fullWidth disabled={saving || (!isOllamaLocal && (!formData.name || !formData.apiKey))}>
{saving ? "Saving..." : "Save"}
</Button>
<Button onClick={onClose} variant="ghost" fullWidth>

View File

@@ -4,6 +4,7 @@ import { isOpenAICompatibleProvider, isAnthropicCompatibleProvider } from "@/sha
import { KiroService } from "@/lib/oauth/services/kiro";
import { GEMINI_CONFIG } from "@/lib/oauth/constants/oauth";
import { refreshGoogleToken, updateProviderCredentials, refreshKiroToken } from "@/sse/services/tokenRefresh";
import { resolveOllamaLocalHost } from "open-sse/config/providers.js";
const GEMINI_CLI_MODELS_URL = "https://cloudcode-pa.googleapis.com/v1internal:fetchAvailableModels";
@@ -380,13 +381,8 @@ export async function GET(request, { params }) {
});
}
// Handle ollama-local: resolve URL from providerSpecificData.baseUrl if provided,
// otherwise fall back to default localhost address.
if (connection.provider === "ollama-local") {
const baseUrl = connection.providerSpecificData?.baseUrl;
const url = baseUrl
? `${baseUrl.replace(/\/$/, "")}/api/tags`
: "http://localhost:11434/api/tags";
const url = `${resolveOllamaLocalHost(connection)}/api/tags`;
const response = await fetch(url, {
method: "GET",
headers: { "Content-Type": "application/json" },

View File

@@ -3,6 +3,7 @@ import { resolveConnectionProxyConfig } from "@/lib/network/connectionProxy";
import { testProxyUrl } from "@/lib/network/proxyTest";
import { isOpenAICompatibleProvider, isAnthropicCompatibleProvider } from "@/shared/constants/providers";
import { getDefaultModel } from "open-sse/config/providerModels.js";
import { resolveOllamaLocalHost } from "open-sse/config/providers.js";
import {
GEMINI_CONFIG,
ANTIGRAVITY_CONFIG,
@@ -495,9 +496,9 @@ async function testApiKeyConnection(connection, effectiveProxy = null) {
return { valid: res.ok, error: res.ok ? null : "Invalid API key" };
}
case "ollama-local": {
// No auth required for local Ollama
const res = await fetch("http://localhost:11434/api/tags");
return { valid: res.ok, error: res.ok ? null : "Ollama not running on localhost:11434" };
const host = resolveOllamaLocalHost(connection);
const res = await fetch(`${host}/api/tags`);
return { valid: res.ok, error: res.ok ? null : `Ollama not reachable at ${host}` };
}
case "deepgram": {
const res = await fetchWithConnectionProxy("https://api.deepgram.com/v1/projects", { headers: { Authorization: `Token ${connection.apiKey}` } }, effectiveProxy);

View File

@@ -2,12 +2,13 @@ import { NextResponse } from "next/server";
import { getProviderNodeById } from "@/models";
import { isOpenAICompatibleProvider, isAnthropicCompatibleProvider } from "@/shared/constants/providers";
import { getDefaultModel } from "open-sse/config/providerModels.js";
import { resolveOllamaLocalHost } from "open-sse/config/providers.js";
// POST /api/providers/validate - Validate API key with provider
export async function POST(request) {
try {
const body = await request.json();
const { provider, apiKey } = body;
const { provider, apiKey, providerSpecificData } = body;
if (!provider || (!apiKey && provider !== "ollama-local")) {
return NextResponse.json({ error: "Provider and API key required" }, { status: 400 });
@@ -183,7 +184,7 @@ export async function POST(request) {
siliconflow: "https://api.siliconflow.cn/v1/models",
hyperbolic: "https://api.hyperbolic.xyz/v1/models",
ollama: "https://ollama.com/api/tags",
"ollama-local": "http://localhost:11434/api/tags",
"ollama-local": `${resolveOllamaLocalHost({ providerSpecificData })}/api/tags`,
assemblyai: "https://api.assemblyai.com/v1/account",
nanobanana: "https://api.nanobananaapi.ai/v1/models",
chutes: "https://llm.chutes.ai/v1/models",