diff --git a/open-sse/services/usage.js b/open-sse/services/usage.js
index 6ae5446a..a6882d7c 100644
--- a/open-sse/services/usage.js
+++ b/open-sse/services/usage.js
@@ -58,6 +58,8 @@ export async function getUsageForProvider(connection) {
       return await getQwenUsage(accessToken, providerSpecificData);
     case "iflow":
       return await getIflowUsage(accessToken);
+    case "ollama":
+      return await getOllamaUsage(accessToken, providerSpecificData);
     default:
       return { message: `Usage API not implemented for ${provider}` };
   }
@@ -719,3 +721,25 @@ async function getIflowUsage(accessToken) {
     return { message: "Unable to fetch iFlow usage." };
   }
 }
+
+/**
+ * Ollama Cloud Usage
+ * Ollama Cloud uses an API key from ollama.com/settings/keys
+ * and has no public usage API — free tier has light usage limits (resets every 5h & 7d).
+ * This returns an informational message with the plan details.
+ */
+async function getOllamaUsage(accessToken, providerSpecificData) {
+  try {
+    // Ollama Cloud does not expose a public quota/usage API.
+    // The provider is configured as noAuth with a notice explaining limits.
+    // We return a graceful message so the UI shows a friendly state instead of an error.
+    const plan = providerSpecificData?.plan || "Free";
+    return {
+      plan,
+      message: "Ollama Cloud uses a free tier with light usage limits (resets every 5h & 7d). For detailed usage tracking, visit ollama.com/settings/keys.",
+      quotas: [],
+    };
+  } catch (error) {
+    return { message: "Unable to fetch Ollama Cloud usage." };
+  }
+}
diff --git a/src/app/api/providers/[id]/models/route.js b/src/app/api/providers/[id]/models/route.js
index a14beb14..7d077aaf 100644
--- a/src/app/api/providers/[id]/models/route.js
+++ b/src/app/api/providers/[id]/models/route.js
@@ -163,7 +163,7 @@ const PROVIDER_MODELS_CONFIG = {
   siliconflow: createOpenAIModelsConfig("https://api.siliconflow.cn/v1/models"),
   hyperbolic: createOpenAIModelsConfig("https://api.hyperbolic.xyz/v1/models"),
   ollama: createOpenAIModelsConfig("https://ollama.com/api/tags"),
-  "ollama-local": createOpenAIModelsConfig("http://localhost:11434/api/tags"),
+  // ollama-local: url resolved dynamically below via providerSpecificData.baseUrl
   nanobanana: createOpenAIModelsConfig("https://api.nanobananaapi.ai/v1/models"),
   chutes: createOpenAIModelsConfig("https://llm.chutes.ai/v1/models"),
   nvidia: createOpenAIModelsConfig("https://integrate.api.nvidia.com/v1/models"),
@@ -380,6 +380,34 @@ export async function GET(request, { params }) {
     });
   }
 
+  // Handle ollama-local: resolve URL from providerSpecificData.baseUrl if provided,
+  // otherwise fall back to default localhost address.
+  if (connection.provider === "ollama-local") {
+    const baseUrl = connection.providerSpecificData?.baseUrl;
+    const url = baseUrl
+      ? `${baseUrl.replace(/\/$/, "")}/api/tags`
+      : "http://localhost:11434/api/tags";
+    const response = await fetch(url, {
+      method: "GET",
+      headers: { "Content-Type": "application/json" },
+    });
+    if (!response.ok) {
+      const errorText = await response.text();
+      console.log(`Error fetching models from ollama-local:`, errorText);
+      return NextResponse.json(
+        { error: `Failed to fetch models: ${response.status}` },
+        { status: response.status }
+      );
+    }
+    const data = await response.json();
+    const models = parseOpenAIStyleModels(data);
+    return NextResponse.json({
+      provider: connection.provider,
+      connectionId: connection.id,
+      models,
+    });
+  }
+
   const config = PROVIDER_MODELS_CONFIG[connection.provider];
   if (!config) {
     return NextResponse.json(
diff --git a/src/lib/oauth/constants/oauth.js b/src/lib/oauth/constants/oauth.js
index 09010648..bb33bd94 100644
--- a/src/lib/oauth/constants/oauth.js
+++ b/src/lib/oauth/constants/oauth.js
@@ -57,8 +57,8 @@ export const GEMINI_CONFIG = {
 // Qwen OAuth Configuration (Device Code Flow with PKCE)
 export const QWEN_CONFIG = {
   clientId: "f0304373b74a44d2b584a3fb70ca9e56",
-  deviceCodeUrl: "https://chat.qwen.ai/api/v1/oauth2/device/code",
-  tokenUrl: "https://chat.qwen.ai/api/v1/oauth2/token",
+  deviceCodeUrl: "https://qwen.ai/api/v1/oauth2/device/code",
+  tokenUrl: "https://qwen.ai/api/v1/oauth2/token",
   scope: "openid profile email model.completion",
   codeChallengeMethod: "S256",
 };
diff --git a/src/shared/components/Sidebar.js b/src/shared/components/Sidebar.js
index 4258a503..50ab12ed 100644
--- a/src/shared/components/Sidebar.js
+++ b/src/shared/components/Sidebar.js
@@ -7,6 +7,7 @@ import { usePathname } from "next/navigation";
 import { cn } from "@/shared/utils/cn";
 import { APP_CONFIG } from "@/shared/constants/config";
 import { MEDIA_PROVIDER_KINDS } from "@/shared/constants/providers";
+import { useCopyToClipboard } from "@/shared/hooks/useCopyToClipboard";
 import Button from "./Button";
 import { ConfirmModal } from "./Modal";
 
@@ -41,6 +42,9 @@ export default function Sidebar({ onClose }) {
   const [isDisconnected, setIsDisconnected] = useState(false);
   const [updateInfo, setUpdateInfo] = useState(null);
   const [enableTranslator, setEnableTranslator] = useState(false);
+  const { copied, copy } = useCopyToClipboard(2000);
+
+  const INSTALL_CMD = "npm install -g 9router@latest";
 
   useEffect(() => {
     fetch("/api/settings")
@@ -100,14 +104,18 @@
         {updateInfo && (
-