feat: add STT support, Gemini TTS, and expand usage tracking
- Speech-to-Text: full pipeline with sttCore handler, /v1/audio/transcriptions endpoint, sttConfig for OpenAI, Gemini, Groq, Deepgram, AssemblyAI, HuggingFace, NVIDIA Parakeet; new 9router-stt skill - Gemini TTS: add gemini provider with 30 prebuilt voices and TTS_PROVIDER_CONFIG - Usage: implement GLM (intl/cn) and MiniMax (intl/cn) quota fetchers; refactor Gemini CLI usage to use retrieveUserQuota with per-model buckets - Disabled models: lowdb-backed disabledModelsDb + /api/models/disabled route - Header search: reusable Zustand store (headerSearchStore) wired into Header - CLI tools: add Claude Cowork tool card and cowork-settings API - Providers: introduce mediaPriority sorting in getProvidersByKind, add Kimi K2.6, reorder hermes, drop qwen STT kind - UI: expand media-providers/[kind]/[id] page (+314), enhance OAuthModal, ModelSelectModal, ProviderTopology, ProxyPools, ProviderLimits - Assets: refresh provider PNGs (alicode, byteplus, cloudflare-ai, nvidia, ollama, vertex, volcengine-ark) and add aws-polly, fal-ai, jina-ai, recraft, runwayml, stability-ai, topaz, black-forest-labs
@@ -156,6 +156,7 @@ export const PROVIDER_MODELS = {
|
||||
{ id: "gpt-5.3-codex", name: "GPT 5.3 Codex" },
|
||||
],
|
||||
kmc: [ // Kimi Coding
|
||||
{ id: "kimi-k2.6", name: "Kimi K2.6" },
|
||||
{ id: "kimi-k2.5", name: "Kimi K2.5" },
|
||||
{ id: "kimi-k2.5-thinking", name: "Kimi K2.5 Thinking" },
|
||||
{ id: "kimi-latest", name: "Kimi Latest" },
|
||||
@@ -233,6 +234,10 @@ export const PROVIDER_MODELS = {
|
||||
{ id: "tts-1", name: "TTS-1", type: "tts" },
|
||||
{ id: "tts-1-hd", name: "TTS-1 HD", type: "tts" },
|
||||
{ id: "gpt-4o-mini-tts", name: "GPT-4o Mini TTS", type: "tts" },
|
||||
// STT models
|
||||
{ id: "whisper-1", name: "Whisper 1", type: "stt", params: ["language", "response_format", "temperature", "prompt"] },
|
||||
{ id: "gpt-4o-transcribe", name: "GPT-4o Transcribe", type: "stt", params: ["language", "response_format", "temperature", "prompt"] },
|
||||
{ id: "gpt-4o-mini-transcribe", name: "GPT-4o Mini Transcribe", type: "stt", params: ["language", "response_format", "temperature", "prompt"] },
|
||||
// Image models
|
||||
{ id: "gpt-image-1", name: "GPT Image 1", type: "image", params: ["n", "size", "quality", "response_format"] },
|
||||
{ id: "dall-e-3", name: "DALL-E 3", type: "image", params: ["size", "quality", "style", "response_format"] },
|
||||
@@ -267,6 +272,11 @@ export const PROVIDER_MODELS = {
|
||||
{ id: "gemini-3.1-flash-image-preview", name: "Gemini 3.1 Flash Image (Nano Banana 2)", type: "image", params: [] },
|
||||
{ id: "gemini-3-pro-image-preview", name: "Gemini 3 Pro Image (Nano Banana Pro)", type: "image", params: [] },
|
||||
{ id: "gemini-2.5-flash-image", name: "Gemini 2.5 Flash Image (Nano Banana)", type: "image", params: [] },
|
||||
// STT models (multimodal generateContent)
|
||||
{ id: "gemini-2.5-pro", name: "Gemini 2.5 Pro (Best)", type: "stt", params: ["language", "prompt"] },
|
||||
{ id: "gemini-2.5-flash", name: "Gemini 2.5 Flash", type: "stt", params: ["language", "prompt"] },
|
||||
{ id: "gemini-2.5-flash-lite", name: "Gemini 2.5 Flash Lite (Cheapest)", type: "stt", params: ["language", "prompt"] },
|
||||
{ id: "gemini-2.0-flash", name: "Gemini 2.0 Flash", type: "stt", params: ["language", "prompt"] },
|
||||
],
|
||||
openrouter: [
|
||||
// Embedding models
|
||||
@@ -301,6 +311,7 @@ export const PROVIDER_MODELS = {
|
||||
{ id: "glm-4.5-air", name: "GLM-4.5-Air" },
|
||||
],
|
||||
kimi: [
|
||||
{ id: "kimi-k2.6", name: "Kimi K2.6" },
|
||||
{ id: "kimi-k2.5", name: "Kimi K2.5" },
|
||||
{ id: "kimi-k2.5-thinking", name: "Kimi K2.5 Thinking" },
|
||||
{ id: "kimi-latest", name: "Kimi Latest" },
|
||||
@@ -402,6 +413,10 @@ export const PROVIDER_MODELS = {
|
||||
{ id: "meta-llama/llama-4-maverick-17b-128e-instruct", name: "Llama 4 Maverick" },
|
||||
{ id: "qwen/qwen3-32b", name: "Qwen3 32B" },
|
||||
{ id: "openai/gpt-oss-120b", name: "GPT-OSS 120B" },
|
||||
// STT models
|
||||
{ id: "whisper-large-v3", name: "Whisper Large v3", type: "stt", params: ["language", "response_format", "temperature", "prompt"] },
|
||||
{ id: "whisper-large-v3-turbo", name: "Whisper Large v3 Turbo", type: "stt", params: ["language", "response_format", "temperature", "prompt"] },
|
||||
{ id: "distil-whisper-large-v3-en", name: "Distil Whisper Large v3 EN", type: "stt", params: ["language", "response_format", "temperature", "prompt"] },
|
||||
],
|
||||
xai: [
|
||||
{ id: "grok-4", name: "Grok 4" },
|
||||
@@ -450,6 +465,8 @@ export const PROVIDER_MODELS = {
|
||||
{ id: "minimaxai/minimax-m2.7", name: "Minimax M2.7" },
|
||||
{ id: "z-ai/glm4.7", name: "GLM 4.7" },
|
||||
{ id: "nvidia/nv-embedqa-e5-v5", name: "NV EmbedQA E5 v5", type: "embedding" },
|
||||
// STT models
|
||||
{ id: "nvidia/parakeet-ctc-1.1b-asr", name: "Parakeet CTC 1.1B", type: "stt", params: ["language"] },
|
||||
],
|
||||
nebius: [
|
||||
{ id: "meta-llama/Llama-3.3-70B-Instruct", name: "Llama 3.3 70B Instruct" },
|
||||
@@ -555,6 +572,18 @@ export const PROVIDER_MODELS = {
|
||||
huggingface: [
|
||||
{ id: "black-forest-labs/FLUX.1-schnell", name: "FLUX.1 Schnell", type: "image", params: [] },
|
||||
{ id: "stabilityai/stable-diffusion-xl-base-1.0", name: "SDXL Base 1.0", type: "image", params: [] },
|
||||
// STT models
|
||||
{ id: "openai/whisper-large-v3", name: "Whisper Large v3 (HF)", type: "stt", params: ["language"] },
|
||||
{ id: "openai/whisper-small", name: "Whisper Small (HF)", type: "stt", params: ["language"] },
|
||||
],
|
||||
deepgram: [
|
||||
{ id: "nova-3", name: "Nova 3", type: "stt", params: ["language"] },
|
||||
{ id: "nova-2", name: "Nova 2", type: "stt", params: ["language"] },
|
||||
{ id: "whisper-large", name: "Whisper Large", type: "stt", params: ["language"] },
|
||||
],
|
||||
assemblyai: [
|
||||
{ id: "universal-3-pro", name: "Universal 3 Pro", type: "stt", params: ["language"] },
|
||||
{ id: "universal-2", name: "Universal 2", type: "stt", params: ["language"] },
|
||||
],
|
||||
"fal-ai": [
|
||||
{ id: "fal-ai/flux/schnell", name: "FLUX Schnell", type: "image", params: ["n", "size"] },
|
||||
|
||||
@@ -24,6 +24,15 @@ const VOICES_STANDARD = v("alloy", "ash", "coral", "echo", "fable", "nova", "ony
|
||||
// 13 voices for gpt-4o-mini-tts
|
||||
const VOICES_FULL = v("alloy", "ash", "ballad", "cedar", "coral", "echo", "fable", "marin", "nova", "onyx", "sage", "shimmer", "verse");
|
||||
|
||||
// Gemini prebuilt voices (30 voices, multi-language auto-detect).
// Voice ids double as display names; every entry is exposed as a TTS voice.
const GEMINI_VOICE_NAMES =
  "Zephyr Puck Charon Kore Fenrir Leda Orus Aoede " +
  "Callirrhoe Autonoe Enceladus Iapetus Umbriel Algieba " +
  "Despina Erinome Algenib Rasalgethi Laomedeia Achernar " +
  "Alnilam Schedar Gacrux Pulcherrima Achird Zubenelgenubi " +
  "Vindemiatrix Sadachbia Sadaltager Sulafat";
const GEMINI_VOICES = GEMINI_VOICE_NAMES.split(" ").map((voiceName) => ({
  id: voiceName,
  name: voiceName,
  type: "tts",
}));
|
||||
|
||||
// ── TTS Config (config-driven, single source of truth) ─────────────────────
|
||||
export const TTS_MODELS_CONFIG = {
|
||||
openai: {
|
||||
@@ -85,6 +94,17 @@ export const TTS_MODELS_CONFIG = {
|
||||
"google-tts": {
|
||||
defaults: GOOGLE_TTS_LANGUAGES,
|
||||
},
|
||||
gemini: {
|
||||
models: [
|
||||
{ id: "gemini-2.5-flash-preview-tts", name: "Gemini 2.5 Flash TTS", type: "tts" },
|
||||
{ id: "gemini-2.5-pro-preview-tts", name: "Gemini 2.5 Pro TTS", type: "tts" },
|
||||
],
|
||||
voices: {
|
||||
"gemini-2.5-flash-preview-tts": GEMINI_VOICES,
|
||||
"gemini-2.5-pro-preview-tts": GEMINI_VOICES,
|
||||
},
|
||||
allVoices: GEMINI_VOICES,
|
||||
},
|
||||
};
|
||||
|
||||
// ── Helper: get voices for a specific model ────────────────────────────────
|
||||
|
||||
194
open-sse/handlers/sttCore.js
Normal file
@@ -0,0 +1,194 @@
|
||||
import { Buffer } from "node:buffer";
|
||||
import { createErrorResult } from "../utils/error.js";
|
||||
import { HTTP_STATUS } from "../config/runtimeConfig.js";
|
||||
import { AI_PROVIDERS } from "../../src/shared/constants/providers.js";
|
||||
|
||||
// Build auth headers from sttConfig + token.
// Each provider declares its auth scheme in sttConfig.authHeader;
// unknown schemes fall back to a standard Bearer token.
function buildAuthHeaders(cfg, token) {
  if (!token) return {};
  const schemes = {
    bearer: { Authorization: `Bearer ${token}` },
    token: { Authorization: `Token ${token}` },
    "x-api-key": { "x-api-key": token },
    key: { Authorization: `Key ${token}` },
  };
  return schemes[cfg.authHeader] ?? { Authorization: `Bearer ${token}` };
}
|
||||
|
||||
// Map browser file MIME / extension → audio MIME for binary-upload formats
// (Deepgram / HuggingFace). Trusts a declared audio/* MIME first, then falls
// back to the filename extension, then to application/octet-stream.
function resolveAudioContentType(file) {
  const declared = (file.type || "").toLowerCase();
  if (declared.startsWith("audio/")) return declared;

  const fileName = typeof file.name === "string" ? file.name.toLowerCase() : "";
  const dot = fileName.lastIndexOf(".");
  const ext = dot >= 0 ? fileName.slice(dot + 1) : "";

  const mimeByExt = {
    mp3: "audio/mpeg",
    mp4: "audio/mp4",
    m4a: "audio/mp4",
    wav: "audio/wav",
    ogg: "audio/ogg",
    flac: "audio/flac",
    webm: "audio/webm",
    aac: "audio/aac",
    opus: "audio/opus",
  };
  return mimeByExt[ext] || "application/octet-stream";
}
|
||||
|
||||
// Turn a failed upstream fetch Response into a createErrorResult.
// Best-effort: prefer a JSON error message if the body parses, otherwise
// use the raw body text, otherwise a generic status-based message.
async function upstreamError(res) {
  let bodyText = "";
  try {
    bodyText = await res.text();
  } catch {
    // body unreadable — fall through to the generic message
  }

  let message = bodyText || `Upstream error (${res.status})`;
  try {
    const parsed = JSON.parse(bodyText);
    message = parsed?.error?.message || parsed?.error || parsed?.message || message;
  } catch {
    // not JSON — keep the plain-text message
  }

  const asString = typeof message === "string" ? message : JSON.stringify(message);
  return createErrorResult(res.status, asString);
}
|
||||
|
||||
// Deepgram pre-recorded transcription: raw binary POST, model + options as
// query params. Uses explicit `language` when provided, else auto-detect.
async function transcribeDeepgram(cfg, file, model, token, formData) {
  const endpoint = new URL(cfg.baseUrl);
  const params = { model, smart_format: "true", punctuate: "true" };

  const language = formData.get("language");
  if (typeof language === "string" && language.trim()) {
    params.language = language.trim();
  } else {
    params.detect_language = "true"; // let Deepgram pick the language
  }
  for (const [key, value] of Object.entries(params)) {
    endpoint.searchParams.set(key, value);
  }

  const audioBytes = await file.arrayBuffer();
  const res = await fetch(endpoint, {
    method: "POST",
    headers: { ...buildAuthHeaders(cfg, token), "Content-Type": resolveAudioContentType(file) },
    body: audioBytes,
  });
  if (!res.ok) return upstreamError(res);

  const payload = await res.json();
  // First alternative of the first channel carries the full transcript
  const transcript = payload.results?.channels?.[0]?.alternatives?.[0]?.transcript ?? "";
  return jsonResponse({ text: transcript });
}
|
||||
|
||||
// AssemblyAI: upload → submit transcript job → poll until done (max 120s).
// @param cfg   sttConfig for the provider (baseUrl = /v2/transcript endpoint)
// @param file  audio File/Blob from the incoming multipart form
// @param model AssemblyAI speech model id
// @param token API key (sent via buildAuthHeaders)
// @returns {success, response} with { text } JSON, or an error result
async function transcribeAssemblyAI(cfg, file, model, token) {
  const auth = buildAuthHeaders(cfg, token);

  // 1) Upload the raw audio bytes; AssemblyAI returns a private upload_url.
  const buf = await file.arrayBuffer();
  const up = await fetch("https://api.assemblyai.com/v2/upload", {
    method: "POST", headers: { ...auth, "Content-Type": "application/octet-stream" }, body: buf,
  });
  if (!up.ok) return upstreamError(up);
  const { upload_url } = await up.json();

  // 2) Submit the transcription job.
  // FIX: the transcript API expects `speech_model` as a single string —
  // the previous `speech_models: [model]` array was not a recognized field,
  // so the selected model was silently ignored.
  const sub = await fetch(cfg.baseUrl, {
    method: "POST",
    headers: { ...auth, "Content-Type": "application/json" },
    body: JSON.stringify({ audio_url: upload_url, speech_model: model, language_detection: true }),
  });
  if (!sub.ok) return upstreamError(sub);
  const { id } = await sub.json();

  // 3) Poll every 2s until completed/error, bounded at 120s total.
  const start = Date.now();
  while (Date.now() - start < 120_000) {
    await new Promise((r) => setTimeout(r, 2000));
    const poll = await fetch(`${cfg.baseUrl}/${id}`, { headers: auth });
    if (!poll.ok) continue; // transient poll failure — keep retrying
    const r = await poll.json();
    if (r.status === "completed") return jsonResponse({ text: r.text || "" });
    if (r.status === "error") return createErrorResult(500, r.error || "AssemblyAI failed");
  }
  return createErrorResult(504, "AssemblyAI timeout after 120s");
}
|
||||
|
||||
// Nvidia NIM ASR: multipart upload, then normalize the response shape to { text }.
async function transcribeNvidia(cfg, file, model, token) {
  const form = new FormData();
  form.append("file", file, file.name || "audio.wav");
  form.append("model", model);

  const res = await fetch(cfg.baseUrl, {
    method: "POST",
    headers: buildAuthHeaders(cfg, token),
    body: form,
  });
  if (!res.ok) return upstreamError(res);

  const payload = await res.json();
  // Deployments differ: some return `text`, others `transcript`.
  return jsonResponse({ text: payload.text || payload.transcript || "" });
}
|
||||
|
||||
// Gemini STT: generateContent with inline_data audio + a transcription prompt.
// The caller's `prompt` (if any) overrides the default instruction; an explicit
// `language` is appended as a hint.
async function transcribeGemini(cfg, file, model, token, formData) {
  const buf = await file.arrayBuffer();
  const b64 = Buffer.from(buf).toString("base64");
  const mime = resolveAudioContentType(file);
  const lang = formData.get("language");
  const userPrompt = formData.get("prompt");
  let promptText = userPrompt && typeof userPrompt === "string" && userPrompt.trim()
    ? userPrompt.trim()
    : "Generate a transcript of the speech. Return only the transcribed text, no commentary.";
  if (typeof lang === "string" && lang.trim()) promptText += ` Language: ${lang.trim()}.`;

  // FIX: send the API key via the `x-goog-api-key` header instead of the
  // `?key=` query param so the credential does not leak into URLs captured
  // by access logs and proxies. Both are officially supported by the API.
  const url = `${cfg.baseUrl}/${model}:generateContent`;
  const res = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json", "x-goog-api-key": token },
    body: JSON.stringify({
      contents: [{ parts: [{ text: promptText }, { inline_data: { mime_type: mime, data: b64 } }] }],
    }),
  });
  if (!res.ok) return upstreamError(res);
  const data = await res.json();
  // Concatenate all text parts of the first candidate
  const text = data?.candidates?.[0]?.content?.parts?.map((p) => p.text).filter(Boolean).join("") || "";
  return jsonResponse({ text });
}
|
||||
|
||||
// HuggingFace Inference API: POST raw audio bytes to {baseUrl}/{model_id}.
async function transcribeHuggingFace(cfg, file, model, token) {
  // Reject ids that could escape the base path (path-traversal guard)
  if (model.includes("..") || model.includes("//")) {
    return createErrorResult(400, "Invalid model ID");
  }

  const base = cfg.baseUrl.replace(/\/+$/, "");
  const audioBytes = await file.arrayBuffer();

  const res = await fetch(`${base}/${model}`, {
    method: "POST",
    headers: { ...buildAuthHeaders(cfg, token), "Content-Type": resolveAudioContentType(file) },
    body: audioBytes,
  });
  if (!res.ok) return upstreamError(res);

  const payload = await res.json();
  return jsonResponse({ text: payload.text || "" });
}
|
||||
|
||||
// Default path: OpenAI/Groq/Whisper-compatible multipart endpoint.
// The upstream body is passed through verbatim so non-JSON response_format
// values (text/srt/vtt) keep working.
async function transcribeOpenAICompatible(cfg, file, model, token, formData) {
  const form = new FormData();
  form.append("file", file, file.name || "audio.wav");
  form.append("model", model);

  // Forward only whitelisted optional params that were actually provided
  for (const key of ["language", "prompt", "response_format", "temperature"]) {
    const value = formData.get(key);
    if (value !== null && value !== undefined && value !== "") form.append(key, value);
  }

  const res = await fetch(cfg.baseUrl, {
    method: "POST",
    headers: buildAuthHeaders(cfg, token),
    body: form,
  });
  if (!res.ok) return upstreamError(res);

  const contentType = res.headers.get("content-type") || "application/json";
  const bodyText = await res.text();
  return {
    success: true,
    response: new Response(bodyText, {
      status: 200,
      headers: { "Content-Type": contentType, "Access-Control-Allow-Origin": "*" },
    }),
  };
}
|
||||
|
||||
// Wrap a plain object as a successful CORS-enabled JSON handler result.
function jsonResponse(obj) {
  const response = new Response(JSON.stringify(obj), {
    status: 200,
    headers: {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
    },
  });
  return { success: true, response };
}
|
||||
|
||||
/**
 * STT core handler — validates input, resolves provider sttConfig and
 * credentials, then dispatches to the provider-specific transcriber
 * based on sttConfig.format.
 * @param {Object} args
 * @param {string} args.provider - provider key into AI_PROVIDERS
 * @param {string} args.model - model id to transcribe with
 * @param {FormData} args.formData - incoming multipart form (must contain "file")
 * @param {Object} args.credentials - { apiKey } or { accessToken }
 * @returns {Promise<{success, response, status?, error?}>}
 */
export async function handleSttCore({ provider, model, formData, credentials }) {
  const audioFile = formData.get("file");
  if (!audioFile) {
    return createErrorResult(HTTP_STATUS.BAD_REQUEST, "Missing required field: file");
  }

  const cfg = AI_PROVIDERS[provider]?.sttConfig;
  if (!cfg) {
    return createErrorResult(HTTP_STATUS.BAD_REQUEST, `Provider '${provider}' does not support STT`);
  }

  const needsAuth = cfg.authType !== "none";
  const token = needsAuth ? (credentials?.apiKey || credentials?.accessToken) : null;
  if (needsAuth && !token) {
    return createErrorResult(HTTP_STATUS.UNAUTHORIZED, `No credentials for STT provider: ${provider}`);
  }

  try {
    // Dispatch on the wire format declared in sttConfig; anything not
    // special-cased is treated as OpenAI/Whisper-compatible multipart.
    if (cfg.format === "deepgram") return await transcribeDeepgram(cfg, audioFile, model, token, formData);
    if (cfg.format === "assemblyai") return await transcribeAssemblyAI(cfg, audioFile, model, token);
    if (cfg.format === "nvidia-asr") return await transcribeNvidia(cfg, audioFile, model, token);
    if (cfg.format === "huggingface-asr") return await transcribeHuggingFace(cfg, audioFile, model, token);
    if (cfg.format === "gemini-stt") return await transcribeGemini(cfg, audioFile, model, token, formData);
    return await transcribeOpenAICompatible(cfg, audioFile, model, token, formData);
  } catch (err) {
    return createErrorResult(HTTP_STATUS.BAD_GATEWAY, err.message || "STT request failed");
  }
}
|
||||
@@ -48,16 +48,16 @@ function createTtsResponse(base64Audio, format, responseFormat) {
|
||||
*
|
||||
* @returns {Promise<{success, response, status?, error?}>}
|
||||
*/
|
||||
export async function handleTtsCore({ provider, model, input, credentials, responseFormat = "mp3" }) {
|
||||
export async function handleTtsCore({ provider, model, input, credentials, responseFormat = "mp3", language }) {
|
||||
if (!input?.trim()) {
|
||||
return createErrorResult(HTTP_STATUS.BAD_REQUEST, "Missing required field: input");
|
||||
}
|
||||
|
||||
try {
|
||||
// Special-case adapters (google-tts, edge-tts, local-device, elevenlabs, openai, openrouter)
|
||||
// Special-case adapters (google-tts, edge-tts, local-device, elevenlabs, openai, openrouter, gemini)
|
||||
const adapter = getTtsAdapter(provider);
|
||||
if (adapter) {
|
||||
const result = await adapter.synthesize(input.trim(), model, credentials, responseFormat);
|
||||
const result = await adapter.synthesize(input.trim(), model, credentials, responseFormat, { language });
|
||||
// Adapter may return a full {success, response} (legacy) or {base64, format}
|
||||
if (result.success !== undefined) return result;
|
||||
return createTtsResponse(result.base64, result.format, responseFormat);
|
||||
|
||||
117
open-sse/handlers/ttsProviders/gemini.js
Normal file
@@ -0,0 +1,117 @@
|
||||
// Gemini TTS — generateContent with AUDIO modality returns PCM L16, wrap as WAV
|
||||
import { Buffer } from "node:buffer";
|
||||
|
||||
const DEFAULT_MODEL = "gemini-2.5-flash-preview-tts";
const DEFAULT_VOICE = "Kore";
const KNOWN_MODELS = ["gemini-2.5-flash-preview-tts", "gemini-2.5-pro-preview-tts"];

// Parse a "model/voice" selector into its parts.
// - exact known model id          → that model, default voice
// - "known-model/SomeVoice"       → that model, that voice
// - anything else (or empty)      → default model; non-empty input is the voice
function parseGeminiModelVoice(input) {
  if (!input) return { modelId: DEFAULT_MODEL, voiceId: DEFAULT_VOICE };

  const match = KNOWN_MODELS.find((id) => input === id || input.startsWith(`${id}/`));
  if (match) {
    const voicePart = input.length > match.length ? input.slice(match.length + 1) : DEFAULT_VOICE;
    return { modelId: match, voiceId: voicePart };
  }
  return { modelId: DEFAULT_MODEL, voiceId: input };
}
|
||||
// Gemini returns PCM 16-bit signed mono @ 24kHz
const SAMPLE_RATE = 24000;
const CHANNELS = 1;
const BITS_PER_SAMPLE = 16;

// Prepend a 44-byte RIFF/WAVE header to a raw PCM payload so browsers and
// audio players can decode it directly.
function pcmToWav(pcmBuffer) {
  const dataSize = pcmBuffer.length;
  const bytesPerSample = BITS_PER_SAMPLE / 8;
  const blockAlign = CHANNELS * bytesPerSample;
  const byteRate = SAMPLE_RATE * blockAlign;

  const header = Buffer.alloc(44);
  let offset = 0;
  header.write("RIFF", offset); offset += 4;
  header.writeUInt32LE(36 + dataSize, offset); offset += 4; // RIFF chunk size
  header.write("WAVE", offset); offset += 4;
  header.write("fmt ", offset); offset += 4;
  header.writeUInt32LE(16, offset); offset += 4;            // fmt chunk size (PCM)
  header.writeUInt16LE(1, offset); offset += 2;             // audio format 1 = PCM
  header.writeUInt16LE(CHANNELS, offset); offset += 2;
  header.writeUInt32LE(SAMPLE_RATE, offset); offset += 4;
  header.writeUInt32LE(byteRate, offset); offset += 4;
  header.writeUInt16LE(blockAlign, offset); offset += 2;
  header.writeUInt16LE(BITS_PER_SAMPLE, offset); offset += 2;
  header.write("data", offset); offset += 4;
  header.writeUInt32LE(dataSize, offset);

  return Buffer.concat([header, pcmBuffer]);
}
|
||||
|
||||
// Build the TTS prompt. Gemini TTS models expect a spoken-style instruction;
// prefixing "Say [in {language}]:" forces TTS mode. If the text already
// contains a ": " it is treated as a user-supplied style instruction and
// passed through untouched.
function buildPrompt(text, language) {
  const hasStyleDirective = /:\s/.test(text);
  if (hasStyleDirective) return text;
  const prefix = language ? `Say in ${language}` : "Say";
  return `${prefix}: ${text}`;
}
|
||||
|
||||
export default {
  /**
   * Synthesize speech via Gemini generateContent with the AUDIO response
   * modality. The raw PCM payload is wrapped as WAV before returning.
   * @param {string} text - text (optionally with a "style: " directive) to speak
   * @param {string} model - "model", "model/voice", or bare voice name
   * @param {Object} credentials - must contain apiKey
   * @param {string} _responseFormat - ignored; output is always WAV
   * @param {Object} [opts] - { language } hint for the spoken prompt
   * @returns {Promise<{base64: string, format: "wav"}>}
   * @throws {Error} on missing key, upstream failure, or empty audio response
   */
  async synthesize(text, model, credentials, _responseFormat, opts = {}) {
    if (!credentials?.apiKey) throw new Error("No Gemini API key configured");
    const { modelId, voiceId } = parseGeminiModelVoice(model);

    // FIX: pass the API key via the `x-goog-api-key` header instead of the
    // `?key=` query string so the credential never appears in URLs recorded
    // by access logs or proxies. Both forms are officially supported.
    const url = `https://generativelanguage.googleapis.com/v1beta/models/${modelId}:generateContent`;
    const res = await fetch(url, {
      method: "POST",
      headers: { "Content-Type": "application/json", "x-goog-api-key": credentials.apiKey },
      body: JSON.stringify({
        contents: [{ parts: [{ text: buildPrompt(text, opts.language) }] }],
        generationConfig: {
          responseModalities: ["AUDIO"],
          speechConfig: { voiceConfig: { prebuiltVoiceConfig: { voiceName: voiceId } } },
        },
      }),
    });
    if (!res.ok) {
      const err = await res.json().catch(() => ({}));
      throw new Error(err?.error?.message || `Gemini TTS failed: ${res.status}`);
    }
    const data = await res.json();
    // Audio comes back as base64 PCM in the first part carrying inlineData
    const b64 = data?.candidates?.[0]?.content?.parts?.find((p) => p.inlineData?.data)?.inlineData?.data;
    if (!b64) {
      const reason = data?.candidates?.[0]?.finishReason || data?.promptFeedback?.blockReason || "unknown";
      throw new Error(`Gemini TTS returned no audio (finishReason: ${reason}, voice: ${voiceId}, model: ${modelId})`);
    }
    const wav = pcmToWav(Buffer.from(b64, "base64"));
    return { base64: wav.toString("base64"), format: "wav" };
  },
};
|
||||
|
||||
// Voice fetcher — return the fixed prebuilt voice catalog (Gemini exposes
// no list-voices API). All ids are starred-named prebuilt voices; lang is
// labeled "en" though playback auto-detects the input language.
const PREBUILT_VOICE_TABLE = [
  ["Zephyr", "Female"], ["Puck", "Male"], ["Charon", "Male"], ["Kore", "Female"],
  ["Fenrir", "Male"], ["Leda", "Female"], ["Orus", "Male"], ["Aoede", "Female"],
  ["Callirrhoe", "Female"], ["Autonoe", "Female"], ["Enceladus", "Male"], ["Iapetus", "Male"],
  ["Umbriel", "Male"], ["Algieba", "Male"], ["Despina", "Female"], ["Erinome", "Female"],
  ["Algenib", "Male"], ["Rasalgethi", "Male"], ["Laomedeia", "Female"], ["Achernar", "Female"],
  ["Alnilam", "Male"], ["Schedar", "Male"], ["Gacrux", "Female"], ["Pulcherrima", "Female"],
  ["Achird", "Male"], ["Zubenelgenubi", "Male"], ["Vindemiatrix", "Female"], ["Sadachbia", "Male"],
  ["Sadaltager", "Male"], ["Sulafat", "Female"],
];
const PREBUILT_VOICES = PREBUILT_VOICE_TABLE.map(([id, gender]) => ({ id, lang: "en", gender }));

// Shape matches the other voice fetchers (ElevenLabs-style records).
export async function fetchGeminiVoices() {
  return PREBUILT_VOICES.map(({ id, lang, gender }) => ({
    voice_id: id,
    name: id,
    labels: { language: lang, gender },
  }));
}
|
||||
@@ -5,6 +5,7 @@ import localDevice, { fetchLocalDeviceVoices } from "./localDevice.js";
|
||||
import elevenlabs, { fetchElevenLabsVoices } from "./elevenlabs.js";
|
||||
import openai from "./openai.js";
|
||||
import openrouter from "./openrouter.js";
|
||||
import gemini, { fetchGeminiVoices } from "./gemini.js";
|
||||
import { FORMAT_HANDLERS } from "./genericFormats.js";
|
||||
import { parseModelVoice } from "./_base.js";
|
||||
|
||||
@@ -16,6 +17,7 @@ const SPECIAL_ADAPTERS = {
|
||||
elevenlabs,
|
||||
openai,
|
||||
openrouter,
|
||||
gemini,
|
||||
};
|
||||
|
||||
export function getTtsAdapter(provider) {
|
||||
@@ -41,7 +43,8 @@ export const VOICE_FETCHERS = {
|
||||
"edge-tts": fetchEdgeTtsVoices,
|
||||
"local-device": fetchLocalDeviceVoices,
|
||||
elevenlabs: fetchElevenLabsVoices,
|
||||
gemini: fetchGeminiVoices,
|
||||
};
|
||||
|
||||
// Re-export for backward compat
|
||||
export { fetchEdgeTtsVoices, fetchLocalDeviceVoices, fetchElevenLabsVoices };
|
||||
export { fetchEdgeTtsVoices, fetchLocalDeviceVoices, fetchElevenLabsVoices, fetchGeminiVoices };
|
||||
|
||||
@@ -11,6 +11,24 @@ const GITHUB_CONFIG = {
|
||||
userAgent: "GitHubCopilotChat/0.26.7",
|
||||
};
|
||||
|
||||
// GLM quota endpoints, keyed by region. "international" is Z.AI, "china"
// is BigModel (open.bigmodel.cn); both serve the same quota/limit payload.
const GLM_QUOTA_URLS = {
  international: "https://api.z.ai/api/monitor/usage/quota/limit",
  china: "https://open.bigmodel.cn/api/monitor/usage/quota/limit",
};

// MiniMax usage endpoints, keyed by provider id. Each region lists URLs in
// preference order; callers try them in sequence and fall back on transient
// errors.
const MINIMAX_USAGE_URLS = {
  minimax: [
    "https://www.minimax.io/v1/token_plan/remains",
    "https://api.minimax.io/v1/api/openplatform/coding_plan/remains",
  ],
  "minimax-cn": [
    "https://www.minimaxi.com/v1/api/openplatform/coding_plan/remains",
    "https://api.minimaxi.com/v1/api/openplatform/coding_plan/remains",
  ],
};
|
||||
|
||||
// Antigravity API config (from Quotio)
|
||||
const ANTIGRAVITY_CONFIG = {
|
||||
quotaApiUrl: "https://cloudcode-pa.googleapis.com/v1internal:fetchAvailableModels",
|
||||
@@ -40,13 +58,13 @@ const CLAUDE_CONFIG = {
|
||||
* @returns {Object} Usage data with quotas
|
||||
*/
|
||||
export async function getUsageForProvider(connection, proxyOptions = null) {
|
||||
const { provider, accessToken, providerSpecificData } = connection;
|
||||
const { provider, accessToken, apiKey, providerSpecificData } = connection;
|
||||
|
||||
switch (provider) {
|
||||
case "github":
|
||||
return await getGitHubUsage(accessToken, providerSpecificData, proxyOptions);
|
||||
case "gemini-cli":
|
||||
return await getGeminiUsage(accessToken, proxyOptions);
|
||||
return await getGeminiUsage(accessToken, providerSpecificData, proxyOptions);
|
||||
case "antigravity":
|
||||
return await getAntigravityUsage(accessToken, providerSpecificData, proxyOptions);
|
||||
case "claude":
|
||||
@@ -61,6 +79,12 @@ export async function getUsageForProvider(connection, proxyOptions = null) {
|
||||
return await getIflowUsage(accessToken);
|
||||
case "ollama":
|
||||
return await getOllamaUsage(accessToken);
|
||||
case "glm":
|
||||
case "glm-cn":
|
||||
return await getGlmUsage(apiKey, provider, proxyOptions);
|
||||
case "minimax":
|
||||
case "minimax-cn":
|
||||
return await getMiniMaxUsage(apiKey, provider, proxyOptions);
|
||||
default:
|
||||
return { message: `Usage API not implemented for ${provider}` };
|
||||
}
|
||||
@@ -188,31 +212,115 @@ function formatGitHubQuotaSnapshot(quota) {
|
||||
}
|
||||
|
||||
/**
 * Gemini CLI Usage — fetch per-model quota via Cloud Code Assist API.
 * Uses retrieveUserQuota (same endpoint as `gemini /stats`) returning
 * per-model buckets with remainingFraction + resetTime.
 *
 * @param {string} accessToken - OAuth bearer token for cloudcode-pa
 * @param {Object} providerSpecificData - connection metadata; may carry a cached projectId
 * @param {Object|null} proxyOptions - forwarded to proxyAwareFetch
 * @returns {Promise<Object>} { plan, quotas } on success, or { plan?, message } on failure
 */
async function getGeminiUsage(accessToken, providerSpecificData, proxyOptions = null) {
  if (!accessToken) {
    return { plan: "Free", message: "Gemini CLI access token not available." };
  }

  try {
    // Resolve project id: prefer connection-stored id, else loadCodeAssist lookup.
    // NOTE(review): when projectId comes from providerSpecificData the plan stays
    // "Free" since the subscription lookup is skipped — confirm this is intended.
    let projectId = providerSpecificData?.projectId || null;
    let plan = "Free";

    if (!projectId) {
      const subInfo = await getGeminiSubscriptionInfo(accessToken, proxyOptions);
      projectId = subInfo?.cloudaicompanionProject || null;
      plan = subInfo?.currentTier?.name || plan;
    }

    if (!projectId) {
      return { plan, message: "Gemini CLI project ID not available." };
    }

    // 10s hard timeout on the quota call
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), 10000);
    let response;
    try {
      response = await proxyAwareFetch(
        "https://cloudcode-pa.googleapis.com/v1internal:retrieveUserQuota",
        {
          method: "POST",
          headers: {
            Authorization: `Bearer ${accessToken}`,
            "Content-Type": "application/json",
          },
          body: JSON.stringify({ project: projectId }),
          signal: controller.signal,
        },
        proxyOptions
      );
    } finally {
      clearTimeout(timeoutId);
    }

    if (!response.ok) {
      return { plan, message: `Gemini CLI quota error (${response.status}).` };
    }

    const data = await response.json();
    const quotas = {};

    if (Array.isArray(data.buckets)) {
      for (const bucket of data.buckets) {
        // Skip buckets missing a model id or quota fraction
        if (!bucket.modelId || bucket.remainingFraction == null) continue;

        const remainingFraction = Number(bucket.remainingFraction) || 0;
        const total = 1000; // Normalized base, matches antigravity convention
        const remaining = Math.round(total * remainingFraction);
        const used = Math.max(0, total - remaining);

        quotas[bucket.modelId] = {
          used,
          total,
          resetAt: parseResetTime(bucket.resetTime),
          remainingPercentage: remainingFraction * 100,
          unlimited: false,
        };
      }
    }

    return { plan, quotas };
  } catch (error) {
    return { message: `Gemini CLI error: ${error.message}` };
  }
}
|
||||
|
||||
/**
 * Get Gemini CLI subscription info via loadCodeAssist.
 * Returns the raw loadCodeAssist payload (which carries
 * cloudaicompanionProject and currentTier) or null on any failure —
 * callers treat null as "no project/plan information available".
 *
 * @param {string} accessToken - OAuth bearer token for cloudcode-pa
 * @param {Object|null} proxyOptions - forwarded to proxyAwareFetch
 * @returns {Promise<Object|null>}
 */
async function getGeminiSubscriptionInfo(accessToken, proxyOptions = null) {
  // 10s hard timeout; always cleared in finally
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), 10000);
  try {
    const response = await proxyAwareFetch(
      "https://cloudcode-pa.googleapis.com/v1internal:loadCodeAssist",
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${accessToken}`,
          Accept: "application/json",
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          // Generic IDE metadata; only pluginType matters for the GEMINI CLI path
          metadata: {
            ideType: "IDE_UNSPECIFIED",
            platform: "PLATFORM_UNSPECIFIED",
            pluginType: "GEMINI",
          },
        }),
        signal: controller.signal,
      },
      proxyOptions
    );

    if (!response.ok) return null;
    return await response.json();
  } catch {
    // Network/timeout/parse failure — caller falls back gracefully
    return null;
  } finally {
    clearTimeout(timeoutId);
  }
}
|
||||
|
||||
@@ -798,3 +906,206 @@ async function getOllamaUsage(accessToken, providerSpecificData) {
|
||||
return { message: "Unable to fetch Ollama Cloud usage." };
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * GLM Coding Plan usage (international + China regions).
 *
 * Fetches the region-specific quota endpoint and converts TOKENS_LIMIT
 * entries into a percentage-based quota map (total is normalized to 100).
 *
 * @param {string} apiKey   GLM API key, sent as a Bearer token.
 * @param {string} provider Provider id; "glm-cn" selects the China endpoint,
 *                          anything else the international one.
 * @param {object} [proxyOptions] Optional proxy settings for proxyAwareFetch.
 * @returns {Promise<object>} `{ plan, quotas }` on success, `{ message }` on failure.
 */
async function getGlmUsage(apiKey, provider, proxyOptions = null) {
  if (!apiKey) {
    return { message: "GLM API key not available." };
  }

  const region = provider === "glm-cn" ? "china" : "international";
  const quotaUrl = GLM_QUOTA_URLS[region];

  try {
    const response = await proxyAwareFetch(quotaUrl, {
      headers: {
        Authorization: `Bearer ${apiKey}`,
        Accept: "application/json",
      },
    }, proxyOptions);

    if (!response.ok) {
      if (response.status === 401) {
        return { message: "GLM API key invalid or expired." };
      }
      return { message: `GLM quota API error (${response.status}).` };
    }

    const json = await response.json();
    const data = json?.data && typeof json.data === "object" ? json.data : {};
    const limits = Array.isArray(data.limits) ? data.limits : [];
    const quotas = {};

    for (const limit of limits) {
      if (!limit || limit.type !== "TOKENS_LIMIT") continue;
      // The API reports usage as a percentage already; normalize to a 0-100 quota.
      const usedPercent = Number(limit.percentage) || 0;
      const resetMs = Number(limit.nextResetTime) || 0;
      const remaining = Math.max(0, 100 - usedPercent);

      // NOTE(review): if the API ever returns multiple TOKENS_LIMIT entries,
      // the last one wins here — assumes at most one; confirm upstream.
      quotas["session"] = {
        used: usedPercent,
        total: 100,
        remaining,
        remainingPercentage: remaining,
        resetAt: resetMs > 0 ? new Date(resetMs).toISOString() : null,
        unlimited: false,
      };
    }

    // Title-case the plan level, e.g. "PRO" -> "Pro".
    const levelRaw = typeof data.level === "string" ? data.level : "";
    const plan = levelRaw
      ? levelRaw.charAt(0).toUpperCase() + levelRaw.slice(1).toLowerCase()
      : "Unknown";

    return { plan, quotas };
  } catch (error) {
    return { message: `GLM error: ${error.message}` };
  }
}
|
||||
|
||||
// ── MiniMax helpers ──────────────────────────────────────────────────────

/**
 * True for text-generation models whose quota is tracked: the MiniMax-M*
 * family and coding-plan entries. Null/undefined/whitespace names are not
 * quota models. Matching is case-insensitive.
 */
function isMiniMaxTextQuotaModel(modelName) {
  const normalized = (modelName || "").trim().toLowerCase();
  return normalized.startsWith("minimax-m") || normalized.startsWith("coding-plan");
}
|
||||
|
||||
/**
 * Read a field from a MiniMax API object that may use either snake_case or
 * camelCase keys. Prefers the snake_case key; falls back to camelCase; returns
 * null when the object is not an object or both keys are nullish. Uses `??`
 * so legitimate falsy values (0, "") are preserved.
 */
function getMiniMaxField(model, snakeKey, camelKey) {
  if (!model || typeof model !== "object") return null;
  return model[snakeKey] ?? model[camelKey] ?? null;
}
|
||||
|
||||
// Total allowance for the rolling interval (session) window; 0 when the
// field is absent or not a number. Never negative.
function getMiniMaxSessionTotal(model) {
  return Math.max(0, Number(getMiniMaxField(model, "current_interval_total_count", "currentIntervalTotalCount")) || 0);
}
|
||||
|
||||
// Total allowance for the weekly window; 0 when the field is absent or not a
// number. Never negative.
function getMiniMaxWeeklyTotal(model) {
  return Math.max(0, Number(getMiniMaxField(model, "current_weekly_total_count", "currentWeeklyTotalCount")) || 0);
}
|
||||
|
||||
/**
 * Pick the model whose quota window best represents the account: the one
 * with the largest total among models reporting a non-zero total, falling
 * back to the whole list when none do.
 *
 * @param {object[]} models   Candidate model entries.
 * @param {(m: object) => number} getTotal Extracts the window total.
 * @returns {object|null} The chosen model, or null for an empty list.
 *   Ties keep the earliest model (strict ">" in the reduce).
 */
function pickMiniMaxRepresentativeModel(models, getTotal) {
  const withQuota = models.filter((m) => getTotal(m) > 0);
  const pool = withQuota.length > 0 ? withQuota : models;
  if (pool.length === 0) return null;
  return pool.reduce((best, current) => (getTotal(current) > getTotal(best) ? current : best));
}
|
||||
|
||||
/**
 * Resolve the reset timestamp for a MiniMax quota window.
 *
 * Prefers the relative "remains" duration (milliseconds from capture time,
 * turned into an absolute ISO timestamp); falls back to parsing the absolute
 * end-time field via parseResetTime when no positive duration is present.
 */
function getMiniMaxResetAt(model, capturedAtMs, remainsSnake, remainsCamel, endSnake, endCamel) {
  const remainsMs = Number(getMiniMaxField(model, remainsSnake, remainsCamel)) || 0;
  if (remainsMs > 0) return new Date(capturedAtMs + remainsMs).toISOString();
  return parseResetTime(getMiniMaxField(model, endSnake, endCamel));
}
|
||||
|
||||
/**
 * Build a normalized quota entry from MiniMax counters.
 *
 * @param {number} total Total allowance for the window (negative clamps to 0).
 * @param {number} count Raw counter from the API — either usage-so-far or
 *   remaining allowance, depending on `countMeansRemaining`.
 * @param {string|null} resetAt ISO timestamp of the next reset, or null.
 * @param {boolean} countMeansRemaining true when `count` is the remaining
 *   allowance (the coding_plan/remains endpoint) rather than usage.
 * @returns {{used: number, total: number, remaining: number,
 *   remainingPercentage: number, resetAt: string|null, unlimited: boolean}}
 */
function buildMiniMaxQuota(total, count, resetAt, countMeansRemaining) {
  const safeTotal = Math.max(0, total);
  // Interpret the counter per endpoint semantics, clamped into [0, safeTotal].
  const used = countMeansRemaining ? Math.max(safeTotal - count, 0) : Math.min(Math.max(0, count), safeTotal);
  const remaining = Math.max(safeTotal - used, 0);
  return {
    used,
    total: safeTotal,
    remaining,
    // Percentage is clamped to [0, 100]; a zero total yields 0%.
    remainingPercentage: safeTotal > 0 ? Math.max(0, Math.min(100, (remaining / safeTotal) * 100)) : 0,
    resetAt,
    unlimited: false,
  };
}
|
||||
|
||||
/**
 * MiniMax Token Plan / Coding Plan usage.
 *
 * Tries each usage endpoint configured for the provider in order, falling
 * back to the next one on 404/405/5xx responses or network errors. Extracts
 * session ("session (5h)") and weekly ("weekly (7d)") quotas from the text
 * models in the response.
 *
 * @param {string} apiKey   MiniMax API key, sent as a Bearer token.
 * @param {string} provider Provider id used to look up MINIMAX_USAGE_URLS.
 * @param {object} [proxyOptions] Optional proxy settings for proxyAwareFetch.
 * @returns {Promise<object>} `{ quotas }` on success, or `{ message }`.
 */
async function getMiniMaxUsage(apiKey, provider, proxyOptions = null) {
  if (!apiKey) {
    return { message: "MiniMax API key not available." };
  }

  const usageUrls = MINIMAX_USAGE_URLS[provider] || [];
  let lastErrorMessage = "";

  for (let index = 0; index < usageUrls.length; index += 1) {
    const usageUrl = usageUrls[index];
    const canFallback = index < usageUrls.length - 1;

    try {
      const response = await proxyAwareFetch(usageUrl, {
        method: "GET",
        headers: {
          Authorization: `Bearer ${apiKey}`,
          Accept: "application/json",
          "Content-Type": "application/json",
        },
      }, proxyOptions);

      // Body may be empty or non-JSON even on 200s; parse defensively.
      const rawText = await response.text();
      let payload = {};
      if (rawText) {
        try { payload = JSON.parse(rawText); } catch { payload = {}; }
      }

      const baseResp = (payload?.base_resp ?? payload?.baseResp) || {};
      const apiStatusCode = Number(baseResp.status_code ?? baseResp.statusCode) || 0;
      const apiStatusMessage = String(baseResp.status_msg ?? baseResp.statusMsg ?? "").trim();
      const combined = `${apiStatusMessage} ${rawText}`.trim();
      const authLike = /token plan|coding plan|invalid api key|invalid key|unauthorized|inactive/i;

      // Auth failures are terminal — trying other endpoints cannot help.
      if (response.status === 401 || response.status === 403 || apiStatusCode === 1004 || authLike.test(combined)) {
        return { message: "MiniMax API key invalid or inactive. Use an active Token/Coding Plan key." };
      }

      if (!response.ok) {
        lastErrorMessage = `MiniMax usage endpoint error (${response.status})`;
        // Only fall back on "endpoint missing / server broken" statuses.
        if ((response.status === 404 || response.status === 405 || response.status >= 500) && canFallback) continue;
        return { message: `MiniMax connected. ${lastErrorMessage}` };
      }

      if (apiStatusCode !== 0) {
        return { message: `MiniMax connected. ${apiStatusMessage || "Upstream quota API error"}` };
      }

      const modelRemains = payload?.model_remains ?? payload?.modelRemains;
      const allModels = Array.isArray(modelRemains) ? modelRemains : [];
      const textModels = allModels.filter((m) => isMiniMaxTextQuotaModel(String(getMiniMaxField(m, "model_name", "modelName"))));

      if (textModels.length === 0) {
        return { message: "MiniMax connected. No text quota data was returned." };
      }

      const capturedAtMs = Date.now();
      // The coding_plan/remains endpoint reports remaining counts; the other
      // endpoints report usage-so-far. buildMiniMaxQuota handles both.
      const countMeansRemaining = usageUrl.includes("/coding_plan/remains");
      const quotas = {};

      const sessionModel = pickMiniMaxRepresentativeModel(textModels, getMiniMaxSessionTotal);
      if (sessionModel) {
        const total = getMiniMaxSessionTotal(sessionModel);
        const count = Math.max(0, Number(getMiniMaxField(sessionModel, "current_interval_usage_count", "currentIntervalUsageCount")) || 0);
        quotas["session (5h)"] = buildMiniMaxQuota(
          total, count,
          getMiniMaxResetAt(sessionModel, capturedAtMs, "remains_time", "remainsTime", "end_time", "endTime"),
          countMeansRemaining
        );
      }

      const weeklyModel = pickMiniMaxRepresentativeModel(textModels, getMiniMaxWeeklyTotal);
      if (weeklyModel && getMiniMaxWeeklyTotal(weeklyModel) > 0) {
        const total = getMiniMaxWeeklyTotal(weeklyModel);
        const count = Math.max(0, Number(getMiniMaxField(weeklyModel, "current_weekly_usage_count", "currentWeeklyUsageCount")) || 0);
        quotas["weekly (7d)"] = buildMiniMaxQuota(
          total, count,
          getMiniMaxResetAt(weeklyModel, capturedAtMs, "weekly_remains_time", "weeklyRemainsTime", "weekly_end_time", "weeklyEndTime"),
          countMeansRemaining
        );
      }

      if (Object.keys(quotas).length === 0) {
        return { message: "MiniMax connected. Unable to extract quota usage." };
      }

      return { quotas };
    } catch (error) {
      lastErrorMessage = error.message;
      if (!canFallback) break;
    }
  }

  return { message: lastErrorMessage ? `MiniMax connected. Unable to fetch usage: ${lastErrorMessage}` : "MiniMax connected. Unable to fetch usage." };
}
|
||||
|
||||
@@ -16,8 +16,10 @@ const MODEL_RULES = [
|
||||
];
|
||||
|
||||
/**
 * Decide whether reasoning content should be injected into a message.
 *
 * Only assistant messages qualify, and only when they do not already carry a
 * non-empty `reasoning_content` string. For the "toolCalls" scope the message
 * must additionally have at least one tool call; any other scope matches all
 * qualifying assistant messages.
 */
function shouldInject(message, scope) {
  if (message?.role !== "assistant") return false;
  const rc = message.reasoning_content;
  // An existing non-empty reasoning string means there is nothing to inject.
  if (typeof rc === "string" && rc.length > 0) return false;
  if (scope === "toolCalls") return Array.isArray(message.tool_calls) && message.tool_calls.length > 0;
  return true;
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "9router-app",
|
||||
"version": "0.4.16",
|
||||
"version": "0.4.17",
|
||||
"description": "9Router web dashboard",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
|
||||
|
Before Width: | Height: | Size: 4.7 KiB After Width: | Height: | Size: 3.5 KiB |
|
Before Width: | Height: | Size: 4.7 KiB After Width: | Height: | Size: 3.5 KiB |
BIN
public/providers/aws-polly.png
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
public/providers/black-forest-labs.png
Normal file
|
After Width: | Height: | Size: 4.5 KiB |
BIN
public/providers/byteplus.png
Executable file → Normal file
|
Before Width: | Height: | Size: 2.9 KiB After Width: | Height: | Size: 2.6 KiB |
|
Before Width: | Height: | Size: 24 KiB After Width: | Height: | Size: 3.9 KiB |
BIN
public/providers/fal-ai.png
Normal file
|
After Width: | Height: | Size: 6.2 KiB |
BIN
public/providers/jina-ai.png
Normal file
|
After Width: | Height: | Size: 3.8 KiB |
|
Before Width: | Height: | Size: 4.2 KiB After Width: | Height: | Size: 3.7 KiB |
|
Before Width: | Height: | Size: 3.4 KiB After Width: | Height: | Size: 10 KiB |
|
Before Width: | Height: | Size: 7.3 KiB After Width: | Height: | Size: 5.5 KiB |
|
Before Width: | Height: | Size: 7.3 KiB After Width: | Height: | Size: 5.5 KiB |
BIN
public/providers/recraft.png
Normal file
|
After Width: | Height: | Size: 4.0 KiB |
BIN
public/providers/runwayml.png
Normal file
|
After Width: | Height: | Size: 5.2 KiB |
BIN
public/providers/stability-ai.png
Normal file
|
After Width: | Height: | Size: 24 KiB |
BIN
public/providers/topaz.png
Normal file
|
After Width: | Height: | Size: 1.1 KiB |
|
Before Width: | Height: | Size: 9.8 KiB After Width: | Height: | Size: 8.7 KiB |
|
Before Width: | Height: | Size: 9.8 KiB After Width: | Height: | Size: 8.7 KiB |
|
Before Width: | Height: | Size: 41 KiB After Width: | Height: | Size: 5.9 KiB |
77
skills/9router-stt/SKILL.md
Normal file
@@ -0,0 +1,77 @@
|
||||
---
|
||||
name: 9router-stt
|
||||
description: Speech-to-text via 9Router /v1/audio/transcriptions using OpenAI Whisper / Groq / Gemini / Deepgram / AssemblyAI / NVIDIA / HuggingFace models. Use when the user wants to transcribe audio, convert speech to text, or get subtitles from audio files.
|
||||
---
|
||||
|
||||
# 9Router — Speech-to-Text
|
||||
|
||||
Requires `NINEROUTER_URL` (and `NINEROUTER_KEY` if auth enabled). See https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router/SKILL.md for setup.
|
||||
|
||||
## Discover models
|
||||
|
||||
```bash
|
||||
curl $NINEROUTER_URL/v1/models/stt | jq '.data[].id'
|
||||
```
|
||||
|
||||
`model` = STT model ID (e.g. `openai/whisper-1`, `groq/whisper-large-v3`, `deepgram/nova-3`, `gemini/gemini-2.5-flash`).
|
||||
|
||||
## Endpoint
|
||||
|
||||
`POST $NINEROUTER_URL/v1/audio/transcriptions` (OpenAI Whisper compatible, `multipart/form-data`)
|
||||
|
||||
| Field | Required | Notes |
|
||||
|---|---|---|
|
||||
| `model` | yes | from `/v1/models/stt` |
|
||||
| `file` | yes | audio file (mp3, wav, m4a, webm, ogg, flac) |
|
||||
| `language` | no | ISO-639-1 (e.g. `en`, `vi`) |
|
||||
| `prompt` | no | hint text to guide transcription |
|
||||
| `response_format` | no | `json` (default) / `text` / `verbose_json` / `srt` / `vtt` |
|
||||
| `temperature` | no | 0–1 |
|
||||
|
||||
## Examples
|
||||
|
||||
```bash
|
||||
curl -X POST "$NINEROUTER_URL/v1/audio/transcriptions" \
|
||||
-H "Authorization: Bearer $NINEROUTER_KEY" \
|
||||
-F "model=openai/whisper-1" \
|
||||
-F "file=@audio.mp3" \
|
||||
-F "language=vi"
|
||||
```
|
||||
|
||||
JS (Node):
|
||||
|
||||
```js
|
||||
import { createReadStream } from "node:fs";
|
||||
const form = new FormData();
|
||||
form.append("model", "groq/whisper-large-v3-turbo");
|
||||
form.append("file", new Blob([await (await import("node:fs/promises")).readFile("audio.mp3")]), "audio.mp3");
|
||||
const r = await fetch(`${process.env.NINEROUTER_URL}/v1/audio/transcriptions`, {
|
||||
method: "POST",
|
||||
headers: { "Authorization": `Bearer ${process.env.NINEROUTER_KEY}` },
|
||||
body: form,
|
||||
});
|
||||
const { text } = await r.json();
|
||||
console.log(text);
|
||||
```
|
||||
|
||||
## Response shape
|
||||
|
||||
Default (`response_format=json`):
|
||||
```json
|
||||
{ "text": "Xin chào, đây là bản ghi âm." }
|
||||
```
|
||||
|
||||
`verbose_json` adds `language`, `duration`, `segments[]` with timestamps.
|
||||
`srt` / `vtt` return subtitle text.
|
||||
|
||||
## Provider quirks
|
||||
|
||||
| Provider | `model` format | Notes |
|
||||
|---|---|---|
|
||||
| `openai` | `whisper-1`, `gpt-4o-transcribe`, `gpt-4o-mini-transcribe` | Native OpenAI shape |
|
||||
| `groq` | `whisper-large-v3`, `whisper-large-v3-turbo`, `distil-whisper-large-v3-en` | Fastest; OpenAI shape |
|
||||
| `gemini` | `gemini-2.5-flash`, `gemini-2.5-pro`, `gemini-2.5-flash-lite` | Server converts to `generateContent` with audio inline |
|
||||
| `deepgram` | `nova-3`, `nova-2`, `whisper-large` | Token auth; server adapts response |
|
||||
| `assemblyai` | `universal-3-pro`, `universal-2` | Async upload+poll handled server-side |
|
||||
| `nvidia` | `nvidia/parakeet-ctc-1.1b-asr` | NIM endpoint |
|
||||
| `huggingface` | `openai/whisper-large-v3`, `openai/whisper-small` | HF Inference API |
|
||||
@@ -49,6 +49,7 @@ When the user needs a specific capability, fetch that skill's `SKILL.md` from it
|
||||
| Chat / code-gen | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-chat/SKILL.md |
|
||||
| Image generation | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-image/SKILL.md |
|
||||
| Text-to-speech | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-tts/SKILL.md |
|
||||
| Speech-to-text | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-stt/SKILL.md |
|
||||
| Embeddings | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-embeddings/SKILL.md |
|
||||
| Web search | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-web-search/SKILL.md |
|
||||
| Web fetch (URL → markdown) | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-web-fetch/SKILL.md |
|
||||
|
||||
@@ -12,6 +12,7 @@ Drop-in skills for any AI agent (Claude, Cursor, ChatGPT, custom SDK). Just **co
|
||||
| Chat / code-gen | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-chat/SKILL.md |
|
||||
| Image generation | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-image/SKILL.md |
|
||||
| Text-to-speech | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-tts/SKILL.md |
|
||||
| Speech-to-text | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-stt/SKILL.md |
|
||||
| Embeddings | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-embeddings/SKILL.md |
|
||||
| Web search | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-web-search/SKILL.md |
|
||||
| Web fetch (URL → markdown) | https://raw.githubusercontent.com/decolua/9router/refs/heads/master/skills/9router-web-fetch/SKILL.md |
|
||||
|
||||
@@ -4,7 +4,7 @@ import { useState, useEffect, useCallback } from "react";
|
||||
import { Card, CardSkeleton } from "@/shared/components";
|
||||
import { CLI_TOOLS } from "@/shared/constants/cliTools";
|
||||
import { getModelsByProviderId, PROVIDER_ID_TO_ALIAS } from "@/shared/constants/models";
|
||||
import { ClaudeToolCard, CodexToolCard, DroidToolCard, OpenClawToolCard, HermesToolCard, DefaultToolCard, OpenCodeToolCard, MitmLinkCard } from "./components";
|
||||
import { ClaudeToolCard, CodexToolCard, DroidToolCard, OpenClawToolCard, HermesToolCard, DefaultToolCard, OpenCodeToolCard, CoworkToolCard, MitmLinkCard } from "./components";
|
||||
import { MITM_TOOLS } from "@/shared/constants/cliTools";
|
||||
|
||||
const CLOUD_URL = process.env.NEXT_PUBLIC_CLOUD_URL;
|
||||
@@ -17,6 +17,7 @@ const STATUS_ENDPOINTS = {
|
||||
droid: "/api/cli-tools/droid-settings",
|
||||
openclaw: "/api/cli-tools/openclaw-settings",
|
||||
hermes: "/api/cli-tools/hermes-settings",
|
||||
cowork: "/api/cli-tools/cowork-settings",
|
||||
};
|
||||
|
||||
export default function CLIToolsPageClient({ machineId }) {
|
||||
@@ -27,6 +28,8 @@ export default function CLIToolsPageClient({ machineId }) {
|
||||
const [cloudEnabled, setCloudEnabled] = useState(false);
|
||||
const [tunnelEnabled, setTunnelEnabled] = useState(false);
|
||||
const [tunnelPublicUrl, setTunnelPublicUrl] = useState("");
|
||||
const [tailscaleEnabled, setTailscaleEnabled] = useState(false);
|
||||
const [tailscaleUrl, setTailscaleUrl] = useState("");
|
||||
const [apiKeys, setApiKeys] = useState([]);
|
||||
const [toolStatuses, setToolStatuses] = useState({});
|
||||
|
||||
@@ -68,8 +71,10 @@ export default function CLIToolsPageClient({ machineId }) {
|
||||
}
|
||||
if (tunnelRes.ok) {
|
||||
const data = await tunnelRes.json();
|
||||
setTunnelEnabled(data.enabled || false);
|
||||
setTunnelPublicUrl(data.publicUrl || "");
|
||||
setTunnelEnabled(!!(data.tunnel?.enabled || data.tunnel?.settingsEnabled));
|
||||
setTunnelPublicUrl(data.tunnel?.publicUrl || "");
|
||||
setTailscaleEnabled(!!(data.tailscale?.enabled || data.tailscale?.settingsEnabled));
|
||||
setTailscaleUrl(data.tailscale?.tunnelUrl || "");
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error loading settings:", error);
|
||||
@@ -176,6 +181,22 @@ export default function CLIToolsPageClient({ machineId }) {
|
||||
return <CodexToolCard key={toolId} {...commonProps} activeProviders={getActiveProviders()} cloudEnabled={cloudEnabled} initialStatus={toolStatuses.codex} />;
|
||||
case "opencode":
|
||||
return <OpenCodeToolCard key={toolId} {...commonProps} activeProviders={getActiveProviders()} cloudEnabled={cloudEnabled} initialStatus={toolStatuses.opencode} />;
|
||||
case "cowork":
|
||||
return (
|
||||
<CoworkToolCard
|
||||
key={toolId}
|
||||
{...commonProps}
|
||||
activeProviders={getActiveProviders()}
|
||||
hasActiveProviders={hasActiveProviders}
|
||||
cloudEnabled={cloudEnabled}
|
||||
cloudUrl={CLOUD_URL}
|
||||
tunnelEnabled={tunnelEnabled}
|
||||
tunnelPublicUrl={tunnelPublicUrl}
|
||||
tailscaleEnabled={tailscaleEnabled}
|
||||
tailscaleUrl={tailscaleUrl}
|
||||
initialStatus={toolStatuses.cowork}
|
||||
/>
|
||||
);
|
||||
case "droid":
|
||||
return <DroidToolCard key={toolId} {...commonProps} activeProviders={getActiveProviders()} hasActiveProviders={hasActiveProviders} cloudEnabled={cloudEnabled} initialStatus={toolStatuses.droid} />;
|
||||
case "openclaw":
|
||||
|
||||
@@ -0,0 +1,401 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useEffect, useMemo } from "react";
|
||||
import { Card, Button, ModelSelectModal, ManualConfigModal } from "@/shared/components";
|
||||
import Image from "next/image";
|
||||
|
||||
const ENDPOINT = "/api/cli-tools/cowork-settings";
|
||||
|
||||
// True when the URL points at a local-only host (localhost / 127.0.0.1 /
// 0.0.0.0) that remote clients cannot reach; null/undefined is treated as "".
const isLocalhostUrl = (url) => /localhost|127\.0\.0\.1|0\.0\.0\.0/i.test(url || "");
|
||||
|
||||
// Remove a trailing "/v1" (with optional trailing slash) from a base URL;
// null/undefined input yields "".
const stripV1 = (url) => (url || "").replace(/\/v1\/?$/, "");
|
||||
// Normalize a base URL so it ends in exactly one "/v1": trailing slashes are
// stripped first, an existing "/v1" suffix is kept as-is, and empty or
// null/undefined input stays "".
const ensureV1 = (url) => {
  const trimmed = (url || "").replace(/\/+$/, "");
  if (!trimmed) return "";
  return /\/v1$/.test(trimmed) ? trimmed : `${trimmed}/v1`;
};
|
||||
|
||||
export default function CoworkToolCard({
|
||||
tool,
|
||||
isExpanded,
|
||||
onToggle,
|
||||
baseUrl,
|
||||
apiKeys,
|
||||
activeProviders,
|
||||
hasActiveProviders,
|
||||
cloudEnabled,
|
||||
cloudUrl,
|
||||
tunnelEnabled,
|
||||
tunnelPublicUrl,
|
||||
tailscaleEnabled,
|
||||
tailscaleUrl,
|
||||
initialStatus,
|
||||
}) {
|
||||
const [status, setStatus] = useState(initialStatus || null);
|
||||
const [checking, setChecking] = useState(false);
|
||||
const [applying, setApplying] = useState(false);
|
||||
const [restoring, setRestoring] = useState(false);
|
||||
const [message, setMessage] = useState(null);
|
||||
const [selectedApiKey, setSelectedApiKey] = useState("");
|
||||
const [selectedModels, setSelectedModels] = useState([]);
|
||||
const [modalOpen, setModalOpen] = useState(false);
|
||||
const [modelAliases, setModelAliases] = useState({});
|
||||
const [showManualConfigModal, setShowManualConfigModal] = useState(false);
|
||||
const [endpointMode, setEndpointMode] = useState("custom");
|
||||
const [customBaseUrl, setCustomBaseUrl] = useState("");
|
||||
|
||||
const endpointOptions = useMemo(() => {
|
||||
const opts = [];
|
||||
if (tunnelEnabled && tunnelPublicUrl) {
|
||||
opts.push({ value: "tunnel", label: `Tunnel - ${tunnelPublicUrl}`, url: ensureV1(tunnelPublicUrl) });
|
||||
}
|
||||
if (tailscaleEnabled && tailscaleUrl) {
|
||||
opts.push({ value: "tailscale", label: `Tailscale - ${tailscaleUrl}`, url: ensureV1(tailscaleUrl) });
|
||||
}
|
||||
if (cloudEnabled && cloudUrl) {
|
||||
opts.push({ value: "cloud", label: `Cloud - ${cloudUrl}`, url: ensureV1(cloudUrl) });
|
||||
}
|
||||
opts.push({ value: "custom", label: "Custom URL (VPS / public host)", url: "" });
|
||||
return opts;
|
||||
}, [tunnelEnabled, tunnelPublicUrl, tailscaleEnabled, tailscaleUrl, cloudEnabled, cloudUrl]);
|
||||
|
||||
useEffect(() => {
|
||||
if (apiKeys?.length > 0 && !selectedApiKey) {
|
||||
setSelectedApiKey(apiKeys[0].key);
|
||||
}
|
||||
}, [apiKeys, selectedApiKey]);
|
||||
|
||||
useEffect(() => {
|
||||
if (initialStatus) setStatus(initialStatus);
|
||||
}, [initialStatus]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isExpanded && !status) {
|
||||
checkStatus();
|
||||
fetchModelAliases();
|
||||
}
|
||||
if (isExpanded) fetchModelAliases();
|
||||
}, [isExpanded]);
|
||||
|
||||
useEffect(() => {
|
||||
if (status?.cowork?.models?.length) {
|
||||
setSelectedModels(status.cowork.models);
|
||||
}
|
||||
if (status?.cowork?.baseUrl && !customBaseUrl) {
|
||||
setCustomBaseUrl(stripV1(status.cowork.baseUrl));
|
||||
setEndpointMode("custom");
|
||||
}
|
||||
}, [status]);
|
||||
|
||||
// Auto-pick first available preset when expand if user has not set anything
|
||||
useEffect(() => {
|
||||
if (!customBaseUrl && endpointOptions[0]?.url) {
|
||||
setEndpointMode(endpointOptions[0].value);
|
||||
setCustomBaseUrl(stripV1(endpointOptions[0].url));
|
||||
}
|
||||
}, [endpointOptions]);
|
||||
|
||||
const fetchModelAliases = async () => {
|
||||
try {
|
||||
const res = await fetch("/api/models/alias");
|
||||
const data = await res.json();
|
||||
if (res.ok) setModelAliases(data.aliases || {});
|
||||
} catch (error) {
|
||||
console.log("Error fetching model aliases:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const checkStatus = async () => {
|
||||
setChecking(true);
|
||||
try {
|
||||
const res = await fetch(ENDPOINT);
|
||||
const data = await res.json();
|
||||
setStatus(data);
|
||||
} catch (error) {
|
||||
setStatus({ installed: false, error: error.message });
|
||||
} finally {
|
||||
setChecking(false);
|
||||
}
|
||||
};
|
||||
|
||||
const getEffectiveBaseUrl = () => ensureV1(customBaseUrl);
|
||||
|
||||
const getConfigStatus = () => {
|
||||
if (!status?.installed) return null;
|
||||
const url = status?.cowork?.baseUrl;
|
||||
if (!url) return "not_configured";
|
||||
if (isLocalhostUrl(url)) return "invalid";
|
||||
return status.has9Router ? "configured" : "other";
|
||||
};
|
||||
|
||||
const configStatus = getConfigStatus();
|
||||
const hasCustomSelectedApiKey = selectedApiKey && !apiKeys.some((key) => key.key === selectedApiKey);
|
||||
|
||||
const handleEndpointModeChange = (value) => {
|
||||
setEndpointMode(value);
|
||||
const opt = endpointOptions.find((o) => o.value === value);
|
||||
if (opt?.url) {
|
||||
setCustomBaseUrl(stripV1(opt.url));
|
||||
} else {
|
||||
setCustomBaseUrl("");
|
||||
}
|
||||
};
|
||||
|
||||
const handleApply = async () => {
|
||||
setMessage(null);
|
||||
const effectiveUrl = getEffectiveBaseUrl();
|
||||
|
||||
if (isLocalhostUrl(effectiveUrl)) {
|
||||
setMessage({ type: "error", text: "Localhost is not allowed. Enable Tunnel/Tailscale or use VPS." });
|
||||
return;
|
||||
}
|
||||
if (selectedModels.length === 0) {
|
||||
setMessage({ type: "error", text: "Please select at least one model" });
|
||||
return;
|
||||
}
|
||||
|
||||
setApplying(true);
|
||||
try {
|
||||
const keyToUse = selectedApiKey?.trim()
|
||||
|| (apiKeys?.length > 0 ? apiKeys[0].key : null)
|
||||
|| (!cloudEnabled ? "sk_9router" : null);
|
||||
|
||||
const res = await fetch(ENDPOINT, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
baseUrl: effectiveUrl,
|
||||
apiKey: keyToUse,
|
||||
models: selectedModels,
|
||||
}),
|
||||
});
|
||||
const data = await res.json();
|
||||
if (res.ok) {
|
||||
setMessage({ type: "success", text: "Settings applied. Quit & reopen Claude Desktop to load." });
|
||||
checkStatus();
|
||||
} else {
|
||||
setMessage({ type: "error", text: data.error || "Failed to apply settings" });
|
||||
}
|
||||
} catch (error) {
|
||||
setMessage({ type: "error", text: error.message });
|
||||
} finally {
|
||||
setApplying(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleReset = async () => {
|
||||
setRestoring(true);
|
||||
setMessage(null);
|
||||
try {
|
||||
const res = await fetch(ENDPOINT, { method: "DELETE" });
|
||||
const data = await res.json();
|
||||
if (res.ok) {
|
||||
setMessage({ type: "success", text: "Settings reset successfully" });
|
||||
setSelectedModels([]);
|
||||
checkStatus();
|
||||
} else {
|
||||
setMessage({ type: "error", text: data.error || "Failed to reset" });
|
||||
}
|
||||
} catch (error) {
|
||||
setMessage({ type: "error", text: error.message });
|
||||
} finally {
|
||||
setRestoring(false);
|
||||
}
|
||||
};
|
||||
|
||||
// Build the config file(s) shown in the ManualConfigModal: one JSON payload the
// user can paste into Claude Desktop's third-party inference config library.
// Falls back to placeholder values when no key / models are selected yet.
const getManualConfigs = () => {
  let apiKeyValue;
  if (selectedApiKey && selectedApiKey.trim()) {
    apiKeyValue = selectedApiKey;
  } else {
    // Local (non-cloud) installs accept the default key; cloud users must supply one.
    apiKeyValue = !cloudEnabled ? "sk_9router" : "<API_KEY_FROM_DASHBOARD>";
  }

  const modelNames = selectedModels.length > 0 ? selectedModels : ["provider/model-id"];

  const configBody = {
    inferenceProvider: "gateway",
    inferenceGatewayBaseUrl: getEffectiveBaseUrl() || "https://your-public-host/v1",
    inferenceGatewayApiKey: apiKeyValue,
    inferenceModels: modelNames.map((name) => ({ name })),
  };

  return [
    {
      filename: "~/Library/Application Support/Claude-3p/configLibrary/<appliedId>.json",
      content: JSON.stringify(configBody, null, 2),
    },
  ];
};
|
||||
|
||||
return (
|
||||
<Card padding="xs" className="overflow-hidden">
|
||||
<div className="flex items-start justify-between gap-3 hover:cursor-pointer sm:items-center" onClick={onToggle}>
|
||||
<div className="flex min-w-0 items-center gap-3">
|
||||
<div className="size-8 flex items-center justify-center shrink-0">
|
||||
<Image src={tool.image} alt={tool.name} width={32} height={32} className="size-8 object-contain rounded-lg" sizes="32px" onError={(e) => { e.target.style.display = "none"; }} />
|
||||
</div>
|
||||
<div className="min-w-0">
|
||||
<div className="flex min-w-0 flex-wrap items-center gap-2">
|
||||
<h3 className="font-medium text-sm">{tool.name}</h3>
|
||||
{configStatus === "configured" && <span className="px-1.5 py-0.5 text-[10px] font-medium bg-green-500/10 text-green-600 dark:text-green-400 rounded-full">Connected</span>}
|
||||
{configStatus === "not_configured" && <span className="px-1.5 py-0.5 text-[10px] font-medium bg-yellow-500/10 text-yellow-600 dark:text-yellow-400 rounded-full">Not configured</span>}
|
||||
{configStatus === "invalid" && <span className="px-1.5 py-0.5 text-[10px] font-medium bg-red-500/10 text-red-600 dark:text-red-400 rounded-full">Localhost (invalid)</span>}
|
||||
{configStatus === "other" && <span className="px-1.5 py-0.5 text-[10px] font-medium bg-blue-500/10 text-blue-600 dark:text-blue-400 rounded-full">Other</span>}
|
||||
</div>
|
||||
<p className="text-xs text-text-muted truncate">{tool.description}</p>
|
||||
</div>
|
||||
</div>
|
||||
<span className={`material-symbols-outlined text-text-muted text-[20px] transition-transform ${isExpanded ? "rotate-180" : ""}`}>expand_more</span>
|
||||
</div>
|
||||
|
||||
{isExpanded && (
|
||||
<div className="mt-4 pt-4 border-t border-border flex flex-col gap-4">
|
||||
<div className="flex items-start gap-2 p-3 bg-blue-500/10 border border-blue-500/30 rounded-lg text-xs text-blue-700 dark:text-blue-300">
|
||||
<span className="material-symbols-outlined text-[16px] mt-0.5">info</span>
|
||||
<span>Claude Cowork runs in a sandboxed VM and <b>cannot reach localhost</b>. Use Tunnel, Tailscale, or VPS public URL.</span>
|
||||
</div>
|
||||
|
||||
{checking && (
|
||||
<div className="flex items-center gap-2 text-text-muted">
|
||||
<span className="material-symbols-outlined animate-spin">progress_activity</span>
|
||||
<span>Checking Claude Cowork...</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!checking && status && !status.installed && (
|
||||
<div className="flex flex-col gap-3 p-4 bg-yellow-500/10 border border-yellow-500/30 rounded-lg">
|
||||
<div className="flex items-start gap-3">
|
||||
<span className="material-symbols-outlined text-yellow-500">warning</span>
|
||||
<div className="flex-1">
|
||||
<p className="font-medium text-yellow-600 dark:text-yellow-400">Claude Desktop (Cowork mode) not detected</p>
|
||||
<p className="text-sm text-text-muted">Open Claude Desktop → Help → Troubleshooting → Enable Developer mode → Configure third-party inference, then return here.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div className="pl-9">
|
||||
<Button variant="secondary" size="sm" onClick={() => setShowManualConfigModal(true)} className="!bg-yellow-500/20 !border-yellow-500/40 !text-yellow-700 dark:!text-yellow-300 hover:!bg-yellow-500/30">
|
||||
<span className="material-symbols-outlined text-[18px] mr-1">content_copy</span>
|
||||
Manual Config
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!checking && status?.installed && (
|
||||
<>
|
||||
<div className="flex flex-col gap-2">
|
||||
{status?.cowork?.baseUrl && (
|
||||
<div className="grid grid-cols-1 gap-1.5 sm:grid-cols-[8rem_auto_1fr_auto] sm:items-center sm:gap-2">
|
||||
<span className="text-xs font-semibold text-text-main sm:text-right sm:text-sm">Current</span>
|
||||
<span className="material-symbols-outlined hidden text-text-muted text-[14px] sm:inline">arrow_forward</span>
|
||||
<span className="min-w-0 truncate rounded bg-surface/40 px-2 py-2 text-xs text-text-muted sm:py-1.5">
|
||||
{status.cowork.baseUrl}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="grid grid-cols-1 gap-1.5 sm:grid-cols-[8rem_auto_1fr_auto] sm:items-center sm:gap-2">
|
||||
<span className="text-xs font-semibold text-text-main sm:text-right sm:text-sm">Endpoint Mode</span>
|
||||
<span className="material-symbols-outlined hidden text-text-muted text-[14px] sm:inline">arrow_forward</span>
|
||||
<select
|
||||
value={endpointMode}
|
||||
onChange={(e) => handleEndpointModeChange(e.target.value)}
|
||||
className="w-full min-w-0 px-2 py-2 bg-surface rounded text-xs border border-border focus:outline-none focus:ring-1 focus:ring-primary/50 sm:py-1.5"
|
||||
>
|
||||
{endpointOptions.map((opt) => (
|
||||
<option key={opt.value} value={opt.value}>{opt.label}</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div className="grid grid-cols-1 gap-1.5 sm:grid-cols-[8rem_auto_1fr_auto] sm:items-center sm:gap-2">
|
||||
<span className="text-xs font-semibold text-text-main sm:text-right sm:text-sm">Base URL</span>
|
||||
<span className="material-symbols-outlined hidden text-text-muted text-[14px] sm:inline">arrow_forward</span>
|
||||
<input
|
||||
type="text"
|
||||
value={getEffectiveBaseUrl()}
|
||||
onChange={(e) => setCustomBaseUrl(stripV1(e.target.value))}
|
||||
placeholder="https://your-host.com/v1"
|
||||
className="w-full min-w-0 px-2 py-2 bg-surface rounded border border-border text-xs focus:outline-none focus:ring-1 focus:ring-primary/50 sm:py-1.5"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="grid grid-cols-1 gap-1.5 sm:grid-cols-[8rem_auto_1fr_auto] sm:items-center sm:gap-2">
|
||||
<span className="text-xs font-semibold text-text-main sm:text-right sm:text-sm">API Key</span>
|
||||
<span className="material-symbols-outlined hidden text-text-muted text-[14px] sm:inline">arrow_forward</span>
|
||||
{apiKeys.length > 0 || selectedApiKey ? (
|
||||
<select value={selectedApiKey} onChange={(e) => setSelectedApiKey(e.target.value)} className="w-full min-w-0 px-2 py-2 bg-surface rounded text-xs border border-border focus:outline-none focus:ring-1 focus:ring-primary/50 sm:py-1.5">
|
||||
{hasCustomSelectedApiKey && <option value={selectedApiKey}>{selectedApiKey}</option>}
|
||||
{apiKeys.map((key) => <option key={key.id} value={key.key}>{key.key}</option>)}
|
||||
</select>
|
||||
) : (
|
||||
<span className="min-w-0 rounded bg-surface/40 px-2 py-2 text-xs text-text-muted sm:py-1.5">
|
||||
{cloudEnabled ? "No API keys - Create one in Keys page" : "sk_9router (default)"}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="grid grid-cols-1 gap-1.5 sm:grid-cols-[8rem_auto_1fr] sm:items-start sm:gap-2">
|
||||
<span className="w-32 shrink-0 text-sm font-semibold text-text-main text-right pt-1">Models</span>
|
||||
<span className="material-symbols-outlined text-text-muted text-[14px] mt-1.5">arrow_forward</span>
|
||||
<div className="flex-1 flex flex-col gap-2">
|
||||
<div className="flex flex-wrap gap-1.5 min-h-[28px] px-2 py-1.5 bg-surface rounded border border-border">
|
||||
{selectedModels.length === 0 ? (
|
||||
<span className="text-xs text-text-muted">No models selected</span>
|
||||
) : (
|
||||
selectedModels.map((m) => (
|
||||
<span key={m} className="inline-flex items-center gap-1 px-2 py-0.5 rounded text-xs bg-black/5 dark:bg-white/5 text-text-muted border border-transparent hover:border-border">
|
||||
{m}
|
||||
<button onClick={() => setSelectedModels((prev) => prev.filter((x) => x !== m))} className="ml-0.5 hover:text-red-500">
|
||||
<span className="material-symbols-outlined text-[12px]">close</span>
|
||||
</button>
|
||||
</span>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
<button onClick={() => setModalOpen(true)} disabled={!hasActiveProviders} className={`self-start px-2 py-1 rounded border text-xs transition-colors ${hasActiveProviders ? "bg-surface border-border text-text-main hover:border-primary cursor-pointer" : "opacity-50 cursor-not-allowed border-border"}`}>Add Model</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{message && (
|
||||
<div className={`flex items-center gap-2 px-2 py-1.5 rounded text-xs ${message.type === "success" ? "bg-green-500/10 text-green-600" : "bg-red-500/10 text-red-600"}`}>
|
||||
<span className="material-symbols-outlined text-[14px]">{message.type === "success" ? "check_circle" : "error"}</span>
|
||||
<span>{message.text}</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex flex-col sm:flex-row sm:items-center gap-2">
|
||||
<Button variant="primary" size="sm" onClick={handleApply} disabled={selectedModels.length === 0} loading={applying} className="w-full sm:w-auto">
|
||||
<span className="material-symbols-outlined text-[14px] mr-1">save</span>Apply
|
||||
</Button>
|
||||
<Button variant="outline" size="sm" onClick={handleReset} disabled={!status.has9Router} loading={restoring} className="w-full sm:w-auto">
|
||||
<span className="material-symbols-outlined text-[14px] mr-1">restore</span>Reset
|
||||
</Button>
|
||||
<Button variant="ghost" size="sm" onClick={() => setShowManualConfigModal(true)} className="w-full sm:w-auto">
|
||||
<span className="material-symbols-outlined text-[14px] mr-1">content_copy</span>Manual Config
|
||||
</Button>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<ModelSelectModal
|
||||
isOpen={modalOpen}
|
||||
onClose={() => setModalOpen(false)}
|
||||
onSelect={(model) => {
|
||||
if (!selectedModels.includes(model.value)) {
|
||||
setSelectedModels([...selectedModels, model.value]);
|
||||
}
|
||||
setModalOpen(false);
|
||||
}}
|
||||
selectedModel={null}
|
||||
activeProviders={activeProviders}
|
||||
modelAliases={modelAliases}
|
||||
title="Add Model for Claude Cowork"
|
||||
/>
|
||||
|
||||
<ManualConfigModal
|
||||
isOpen={showManualConfigModal}
|
||||
onClose={() => setShowManualConfigModal(false)}
|
||||
title="Claude Cowork - Manual Configuration"
|
||||
configs={getManualConfigs()}
|
||||
/>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
@@ -6,6 +6,7 @@ export { default as HermesToolCard } from "./HermesToolCard";
|
||||
export { default as DefaultToolCard } from "./DefaultToolCard";
|
||||
export { default as AntigravityToolCard } from "./AntigravityToolCard";
|
||||
export { default as OpenCodeToolCard } from "./OpenCodeToolCard";
|
||||
export { default as CoworkToolCard } from "./CoworkToolCard";
|
||||
export { default as CopilotToolCard } from "./CopilotToolCard";
|
||||
export { default as MitmServerCard } from "./MitmServerCard";
|
||||
export { default as MitmToolCard } from "./MitmToolCard";
|
||||
|
||||
@@ -12,6 +12,7 @@ import ConnectionsCard from "@/app/(dashboard)/dashboard/providers/components/Co
|
||||
import ModelsCard from "@/app/(dashboard)/dashboard/providers/components/ModelsCard";
|
||||
import { TTS_PROVIDER_CONFIG } from "@/shared/constants/ttsProviders";
|
||||
import { getTtsVoicesForModel } from "open-sse/config/ttsModels.js";
|
||||
import { GOOGLE_TTS_LANGUAGES } from "open-sse/config/googleTtsLanguages.js";
|
||||
|
||||
// Shared row layout — defined outside components to avoid re-mount on re-render
|
||||
function Row({ label, children }) {
|
||||
@@ -92,13 +93,6 @@ const KIND_EXAMPLE_CONFIG = {
|
||||
extraBody: { prompt: "Describe this image in detail" },
|
||||
defaultResponse: `{\n "text": "A cat sitting on a windowsill...",\n "model": "..."\n}`,
|
||||
},
|
||||
stt: {
|
||||
inputLabel: "Audio URL",
|
||||
inputPlaceholder: "https://example.com/audio.mp3",
|
||||
defaultInput: "",
|
||||
bodyKey: "url",
|
||||
defaultResponse: `{\n "text": "Hello world...",\n "model": "..."\n}`,
|
||||
},
|
||||
video: {
|
||||
inputLabel: "Prompt",
|
||||
inputPlaceholder: "A serene lake at sunset",
|
||||
@@ -394,6 +388,8 @@ function TtsExampleCard({ providerId }) {
|
||||
const [modalSearch, setModalSearch] = useState("");
|
||||
const [modalError, setModalError] = useState("");
|
||||
const [byLang, setByLang] = useState({});
|
||||
// Language hint (e.g. Gemini): controls the spoken language without affecting voice selection
|
||||
const [languageHint, setLanguageHint] = useState("");
|
||||
|
||||
useEffect(() => {
|
||||
setLocalEndpoint(window.location.origin);
|
||||
@@ -514,10 +510,15 @@ function TtsExampleCard({ providerId }) {
|
||||
return "";
|
||||
})();
|
||||
|
||||
const ttsBody = (() => {
|
||||
const b = { model: modelFull, input };
|
||||
if (config.hasLanguageHint && languageHint) b.language = languageHint;
|
||||
return b;
|
||||
})();
|
||||
const curlSnippet = `curl -X POST ${endpoint}/v1/audio/speech${responseFormat === "json" ? "?response_format=json" : ""} \\
|
||||
-H "Content-Type: application/json" \\
|
||||
-H "Authorization: Bearer ${apiKey || "YOUR_KEY"}" \\
|
||||
-d '{"model": "${modelFull}", "input": "${input}"}' \\
|
||||
-d '${JSON.stringify(ttsBody)}' \\
|
||||
${responseFormat === "json" ? "" : "--output speech.mp3"}`;
|
||||
|
||||
const handleRun = async () => {
|
||||
@@ -534,7 +535,7 @@ function TtsExampleCard({ providerId }) {
|
||||
const res = await fetch(url, {
|
||||
method: "POST",
|
||||
headers,
|
||||
body: JSON.stringify({ model: modelFull, input: input.trim() }),
|
||||
body: JSON.stringify({ ...ttsBody, input: input.trim() }),
|
||||
});
|
||||
setLatency(Date.now() - start);
|
||||
if (!res.ok) {
|
||||
@@ -608,6 +609,22 @@ function TtsExampleCard({ providerId }) {
|
||||
</Row>
|
||||
)}
|
||||
|
||||
{/* Language hint dropdown (Gemini) — sends body.language to guide pronunciation */}
|
||||
{config.hasLanguageHint && (
|
||||
<Row label="Language">
|
||||
<select
|
||||
value={languageHint}
|
||||
onChange={(e) => setLanguageHint(e.target.value)}
|
||||
className="w-full px-3 py-1.5 text-sm border border-border rounded-lg bg-background focus:outline-none focus:border-primary"
|
||||
>
|
||||
<option value="">Auto-detect</option>
|
||||
{GOOGLE_TTS_LANGUAGES.map((l) => (
|
||||
<option key={l.id} value={l.name}>{l.name}</option>
|
||||
))}
|
||||
</select>
|
||||
</Row>
|
||||
)}
|
||||
|
||||
{/* Language row + Browse button (edge-tts, local-device, elevenlabs) */}
|
||||
{config.hasBrowseButton && (
|
||||
<Row label="Language">
|
||||
@@ -886,7 +903,7 @@ function GenericExampleCard({ providerId, kind }) {
|
||||
// Get models for this kind (e.g., type="image")
|
||||
const kindModels = getModelsByProviderId(providerId).filter((m) => m.type === kind);
|
||||
// Kinds that need a model identifier in the request (image/video/music)
|
||||
const KIND_NEEDS_MODEL = new Set(["image", "video", "music", "stt", "imageToText"]);
|
||||
const KIND_NEEDS_MODEL = new Set(["image", "video", "music", "imageToText"]);
|
||||
const needsModel = KIND_NEEDS_MODEL.has(kind);
|
||||
const allowManualModel = needsModel && kindModels.length === 0;
|
||||
const [selectedModel, setSelectedModel] = useState(kindModels[0]?.id ?? "");
|
||||
@@ -1344,6 +1361,288 @@ function GenericExampleCard({ providerId, kind }) {
|
||||
);
|
||||
}
|
||||
|
||||
// ─── STT Example Card ────────────────────────────────────────────────────────
|
||||
function SttExampleCard({ providerId }) {
|
||||
const providerAlias = getProviderAlias(providerId);
|
||||
const builtinSttModels = getModelsByProviderId(providerId).filter((m) => m.type === "stt");
|
||||
const [customSttModels, setCustomSttModels] = useState([]);
|
||||
const sttModels = [...builtinSttModels, ...customSttModels];
|
||||
|
||||
const [selectedModel, setSelectedModel] = useState(builtinSttModels[0]?.id ?? "");
|
||||
const selectedModelObj = sttModels.find((m) => m.id === selectedModel);
|
||||
const allowedParams = Array.isArray(selectedModelObj?.params) ? selectedModelObj.params : [];
|
||||
|
||||
const [audioFile, setAudioFile] = useState(null);
|
||||
const [language, setLanguage] = useState("");
|
||||
const [prompt, setPrompt] = useState("");
|
||||
const [responseFormat, setResponseFormat] = useState("json");
|
||||
const [temperature, setTemperature] = useState("");
|
||||
const [apiKey, setApiKey] = useState("");
|
||||
const [useTunnel, setUseTunnel] = useState(false);
|
||||
const [localEndpoint, setLocalEndpoint] = useState("");
|
||||
const [tunnelEndpoint, setTunnelEndpoint] = useState("");
|
||||
const [result, setResult] = useState(null);
|
||||
const [latency, setLatency] = useState(null);
|
||||
const [running, setRunning] = useState(false);
|
||||
const [error, setError] = useState("");
|
||||
const { copied: copiedCurl, copy: copyCurl } = useCopyToClipboard();
|
||||
const { copied: copiedRes, copy: copyRes } = useCopyToClipboard();
|
||||
|
||||
useEffect(() => {
|
||||
setLocalEndpoint(window.location.origin);
|
||||
fetch("/api/keys")
|
||||
.then((r) => r.json())
|
||||
.then((d) => { setApiKey((d.keys || []).find((k) => k.isActive !== false)?.key || ""); })
|
||||
.catch(() => {});
|
||||
fetch("/api/tunnel/status")
|
||||
.then((r) => r.json())
|
||||
.then((d) => { if (d.publicUrl) setTunnelEndpoint(d.publicUrl); })
|
||||
.catch(() => {});
|
||||
const loadCustom = () => {
|
||||
fetch("/api/models/custom", { cache: "no-store" })
|
||||
.then((r) => r.json())
|
||||
.then((d) => {
|
||||
const list = (d.models || []).filter((m) => m.type === "stt" && m.providerAlias === providerAlias);
|
||||
setCustomSttModels(list);
|
||||
})
|
||||
.catch(() => {});
|
||||
};
|
||||
loadCustom();
|
||||
window.addEventListener("focus", loadCustom);
|
||||
window.addEventListener("customModelChanged", loadCustom);
|
||||
return () => {
|
||||
window.removeEventListener("focus", loadCustom);
|
||||
window.removeEventListener("customModelChanged", loadCustom);
|
||||
};
|
||||
}, [providerAlias]);
|
||||
|
||||
const endpoint = useTunnel ? tunnelEndpoint : localEndpoint;
|
||||
const modelFull = selectedModel ? `${providerAlias}/${selectedModel}` : "";
|
||||
|
||||
const curlSnippet = `curl -X POST ${endpoint}/v1/audio/transcriptions \\
|
||||
-H "Authorization: Bearer ${apiKey || "YOUR_KEY"}" \\
|
||||
-F "file=@${audioFile?.name || "audio.mp3"}" \\
|
||||
-F "model=${modelFull}"${allowedParams.includes("language") && language ? ` \\\n -F "language=${language}"` : ""}${allowedParams.includes("response_format") ? ` \\\n -F "response_format=${responseFormat}"` : ""}${allowedParams.includes("temperature") && temperature ? ` \\\n -F "temperature=${temperature}"` : ""}${allowedParams.includes("prompt") && prompt ? ` \\\n -F "prompt=${prompt}"` : ""}`;
|
||||
|
||||
const handleRun = async () => {
|
||||
if (!audioFile || !modelFull) return;
|
||||
setRunning(true);
|
||||
setError("");
|
||||
setResult(null);
|
||||
const start = Date.now();
|
||||
try {
|
||||
const fd = new FormData();
|
||||
fd.append("file", audioFile);
|
||||
fd.append("model", modelFull);
|
||||
if (allowedParams.includes("language") && language) fd.append("language", language);
|
||||
if (allowedParams.includes("response_format")) fd.append("response_format", responseFormat);
|
||||
if (allowedParams.includes("temperature") && temperature) fd.append("temperature", temperature);
|
||||
if (allowedParams.includes("prompt") && prompt) fd.append("prompt", prompt);
|
||||
|
||||
const headers = {};
|
||||
if (apiKey) headers["Authorization"] = `Bearer ${apiKey}`;
|
||||
const res = await fetch("/api/v1/audio/transcriptions", { method: "POST", headers, body: fd });
|
||||
setLatency(Date.now() - start);
|
||||
const ct = res.headers.get("content-type") || "";
|
||||
const data = ct.includes("application/json") ? await res.json() : await res.text();
|
||||
if (!res.ok) {
|
||||
setError(data?.error?.message || data?.error || data || `HTTP ${res.status}`);
|
||||
return;
|
||||
}
|
||||
setResult(data);
|
||||
} catch (e) {
|
||||
setError(e.message || "Network error");
|
||||
} finally {
|
||||
setRunning(false);
|
||||
}
|
||||
};
|
||||
|
||||
const resultStr = typeof result === "string" ? result : (result ? JSON.stringify(result, null, 2) : `{\n "text": "Hello world..."\n}`);
|
||||
|
||||
return (
|
||||
<Card>
|
||||
<h2 className="text-lg font-semibold mb-4">Example</h2>
|
||||
<div className="flex flex-col gap-2.5">
|
||||
{/* Model */}
|
||||
{sttModels.length > 0 ? (
|
||||
<Row label="Model">
|
||||
<select
|
||||
value={selectedModel}
|
||||
onChange={(e) => setSelectedModel(e.target.value)}
|
||||
className="w-full px-3 py-1.5 text-sm border border-border rounded-lg bg-background focus:outline-none focus:border-primary"
|
||||
>
|
||||
{sttModels.map((m) => (
|
||||
<option key={m.id} value={m.id}>{m.name || m.id}</option>
|
||||
))}
|
||||
</select>
|
||||
</Row>
|
||||
) : (
|
||||
<Row label="Model">
|
||||
<input
|
||||
value={selectedModel}
|
||||
onChange={(e) => setSelectedModel(e.target.value)}
|
||||
placeholder="Enter model id"
|
||||
className="w-full px-3 py-1.5 text-sm border border-border rounded-lg bg-background focus:outline-none focus:border-primary font-mono"
|
||||
/>
|
||||
</Row>
|
||||
)}
|
||||
|
||||
{/* Endpoint */}
|
||||
<Row label="Endpoint">
|
||||
<div className="flex w-full flex-col gap-2 sm:w-auto sm:flex-row sm:items-center">
|
||||
<span className="w-full min-w-0 flex-1 px-3 py-1.5 text-sm font-mono text-text-main bg-sidebar rounded-lg truncate">
|
||||
{endpoint}/v1/audio/transcriptions
|
||||
</span>
|
||||
{tunnelEndpoint && (
|
||||
<button
|
||||
onClick={() => setUseTunnel((v) => !v)}
|
||||
title={useTunnel ? "Using tunnel" : "Using local"}
|
||||
className={`flex items-center gap-1 text-xs px-2 py-1.5 rounded-lg border shrink-0 transition-colors ${
|
||||
useTunnel ? "border-primary/40 bg-primary/10 text-primary" : "border-border text-text-muted hover:text-primary"
|
||||
}`}
|
||||
>
|
||||
<span className="material-symbols-outlined text-[14px]">wifi_tethering</span>
|
||||
Tunnel
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</Row>
|
||||
|
||||
{/* API Key */}
|
||||
<Row label="API Key">
|
||||
<span className="px-3 py-1.5 text-sm font-mono text-text-main bg-sidebar rounded-lg truncate block">
|
||||
{apiKey ? `${apiKey.slice(0, 8)}${"\u2022".repeat(Math.min(20, apiKey.length - 8))}` : <span className="text-text-muted italic">No key configured</span>}
|
||||
</span>
|
||||
</Row>
|
||||
|
||||
{/* Audio file */}
|
||||
<Row label="Audio File">
|
||||
<div className="flex flex-col gap-2">
|
||||
<input
|
||||
type="file"
|
||||
accept="audio/*,video/mp4,.m4a,.mp3,.wav,.ogg,.flac,.webm,.opus"
|
||||
onChange={(e) => setAudioFile(e.target.files?.[0] || null)}
|
||||
className="w-full text-xs text-text-muted file:mr-2 file:py-1 file:px-2.5 file:rounded-lg file:border file:border-border file:bg-background file:text-text-main hover:file:bg-sidebar file:cursor-pointer"
|
||||
/>
|
||||
{audioFile && (
|
||||
<span className="text-xs text-text-muted font-mono">
|
||||
{audioFile.name} · {(audioFile.size / 1024).toFixed(1)} KB
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</Row>
|
||||
|
||||
{/* Language (if model supports) */}
|
||||
{allowedParams.includes("language") && (
|
||||
<Row label="Language">
|
||||
<input
|
||||
value={language}
|
||||
onChange={(e) => setLanguage(e.target.value)}
|
||||
placeholder="e.g. en, vi, ja (auto-detect if empty)"
|
||||
className="w-full px-3 py-1.5 text-sm border border-border rounded-lg bg-background focus:outline-none focus:border-primary font-mono"
|
||||
/>
|
||||
</Row>
|
||||
)}
|
||||
|
||||
{/* Prompt (if model supports) */}
|
||||
{allowedParams.includes("prompt") && (
|
||||
<Row label="Prompt">
|
||||
<input
|
||||
value={prompt}
|
||||
onChange={(e) => setPrompt(e.target.value)}
|
||||
placeholder="optional context to improve accuracy"
|
||||
className="w-full px-3 py-1.5 text-sm border border-border rounded-lg bg-background focus:outline-none focus:border-primary"
|
||||
/>
|
||||
</Row>
|
||||
)}
|
||||
|
||||
{/* Temperature (if model supports) */}
|
||||
{allowedParams.includes("temperature") && (
|
||||
<Row label="Temperature">
|
||||
<input
|
||||
type="number"
|
||||
step="0.1"
|
||||
min="0"
|
||||
max="1"
|
||||
value={temperature}
|
||||
onChange={(e) => setTemperature(e.target.value)}
|
||||
placeholder="0 - 1 (default 0)"
|
||||
className="w-full px-3 py-1.5 text-sm border border-border rounded-lg bg-background focus:outline-none focus:border-primary"
|
||||
/>
|
||||
</Row>
|
||||
)}
|
||||
|
||||
{/* Response format (if model supports) */}
|
||||
{allowedParams.includes("response_format") && (
|
||||
<Row label="Response Format">
|
||||
<select
|
||||
value={responseFormat}
|
||||
onChange={(e) => setResponseFormat(e.target.value)}
|
||||
className="w-full px-3 py-1.5 text-sm border border-border rounded-lg bg-background focus:outline-none focus:border-primary"
|
||||
>
|
||||
<option value="json">json</option>
|
||||
<option value="text">text</option>
|
||||
<option value="srt">srt</option>
|
||||
<option value="verbose_json">verbose_json</option>
|
||||
<option value="vtt">vtt</option>
|
||||
</select>
|
||||
</Row>
|
||||
)}
|
||||
|
||||
{/* Curl + Run */}
|
||||
<div className="mt-1">
|
||||
<div className="flex flex-col gap-2 sm:flex-row sm:items-center sm:justify-between mb-1.5">
|
||||
<span className="text-xs font-semibold text-text-muted uppercase tracking-wider">Request</span>
|
||||
<div className="flex w-full flex-col gap-2 sm:w-auto sm:flex-row sm:items-center">
|
||||
<button
|
||||
onClick={() => copyCurl(curlSnippet)}
|
||||
className="inline-flex items-center gap-1 text-xs text-text-muted hover:text-primary transition-colors"
|
||||
>
|
||||
<span className="material-symbols-outlined text-[14px]">{copiedCurl ? "check" : "content_copy"}</span>
|
||||
{copiedCurl ? "Copied" : "Copy"}
|
||||
</button>
|
||||
<button
|
||||
onClick={handleRun}
|
||||
disabled={running || !audioFile || !modelFull}
|
||||
className="flex w-full sm:w-auto items-center justify-center gap-1.5 px-3 py-1 rounded-lg bg-primary text-white text-xs font-medium hover:bg-primary/90 transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span className="material-symbols-outlined text-[14px]" style={running ? { animation: "spin 1s linear infinite" } : undefined}>
|
||||
play_arrow
|
||||
</span>
|
||||
{running ? "Transcribing..." : "Run"}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<pre className="bg-sidebar rounded-lg px-3 py-2.5 text-xs font-mono text-text-main overflow-x-auto whitespace-pre-wrap break-all">{curlSnippet}</pre>
|
||||
</div>
|
||||
|
||||
{error && <p className="text-xs text-red-500 break-words">{error}</p>}
|
||||
|
||||
{/* Response */}
|
||||
<div>
|
||||
<div className="flex flex-col gap-2 sm:flex-row sm:items-center sm:justify-between mb-1.5">
|
||||
<span className="text-xs font-semibold text-text-muted uppercase tracking-wider">
|
||||
Response {result && latency && <span className="font-normal normal-case">⚡ {latency}ms</span>}
|
||||
</span>
|
||||
{result && (
|
||||
<button
|
||||
onClick={() => copyRes(resultStr)}
|
||||
className="inline-flex items-center gap-1 text-xs text-text-muted hover:text-primary transition-colors"
|
||||
>
|
||||
<span className="material-symbols-outlined text-[14px]">{copiedRes ? "check" : "content_copy"}</span>
|
||||
{copiedRes ? "Copied" : "Copy"}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<pre className="bg-sidebar rounded-lg px-3 py-2.5 text-xs font-mono text-text-main overflow-x-auto whitespace-pre-wrap break-all opacity-70">
|
||||
{resultStr}
|
||||
</pre>
|
||||
</div>
|
||||
</div>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
// MediaProviderDetailPage
|
||||
export default function MediaProviderDetailPage() {
|
||||
const { kind, id } = useParams();
|
||||
@@ -1502,11 +1801,12 @@ export default function MediaProviderDetailPage() {
|
||||
)}
|
||||
|
||||
{/* Provider Info — config-driven, supports searchConfig, fetchConfig, ttsConfig, embeddingConfig, searchViaChat */}
|
||||
{!isCustom && (provider.searchConfig || provider.fetchConfig || provider.ttsConfig || provider.embeddingConfig || provider.searchViaChat) && (
|
||||
{!isCustom && (provider.searchConfig || provider.fetchConfig || provider.ttsConfig || provider.sttConfig || provider.embeddingConfig || provider.searchViaChat) && (
|
||||
<ProviderInfoCard
|
||||
config={
|
||||
kind === "webFetch" ? provider.fetchConfig
|
||||
: kind === "tts" ? provider.ttsConfig
|
||||
: kind === "stt" ? provider.sttConfig
|
||||
: kind === "embedding" ? provider.embeddingConfig
|
||||
: provider.searchConfig || { mode: "chat-completions", defaultModel: provider.searchViaChat?.defaultModel, pricingUrl: provider.searchViaChat?.pricingUrl, freeTier: provider.searchViaChat?.freeTier }
|
||||
}
|
||||
@@ -1520,6 +1820,7 @@ export default function MediaProviderDetailPage() {
|
||||
<EmbeddingExampleCard providerId={id} customAlias={customNode?.prefix} />
|
||||
)}
|
||||
{kind === "tts" && <TtsExampleCard providerId={id} />}
|
||||
{kind === "stt" && !isCustom && <SttExampleCard providerId={id} />}
|
||||
{!isCustom && KIND_EXAMPLE_CONFIG[kind] && <GenericExampleCard providerId={id} kind={kind} />}
|
||||
|
||||
{isCustom && (
|
||||
|
||||
@@ -15,15 +15,22 @@ export default function AddCustomModelModal({ isOpen, providerAlias, providerDis
|
||||
if (isOpen) { setModelId(""); setTestStatus(null); setTestError(""); }
|
||||
}, [isOpen]);
|
||||
|
||||
// Strip provider's own alias prefix (e.g. "cc/model" -> "model" for cc provider)
|
||||
const stripAlias = (id) => {
|
||||
const prefix = `${providerAlias}/`;
|
||||
return id.startsWith(prefix) ? id.slice(prefix.length) : id;
|
||||
};
|
||||
|
||||
const handleTest = async () => {
|
||||
if (!modelId.trim()) return;
|
||||
const cleanId = stripAlias(modelId.trim());
|
||||
if (!cleanId) return;
|
||||
setTestStatus("testing");
|
||||
setTestError("");
|
||||
try {
|
||||
const res = await fetch("/api/models/test", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ model: `${providerAlias}/${modelId.trim()}` }),
|
||||
body: JSON.stringify({ model: `${providerAlias}/${cleanId}` }),
|
||||
});
|
||||
const data = await res.json();
|
||||
setTestStatus(data.ok ? "ok" : "error");
|
||||
@@ -35,10 +42,11 @@ export default function AddCustomModelModal({ isOpen, providerAlias, providerDis
|
||||
};
|
||||
|
||||
const handleSave = async () => {
|
||||
if (!modelId.trim() || saving) return;
|
||||
const cleanId = stripAlias(modelId.trim());
|
||||
if (!cleanId || saving) return;
|
||||
setSaving(true);
|
||||
try {
|
||||
await onSave(modelId.trim());
|
||||
await onSave(cleanId);
|
||||
} finally {
|
||||
setSaving(false);
|
||||
}
|
||||
@@ -74,7 +82,7 @@ export default function AddCustomModelModal({ isOpen, providerAlias, providerDis
|
||||
</Button>
|
||||
</div>
|
||||
<p className="text-xs text-text-muted mt-1">
|
||||
Sent to provider as: <code className="font-mono bg-sidebar px-1 rounded">{modelId.trim() || "model-id"}</code>
|
||||
Sent to provider as: <code className="font-mono bg-sidebar px-1 rounded">{stripAlias(modelId.trim()) || "model-id"}</code>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import PropTypes from "prop-types";
|
||||
|
||||
export default function ModelRow({ model, fullModel, alias, copied, onCopy, testStatus, isCustom, isFree, onDeleteAlias, onTest, isTesting }) {
|
||||
export default function ModelRow({ model, fullModel, alias, copied, onCopy, testStatus, isCustom, isFree, onDeleteAlias, onTest, isTesting, onDisable }) {
|
||||
const borderColor = testStatus === "ok"
|
||||
? "border-green-500/40"
|
||||
: testStatus === "error"
|
||||
@@ -55,7 +55,7 @@ export default function ModelRow({ model, fullModel, alias, copied, onCopy, test
|
||||
{copied === `model-${model.id}` ? "Copied!" : "Copy"}
|
||||
</span>
|
||||
</div>
|
||||
{isCustom && (
|
||||
{isCustom ? (
|
||||
<button
|
||||
onClick={onDeleteAlias}
|
||||
className="ml-auto rounded p-0.5 text-text-muted opacity-100 transition-opacity hover:bg-red-500/10 hover:text-red-500 sm:opacity-0 sm:group-hover:opacity-100"
|
||||
@@ -63,7 +63,15 @@ export default function ModelRow({ model, fullModel, alias, copied, onCopy, test
|
||||
>
|
||||
<span className="material-symbols-outlined text-sm">close</span>
|
||||
</button>
|
||||
)}
|
||||
) : onDisable ? (
|
||||
<button
|
||||
onClick={onDisable}
|
||||
className="ml-auto rounded p-0.5 text-text-muted opacity-100 transition-opacity hover:bg-red-500/10 hover:text-red-500 sm:opacity-0 sm:group-hover:opacity-100"
|
||||
title="Disable this model"
|
||||
>
|
||||
<span className="material-symbols-outlined text-sm">close</span>
|
||||
</button>
|
||||
) : null}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
@@ -83,4 +91,5 @@ ModelRow.propTypes = {
|
||||
onDeleteAlias: PropTypes.func,
|
||||
onTest: PropTypes.func,
|
||||
isTesting: PropTypes.bool,
|
||||
onDisable: PropTypes.func,
|
||||
};
|
||||
|
||||
@@ -46,6 +46,7 @@ export default function ProviderDetailPage() {
|
||||
const [thinkingMode, setThinkingMode] = useState("auto");
|
||||
const [suggestedModels, setSuggestedModels] = useState([]);
|
||||
const [kiloFreeModels, setKiloFreeModels] = useState([]);
|
||||
const [disabledModelIds, setDisabledModelIds] = useState([]);
|
||||
const { copied, copy } = useCopyToClipboard();
|
||||
|
||||
const providerInfo = providerNode
|
||||
@@ -74,6 +75,62 @@ export default function ProviderDetailPage() {
|
||||
? (providerNode?.prefix || providerId)
|
||||
: providerAlias;
|
||||
|
||||
const fetchDisabledModels = useCallback(async () => {
|
||||
try {
|
||||
const res = await fetch(`/api/models/disabled?providerAlias=${encodeURIComponent(providerStorageAlias)}`, { cache: "no-store" });
|
||||
const data = await res.json();
|
||||
if (res.ok) setDisabledModelIds(data.ids || []);
|
||||
} catch (error) {
|
||||
console.log("Error fetching disabled models:", error);
|
||||
}
|
||||
}, [providerStorageAlias]);
|
||||
|
||||
const handleDisableModel = async (modelId) => {
|
||||
try {
|
||||
const res = await fetch("/api/models/disabled", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ providerAlias: providerStorageAlias, ids: [modelId] }),
|
||||
});
|
||||
if (res.ok) await fetchDisabledModels();
|
||||
} catch (error) {
|
||||
console.log("Error disabling model:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const handleEnableModel = async (modelId) => {
|
||||
try {
|
||||
const res = await fetch(`/api/models/disabled?providerAlias=${encodeURIComponent(providerStorageAlias)}&id=${encodeURIComponent(modelId)}`, { method: "DELETE" });
|
||||
if (res.ok) await fetchDisabledModels();
|
||||
} catch (error) {
|
||||
console.log("Error enabling model:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDisableAll = async (ids) => {
|
||||
if (!ids.length) return;
|
||||
if (!confirm(`Disable all ${ids.length} model(s)?`)) return;
|
||||
try {
|
||||
const res = await fetch("/api/models/disabled", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ providerAlias: providerStorageAlias, ids }),
|
||||
});
|
||||
if (res.ok) await fetchDisabledModels();
|
||||
} catch (error) {
|
||||
console.log("Error disabling all models:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const handleEnableAll = async () => {
|
||||
try {
|
||||
const res = await fetch(`/api/models/disabled?providerAlias=${encodeURIComponent(providerStorageAlias)}`, { method: "DELETE" });
|
||||
if (res.ok) await fetchDisabledModels();
|
||||
} catch (error) {
|
||||
console.log("Error enabling all models:", error);
|
||||
}
|
||||
};
|
||||
|
||||
// Define callbacks BEFORE the useEffect that uses them
|
||||
const fetchAliases = useCallback(async () => {
|
||||
try {
|
||||
@@ -237,7 +294,8 @@ export default function ProviderDetailPage() {
|
||||
useEffect(() => {
|
||||
fetchConnections();
|
||||
fetchAliases();
|
||||
}, [fetchConnections, fetchAliases]);
|
||||
fetchDisabledModels();
|
||||
}, [fetchConnections, fetchAliases, fetchDisabledModels]);
|
||||
|
||||
// Fetch suggested models from provider's public API (if configured)
|
||||
useEffect(() => {
|
||||
@@ -587,10 +645,13 @@ export default function ProviderDetailPage() {
|
||||
}
|
||||
// Combine hardcoded models with Kilo free models (deduplicated)
|
||||
// Exclude non-llm models (embedding, tts, etc.) — they have dedicated pages under media-providers
|
||||
const displayModels = [
|
||||
const allModels = [
|
||||
...models,
|
||||
...kiloFreeModels.filter((fm) => !models.some((m) => m.id === fm.id)),
|
||||
].filter((m) => !m.type || m.type === "llm");
|
||||
const disabledSet = new Set(disabledModelIds);
|
||||
const displayModels = allModels.filter((m) => !disabledSet.has(m.id));
|
||||
const disabledDisplayModels = allModels.filter((m) => disabledSet.has(m.id));
|
||||
// Custom models added by user (stored as aliases: modelId → providerAlias/modelId)
|
||||
const customModels = Object.entries(modelAliases)
|
||||
.filter(([alias, fullModel]) => {
|
||||
@@ -610,6 +671,25 @@ export default function ProviderDetailPage() {
|
||||
|
||||
return (
|
||||
<div className="flex flex-wrap gap-3">
|
||||
{/* Custom models first */}
|
||||
{customModels.map((model) => (
|
||||
<ModelRow
|
||||
key={model.id}
|
||||
model={{ id: model.id }}
|
||||
fullModel={`${providerDisplayAlias}/${model.id}`}
|
||||
alias={model.alias}
|
||||
copied={copied}
|
||||
onCopy={copy}
|
||||
onSetAlias={() => {}}
|
||||
onDeleteAlias={() => handleDeleteAlias(model.alias)}
|
||||
testStatus={modelTestResults[model.id]}
|
||||
onTest={connections.length > 0 || isFreeNoAuth ? () => handleTestModel(model.id) : undefined}
|
||||
isTesting={testingModelId === model.id}
|
||||
isCustom
|
||||
isFree={false}
|
||||
/>
|
||||
))}
|
||||
|
||||
{displayModels.map((model) => {
|
||||
const fullModel = `${providerStorageAlias}/${model.id}`;
|
||||
const oldFormatModel = `${providerId}/${model.id}`;
|
||||
@@ -630,33 +710,15 @@ export default function ProviderDetailPage() {
|
||||
onTest={connections.length > 0 || isFreeNoAuth ? () => handleTestModel(model.id) : undefined}
|
||||
isTesting={testingModelId === model.id}
|
||||
isFree={model.isFree}
|
||||
onDisable={() => handleDisableModel(model.id)}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
|
||||
{/* Custom models inline */}
|
||||
{customModels.map((model) => (
|
||||
<ModelRow
|
||||
key={model.id}
|
||||
model={{ id: model.id }}
|
||||
fullModel={`${providerDisplayAlias}/${model.id}`}
|
||||
alias={model.alias}
|
||||
copied={copied}
|
||||
onCopy={copy}
|
||||
onSetAlias={() => {}}
|
||||
onDeleteAlias={() => handleDeleteAlias(model.alias)}
|
||||
testStatus={modelTestResults[model.id]}
|
||||
onTest={connections.length > 0 || isFreeNoAuth ? () => handleTestModel(model.id) : undefined}
|
||||
isTesting={testingModelId === model.id}
|
||||
isCustom
|
||||
isFree={false}
|
||||
/>
|
||||
))}
|
||||
|
||||
{/* Add model button — inline, same style as model chips */}
|
||||
<button
|
||||
onClick={() => setShowAddCustomModel(true)}
|
||||
className="flex w-full items-center justify-center gap-1.5 rounded-lg border border-dashed border-black/15 px-3 py-2 text-xs text-text-muted transition-colors hover:border-primary/40 hover:text-primary sm:w-auto"
|
||||
className="flex w-full items-center justify-center gap-1.5 rounded-lg border border-dashed border-primary/40 px-3 py-2 text-xs text-primary transition-colors hover:border-primary hover:bg-primary/5 sm:w-auto"
|
||||
>
|
||||
<span className="material-symbols-outlined text-sm">add</span>
|
||||
Add Model
|
||||
@@ -692,6 +754,26 @@ export default function ProviderDetailPage() {
|
||||
</div>
|
||||
);
|
||||
})()}
|
||||
|
||||
{/* Disabled models — restorable */}
|
||||
{disabledDisplayModels.length > 0 && (
|
||||
<div className="w-full mt-2">
|
||||
<p className="text-xs text-text-muted mb-2">Disabled models ({disabledDisplayModels.length}):</p>
|
||||
<div className="flex flex-wrap gap-2">
|
||||
{disabledDisplayModels.map((m) => (
|
||||
<button
|
||||
key={m.id}
|
||||
onClick={() => handleEnableModel(m.id)}
|
||||
className="flex items-center gap-1 px-2.5 py-1.5 rounded-lg border border-dashed border-black/10 dark:border-white/10 text-xs text-text-muted hover:text-primary hover:border-primary/40 hover:bg-primary/5 transition-colors"
|
||||
title="Restore model"
|
||||
>
|
||||
<span className="material-symbols-outlined text-[13px]">add</span>
|
||||
{m.id}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -969,6 +1051,27 @@ export default function ProviderDetailPage() {
|
||||
<h2 className="text-lg font-semibold">
|
||||
{"Available Models"}
|
||||
</h2>
|
||||
{!isCompatible && (() => {
|
||||
const allIds = [
|
||||
...models,
|
||||
...kiloFreeModels.filter((fm) => !models.some((m) => m.id === fm.id)),
|
||||
].filter((m) => !m.type || m.type === "llm").map((m) => m.id);
|
||||
const activeIds = allIds.filter((id) => !disabledModelIds.includes(id));
|
||||
return (
|
||||
<div className="flex gap-2">
|
||||
{disabledModelIds.length > 0 && (
|
||||
<Button size="sm" variant="secondary" icon="restart_alt" onClick={handleEnableAll}>
|
||||
Active All
|
||||
</Button>
|
||||
)}
|
||||
{activeIds.length > 0 && (
|
||||
<Button size="sm" variant="secondary" icon="block" onClick={() => handleDisableAll(activeIds)}>
|
||||
Disable All
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
})()}
|
||||
</div>
|
||||
{!!modelsTestError && (
|
||||
<p className="text-xs text-red-500 mb-3 break-words">{modelsTestError}</p>
|
||||
|
||||
@@ -165,7 +165,10 @@ export default function ModelsCard({ providerId, kindFilter, providerAliasOverri
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ providerAlias, id: modelId, type: effectiveType }),
|
||||
});
|
||||
if (res.ok) await fetchData();
|
||||
if (res.ok) {
|
||||
await fetchData();
|
||||
window.dispatchEvent(new CustomEvent("customModelChanged"));
|
||||
}
|
||||
} catch (e) { console.log("add custom model error:", e); }
|
||||
};
|
||||
|
||||
@@ -173,7 +176,10 @@ export default function ModelsCard({ providerId, kindFilter, providerAliasOverri
|
||||
try {
|
||||
const params = new URLSearchParams({ providerAlias, id: modelId, type: effectiveType });
|
||||
const res = await fetch(`/api/models/custom?${params}`, { method: "DELETE" });
|
||||
if (res.ok) await fetchData();
|
||||
if (res.ok) {
|
||||
await fetchData();
|
||||
window.dispatchEvent(new CustomEvent("customModelChanged"));
|
||||
}
|
||||
} catch (e) { console.log("delete custom model error:", e); }
|
||||
};
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@ import {
|
||||
import Link from "next/link";
|
||||
import { getErrorCode, getRelativeTime } from "@/shared/utils";
|
||||
import { useNotificationStore } from "@/store/notificationStore";
|
||||
import { useHeaderSearchStore } from "@/store/headerSearchStore";
|
||||
import ModelAvailabilityBadge from "./components/ModelAvailabilityBadge";
|
||||
|
||||
function getStatusDisplay(connected, error, errorCode) {
|
||||
@@ -103,6 +104,18 @@ export default function ProvidersPage() {
|
||||
const [testingMode, setTestingMode] = useState(null);
|
||||
const [testResults, setTestResults] = useState(null);
|
||||
const notify = useNotificationStore();
|
||||
const searchQuery = useHeaderSearchStore((s) => s.query);
|
||||
const registerSearch = useHeaderSearchStore((s) => s.register);
|
||||
const unregisterSearch = useHeaderSearchStore((s) => s.unregister);
|
||||
|
||||
useEffect(() => {
|
||||
registerSearch("Search providers...");
|
||||
return () => unregisterSearch();
|
||||
}, [registerSearch, unregisterSearch]);
|
||||
|
||||
const matchSearch = (name) =>
|
||||
!searchQuery.trim() ||
|
||||
name.toLowerCase().includes(searchQuery.trim().toLowerCase());
|
||||
|
||||
useEffect(() => {
|
||||
const fetchData = async () => {
|
||||
@@ -224,7 +237,8 @@ export default function ProvidersPage() {
|
||||
color: "#10A37F",
|
||||
textIcon: "OC",
|
||||
apiType: node.apiType,
|
||||
}));
|
||||
}))
|
||||
.filter((p) => matchSearch(p.name));
|
||||
|
||||
const anthropicCompatibleProviders = providerNodes
|
||||
.filter((node) => node.type === "anthropic-compatible")
|
||||
@@ -233,7 +247,22 @@ export default function ProvidersPage() {
|
||||
name: node.name || "Anthropic Compatible",
|
||||
color: "#D97757",
|
||||
textIcon: "AC",
|
||||
}));
|
||||
}))
|
||||
.filter((p) => matchSearch(p.name));
|
||||
|
||||
const oauthEntries = Object.entries(OAUTH_PROVIDERS).filter(([, info]) =>
|
||||
matchSearch(info.name),
|
||||
);
|
||||
const freeEntries = Object.entries(FREE_PROVIDERS).filter(([, info]) =>
|
||||
matchSearch(info.name),
|
||||
);
|
||||
const freeTierEntries = Object.entries(FREE_TIER_PROVIDERS).filter(
|
||||
([, info]) => matchSearch(info.name),
|
||||
);
|
||||
const apikeyEntries = Object.entries(APIKEY_PROVIDERS).filter(
|
||||
([, info]) =>
|
||||
(info.serviceKinds ?? ["llm"]).includes("llm") && matchSearch(info.name),
|
||||
);
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
@@ -244,9 +273,27 @@ export default function ProvidersPage() {
|
||||
);
|
||||
}
|
||||
|
||||
const hasAnyResult =
|
||||
oauthEntries.length > 0 ||
|
||||
freeEntries.length > 0 ||
|
||||
freeTierEntries.length > 0 ||
|
||||
apikeyEntries.length > 0 ||
|
||||
compatibleProviders.length > 0 ||
|
||||
anthropicCompatibleProviders.length > 0;
|
||||
|
||||
return (
|
||||
<div className="flex min-w-0 flex-col gap-6 px-1 sm:px-0">
|
||||
{!hasAnyResult && (
|
||||
<div className="text-center py-8 border border-dashed border-border rounded-xl">
|
||||
<span className="material-symbols-outlined text-[32px] text-text-muted mb-2">
|
||||
search_off
|
||||
</span>
|
||||
<p className="text-text-muted text-sm">No providers match your search</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* OAuth Providers */}
|
||||
{oauthEntries.length > 0 && (
|
||||
<div className="flex flex-col gap-4">
|
||||
<div className="flex flex-col gap-3 sm:flex-row sm:items-center sm:justify-between">
|
||||
<h2 className="text-lg sm:text-xl font-semibold flex items-center gap-2 leading-tight">
|
||||
@@ -275,7 +322,7 @@ export default function ProvidersPage() {
|
||||
</div>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 gap-3 sm:grid-cols-2 sm:gap-4 lg:grid-cols-3 xl:grid-cols-4">
|
||||
{Object.entries(OAUTH_PROVIDERS).map(([key, info]) => (
|
||||
{oauthEntries.map(([key, info]) => (
|
||||
<ProviderCard
|
||||
key={key}
|
||||
providerId={key}
|
||||
@@ -287,8 +334,10 @@ export default function ProvidersPage() {
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Free Tier Providers */}
|
||||
{(freeEntries.length > 0 || freeTierEntries.length > 0) && (
|
||||
<div className="flex flex-col gap-4">
|
||||
<div className="flex flex-col gap-3 sm:flex-row sm:items-center sm:justify-between">
|
||||
<h2 className="text-lg sm:text-xl font-semibold flex items-center gap-2 leading-tight">
|
||||
@@ -314,7 +363,7 @@ export default function ProvidersPage() {
|
||||
</button>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 gap-3 sm:grid-cols-2 sm:gap-4 lg:grid-cols-3 xl:grid-cols-4">
|
||||
{Object.entries(FREE_PROVIDERS).map(([key, info]) => (
|
||||
{freeEntries.map(([key, info]) => (
|
||||
<ProviderCard
|
||||
key={key}
|
||||
providerId={key}
|
||||
@@ -324,7 +373,7 @@ export default function ProvidersPage() {
|
||||
onToggle={(active) => handleToggleProvider(key, "oauth", active)}
|
||||
/>
|
||||
))}
|
||||
{Object.entries(FREE_TIER_PROVIDERS).map(([key, info]) => (
|
||||
{freeTierEntries.map(([key, info]) => (
|
||||
<ApiKeyProviderCard
|
||||
key={key}
|
||||
providerId={key}
|
||||
@@ -336,8 +385,10 @@ export default function ProvidersPage() {
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* API Key Providers — fixed list */}
|
||||
{apikeyEntries.length > 0 && (
|
||||
<div className="flex flex-col gap-4">
|
||||
<div className="flex flex-col gap-3 sm:flex-row sm:items-center sm:justify-between">
|
||||
<h2 className="text-lg sm:text-xl font-semibold flex items-center gap-2 leading-tight">
|
||||
@@ -363,20 +414,19 @@ export default function ProvidersPage() {
|
||||
</button>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 gap-3 sm:grid-cols-2 sm:gap-4 lg:grid-cols-3 xl:grid-cols-4">
|
||||
{Object.entries(APIKEY_PROVIDERS)
|
||||
.filter(([, info]) => (info.serviceKinds ?? ["llm"]).includes("llm"))
|
||||
.map(([key, info]) => (
|
||||
<ApiKeyProviderCard
|
||||
key={key}
|
||||
providerId={key}
|
||||
provider={info}
|
||||
stats={getProviderStats(key, "apikey")}
|
||||
authType="apikey"
|
||||
onToggle={(active) => handleToggleProvider(key, "apikey", active)}
|
||||
/>
|
||||
))}
|
||||
{apikeyEntries.map(([key, info]) => (
|
||||
<ApiKeyProviderCard
|
||||
key={key}
|
||||
providerId={key}
|
||||
provider={info}
|
||||
stats={getProviderStats(key, "apikey")}
|
||||
authType="apikey"
|
||||
onToggle={(active) => handleToggleProvider(key, "apikey", active)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Web Cookie Providers — use browser subscription cookie instead of API key */}
|
||||
{/* <div className="flex flex-col gap-4">
|
||||
|
||||
@@ -41,6 +41,10 @@ export default function ProxyPoolsPage() {
|
||||
const [importing, setImporting] = useState(false);
|
||||
const [deploying, setDeploying] = useState(false);
|
||||
const [testingId, setTestingId] = useState(null);
|
||||
const [selectedIds, setSelectedIds] = useState([]);
|
||||
const [healthChecking, setHealthChecking] = useState(false);
|
||||
const [healthProgress, setHealthProgress] = useState({ current: 0, total: 0 });
|
||||
const [bulkBusy, setBulkBusy] = useState(false);
|
||||
const notify = useNotificationStore();
|
||||
|
||||
const fetchProxyPools = useCallback(async () => {
|
||||
@@ -162,6 +166,136 @@ export default function ProxyPoolsPage() {
|
||||
}
|
||||
};
|
||||
|
||||
const handleToggleActive = async (pool) => {
|
||||
const next = !pool.isActive;
|
||||
setProxyPools((prev) => prev.map((p) => p.id === pool.id ? { ...p, isActive: next } : p));
|
||||
try {
|
||||
const res = await fetch(`/api/proxy-pools/${pool.id}`, {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ isActive: next }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
setProxyPools((prev) => prev.map((p) => p.id === pool.id ? { ...p, isActive: pool.isActive } : p));
|
||||
notify.error("Failed to update active state");
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error toggling active:", error);
|
||||
setProxyPools((prev) => prev.map((p) => p.id === pool.id ? { ...p, isActive: pool.isActive } : p));
|
||||
}
|
||||
};
|
||||
|
||||
const allSelected = proxyPools.length > 0 && selectedIds.length === proxyPools.length;
|
||||
const toggleSelect = (id) => setSelectedIds((prev) => prev.includes(id) ? prev.filter((x) => x !== id) : [...prev, id]);
|
||||
const toggleSelectAll = () => setSelectedIds(allSelected ? [] : proxyPools.map((p) => p.id));
|
||||
const clearSelection = () => setSelectedIds([]);
|
||||
|
||||
const bulkSetActive = async (isActive) => {
|
||||
const targets = selectedIds.length > 0 ? selectedIds : proxyPools.map((p) => p.id);
|
||||
if (targets.length === 0) return;
|
||||
setBulkBusy(true);
|
||||
try {
|
||||
let ok = 0; let failed = 0;
|
||||
for (const id of targets) {
|
||||
try {
|
||||
const res = await fetch(`/api/proxy-pools/${id}`, {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ isActive }),
|
||||
});
|
||||
if (res.ok) ok += 1; else failed += 1;
|
||||
} catch { failed += 1; }
|
||||
}
|
||||
await fetchProxyPools();
|
||||
notify.success(`${isActive ? "Activated" : "Deactivated"} ${ok}${failed ? `, failed ${failed}` : ""}`);
|
||||
} finally {
|
||||
setBulkBusy(false);
|
||||
}
|
||||
};
|
||||
|
||||
const bulkDelete = async () => {
|
||||
if (selectedIds.length === 0) return;
|
||||
if (!confirm(`Delete ${selectedIds.length} proxy pool(s)?`)) return;
|
||||
setBulkBusy(true);
|
||||
try {
|
||||
let ok = 0; let blocked = 0; let failed = 0;
|
||||
for (const id of selectedIds) {
|
||||
try {
|
||||
const res = await fetch(`/api/proxy-pools/${id}`, { method: "DELETE" });
|
||||
if (res.ok) ok += 1;
|
||||
else if (res.status === 409) blocked += 1;
|
||||
else failed += 1;
|
||||
} catch { failed += 1; }
|
||||
}
|
||||
await fetchProxyPools();
|
||||
clearSelection();
|
||||
notify.success(`Deleted ${ok}${blocked ? `, ${blocked} bound` : ""}${failed ? `, ${failed} failed` : ""}`);
|
||||
} finally {
|
||||
setBulkBusy(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleHealthCheck = async () => {
|
||||
const targets = selectedIds.length > 0
|
||||
? proxyPools.filter((p) => selectedIds.includes(p.id))
|
||||
: proxyPools;
|
||||
if (targets.length === 0) return;
|
||||
setHealthChecking(true);
|
||||
setHealthProgress({ current: 0, total: targets.length });
|
||||
let alive = 0; const deadIds = [];
|
||||
let done = 0;
|
||||
const CONCURRENCY = 10;
|
||||
const queue = [...targets];
|
||||
|
||||
const worker = async () => {
|
||||
while (queue.length > 0) {
|
||||
const pool = queue.shift();
|
||||
if (!pool) break;
|
||||
try {
|
||||
const res = await fetch(`/api/proxy-pools/${pool.id}/test`, { method: "POST" });
|
||||
const data = await res.json();
|
||||
if (res.ok && data.ok) alive += 1; else deadIds.push(pool.id);
|
||||
} catch {
|
||||
deadIds.push(pool.id);
|
||||
} finally {
|
||||
done += 1;
|
||||
setHealthProgress({ current: done, total: targets.length });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
await Promise.all(Array.from({ length: Math.min(CONCURRENCY, targets.length) }, worker));
|
||||
await fetchProxyPools();
|
||||
setHealthChecking(false);
|
||||
setHealthProgress({ current: 0, total: 0 });
|
||||
|
||||
if (deadIds.length > 0 && confirm(`Alive: ${alive}, Dead: ${deadIds.length}.\n\nDisable ${deadIds.length} dead proxies?`)) {
|
||||
setBulkBusy(true);
|
||||
try {
|
||||
for (const id of deadIds) {
|
||||
try {
|
||||
await fetch(`/api/proxy-pools/${id}`, {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ isActive: false }),
|
||||
});
|
||||
} catch {}
|
||||
}
|
||||
await fetchProxyPools();
|
||||
notify.success(`Disabled ${deadIds.length} dead proxies`);
|
||||
} finally {
|
||||
setBulkBusy(false);
|
||||
}
|
||||
} else {
|
||||
notify.success(`Health check done. Alive: ${alive}, Dead: ${deadIds.length}`);
|
||||
}
|
||||
};
|
||||
|
||||
// Cleanup selectedIds when pools change
|
||||
useEffect(() => {
|
||||
setSelectedIds((prev) => prev.filter((id) => proxyPools.some((p) => p.id === id)));
|
||||
}, [proxyPools]);
|
||||
|
||||
const openBatchImportModal = () => {
|
||||
setBatchImportText("");
|
||||
setShowBatchImportModal(true);
|
||||
@@ -354,13 +488,57 @@ export default function ProxyPoolsPage() {
|
||||
</div>
|
||||
|
||||
<Card>
|
||||
<div className="mb-4 flex flex-col gap-2 sm:flex-row sm:items-center sm:justify-between">
|
||||
<div className="flex flex-wrap items-center gap-2">
|
||||
<Badge variant="default">Total: {proxyPools.length}</Badge>
|
||||
<Badge variant="success">Active: {activeCount}</Badge>
|
||||
</div>
|
||||
<div className="mb-4 flex flex-wrap items-center gap-2">
|
||||
{proxyPools.length > 0 && (
|
||||
<label className="flex items-center gap-1.5 text-xs text-text-muted cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={allSelected}
|
||||
onChange={toggleSelectAll}
|
||||
className="size-4 rounded border-black/20 dark:border-white/20"
|
||||
/>
|
||||
{allSelected ? "Unselect all" : "Select all"}
|
||||
</label>
|
||||
)}
|
||||
<Badge variant="default">Total: {proxyPools.length}</Badge>
|
||||
<Badge variant="success">Active: {activeCount}</Badge>
|
||||
</div>
|
||||
|
||||
{(selectedIds.length > 0 || healthChecking) && (
|
||||
<div className="mb-4 flex flex-wrap items-center gap-2 rounded-lg border border-primary/30 bg-primary/5 px-3 py-2">
|
||||
<span className="material-symbols-outlined text-[18px] text-primary">checklist</span>
|
||||
<span className="text-xs font-medium text-primary">
|
||||
{selectedIds.length > 0 ? `${selectedIds.length} selected` : "All pools"}
|
||||
</span>
|
||||
<div className="ml-auto flex flex-wrap items-center gap-2">
|
||||
<Button
|
||||
size="sm"
|
||||
icon={healthChecking ? "progress_activity" : "health_and_safety"}
|
||||
onClick={handleHealthCheck}
|
||||
disabled={healthChecking || bulkBusy || proxyPools.length === 0}
|
||||
>
|
||||
{healthChecking ? `Checking ${healthProgress.current}/${healthProgress.total}` : "Health Check"}
|
||||
</Button>
|
||||
{selectedIds.length > 0 && (
|
||||
<>
|
||||
<Button size="sm" variant="secondary" icon="toggle_on" onClick={() => bulkSetActive(true)} disabled={bulkBusy || healthChecking}>
|
||||
Activate
|
||||
</Button>
|
||||
<Button size="sm" variant="secondary" icon="toggle_off" onClick={() => bulkSetActive(false)} disabled={bulkBusy || healthChecking}>
|
||||
Deactivate
|
||||
</Button>
|
||||
<Button size="sm" variant="secondary" icon="delete" onClick={bulkDelete} disabled={bulkBusy || healthChecking}>
|
||||
Delete
|
||||
</Button>
|
||||
<Button size="sm" variant="ghost" onClick={clearSelection} disabled={bulkBusy || healthChecking}>
|
||||
Clear
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{proxyPools.length === 0 ? (
|
||||
<div className="text-center py-10">
|
||||
<p className="text-text-main font-medium mb-1">No proxy pool entries yet</p>
|
||||
@@ -372,8 +550,15 @@ export default function ProxyPoolsPage() {
|
||||
) : (
|
||||
<div className="flex flex-col divide-y divide-black/[0.04] dark:divide-white/[0.05]">
|
||||
{proxyPools.map((pool) => (
|
||||
<div key={pool.id} className="group flex flex-col gap-3 py-3 sm:flex-row sm:items-center sm:justify-between">
|
||||
<div className="min-w-0 flex-1">
|
||||
<div key={pool.id} className="flex flex-col gap-3 py-3 sm:flex-row sm:items-center sm:justify-between">
|
||||
<div className="flex items-start gap-3 min-w-0 flex-1">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={selectedIds.includes(pool.id)}
|
||||
onChange={() => toggleSelect(pool.id)}
|
||||
className="mt-1 size-4 shrink-0 rounded border-black/20 dark:border-white/20"
|
||||
/>
|
||||
<div className="min-w-0 flex-1">
|
||||
<div className="flex items-center gap-2 flex-wrap">
|
||||
<p className="min-w-0 max-w-full truncate text-sm font-medium sm:max-w-[18rem]">{pool.name}</p>
|
||||
<Badge variant={getStatusVariant(pool.testStatus)} size="sm" dot>
|
||||
@@ -397,9 +582,16 @@ export default function ProxyPoolsPage() {
|
||||
Last tested: {formatDateTime(pool.lastTestedAt)}
|
||||
{pool.lastError ? ` · ${pool.lastError}` : ""}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-end gap-1 opacity-100 transition-opacity sm:opacity-0 sm:group-hover:opacity-100">
|
||||
<div className="flex items-center justify-end gap-1">
|
||||
<Toggle
|
||||
size="sm"
|
||||
checked={pool.isActive === true}
|
||||
onChange={() => handleToggleActive(pool)}
|
||||
title={pool.isActive ? "Disable" : "Enable"}
|
||||
/>
|
||||
<button
|
||||
onClick={() => handleTest(pool.id)}
|
||||
className="p-2 rounded hover:bg-black/5 dark:hover:bg-white/5 text-text-muted hover:text-primary"
|
||||
|
||||
@@ -7,7 +7,12 @@ import Toggle from "@/shared/components/Toggle";
|
||||
import { parseQuotaData, calculatePercentage } from "./utils";
|
||||
import Card from "@/shared/components/Card";
|
||||
import { EditConnectionModal } from "@/shared/components";
|
||||
import { USAGE_SUPPORTED_PROVIDERS } from "@/shared/constants/providers";
|
||||
import { USAGE_SUPPORTED_PROVIDERS, USAGE_APIKEY_PROVIDERS } from "@/shared/constants/providers";
|
||||
|
||||
// Connection is eligible for the quota page when it uses OAuth or is an apikey provider whitelisted for quota
|
||||
const isUsageEligible = (conn) =>
|
||||
USAGE_SUPPORTED_PROVIDERS.includes(conn.provider) &&
|
||||
(conn.authType === "oauth" || USAGE_APIKEY_PROVIDERS.includes(conn.provider));
|
||||
|
||||
const REFRESH_INTERVAL_MS = 60000; // 60 seconds
|
||||
const DEPLETED_QUOTA_THRESHOLD = 5; // percent
|
||||
@@ -239,16 +244,11 @@ export default function ProviderLimits() {
|
||||
try {
|
||||
const conns = await fetchConnections();
|
||||
|
||||
// Filter only supported OAuth providers
|
||||
const oauthConnections = conns.filter(
|
||||
(conn) =>
|
||||
USAGE_SUPPORTED_PROVIDERS.includes(conn.provider) &&
|
||||
conn.authType === "oauth",
|
||||
);
|
||||
// Filter eligible connections (OAuth + whitelisted apikey)
|
||||
const eligibleConnections = conns.filter(isUsageEligible);
|
||||
|
||||
// Fetch quota for supported OAuth connections only
|
||||
await Promise.all(
|
||||
oauthConnections.map((conn) => fetchQuota(conn.id, conn.provider)),
|
||||
eligibleConnections.map((conn) => fetchQuota(conn.id, conn.provider)),
|
||||
);
|
||||
|
||||
setLastUpdated(new Date());
|
||||
@@ -266,21 +266,17 @@ export default function ProviderLimits() {
|
||||
const conns = await fetchConnections();
|
||||
setConnectionsLoading(false);
|
||||
|
||||
const oauthConnections = conns.filter(
|
||||
(conn) =>
|
||||
USAGE_SUPPORTED_PROVIDERS.includes(conn.provider) &&
|
||||
conn.authType === "oauth",
|
||||
);
|
||||
const eligibleConnections = conns.filter(isUsageEligible);
|
||||
|
||||
// Mark all as loading before fetching
|
||||
const loadingState = {};
|
||||
oauthConnections.forEach((conn) => {
|
||||
eligibleConnections.forEach((conn) => {
|
||||
loadingState[conn.id] = true;
|
||||
});
|
||||
setLoading(loadingState);
|
||||
|
||||
await Promise.all(
|
||||
oauthConnections.map((conn) => fetchQuota(conn.id, conn.provider)),
|
||||
eligibleConnections.map((conn) => fetchQuota(conn.id, conn.provider)),
|
||||
);
|
||||
setLastUpdated(new Date());
|
||||
};
|
||||
@@ -354,12 +350,8 @@ export default function ProviderLimits() {
|
||||
};
|
||||
}, [autoRefresh, refreshAll]);
|
||||
|
||||
// Filter only supported providers
|
||||
const filteredConnections = connections.filter(
|
||||
(conn) =>
|
||||
USAGE_SUPPORTED_PROVIDERS.includes(conn.provider) &&
|
||||
conn.authType === "oauth",
|
||||
);
|
||||
// Filter eligible connections (OAuth + whitelisted apikey)
|
||||
const filteredConnections = connections.filter(isUsageEligible);
|
||||
|
||||
const providerFilteredConnections = filteredConnections.filter(
|
||||
(conn) => providerFilter === "all" || conn.provider === providerFilter,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import { useMemo, useState, useCallback, useRef } from "react";
|
||||
import { useMemo, useState, useEffect, useCallback, useRef } from "react";
|
||||
import PropTypes from "prop-types";
|
||||
import {
|
||||
ReactFlow,
|
||||
@@ -10,6 +10,10 @@ import {
|
||||
import "@xyflow/react/dist/style.css";
|
||||
import { AI_PROVIDERS } from "@/shared/constants/providers";
|
||||
|
||||
// Force-stop FE animation if a provider stays active longer than this
|
||||
const FE_ACTIVE_TIMEOUT_MS = 60000;
|
||||
const FE_ACTIVE_TICK_MS = 1000;
|
||||
|
||||
function getProviderConfig(providerId) {
|
||||
return AI_PROVIDERS[providerId] || { color: "#6b7280", name: providerId };
|
||||
}
|
||||
@@ -199,13 +203,44 @@ export default function ProviderTopology({ providers = [], activeRequests = [],
|
||||
const lastKey = lastProvider?.toLowerCase() || "";
|
||||
const errorKey = errorProvider?.toLowerCase() || "";
|
||||
|
||||
const activeSet = useMemo(() => new Set(activeKey ? activeKey.split(",") : []), [activeKey]);
|
||||
const rawActiveSet = useMemo(() => new Set(activeKey ? activeKey.split(",") : []), [activeKey]);
|
||||
const lastSet = useMemo(() => new Set(lastKey ? [lastKey] : []), [lastKey]);
|
||||
const errorSet = useMemo(() => new Set(errorKey ? [errorKey] : []), [errorKey]);
|
||||
|
||||
// Track firstSeen per active provider; drop provider if running too long (BE stuck)
|
||||
const firstSeenRef = useRef({});
|
||||
const [tick, setTick] = useState(0);
|
||||
|
||||
useEffect(() => {
|
||||
const seen = firstSeenRef.current;
|
||||
const now = Date.now();
|
||||
for (const p of rawActiveSet) {
|
||||
if (!seen[p]) seen[p] = now;
|
||||
}
|
||||
for (const p of Object.keys(seen)) {
|
||||
if (!rawActiveSet.has(p)) delete seen[p];
|
||||
}
|
||||
}, [rawActiveSet]);
|
||||
|
||||
useEffect(() => {
|
||||
if (rawActiveSet.size === 0) return;
|
||||
const id = setInterval(() => setTick((t) => t + 1), FE_ACTIVE_TICK_MS);
|
||||
return () => clearInterval(id);
|
||||
}, [rawActiveSet]);
|
||||
|
||||
const activeSet = useMemo(() => {
|
||||
const now = Date.now();
|
||||
const filtered = new Set();
|
||||
for (const p of rawActiveSet) {
|
||||
const ts = firstSeenRef.current[p];
|
||||
if (!ts || now - ts < FE_ACTIVE_TIMEOUT_MS) filtered.add(p);
|
||||
}
|
||||
return filtered;
|
||||
}, [rawActiveSet, tick]);
|
||||
|
||||
const { nodes, edges } = useMemo(
|
||||
() => buildLayout(providers, activeSet, lastSet, errorSet),
|
||||
[providers, activeKey, lastKey, errorKey]
|
||||
[providers, activeSet, lastKey, errorKey]
|
||||
);
|
||||
|
||||
// Stable key — only remount when provider list changes
|
||||
|
||||
246
src/app/api/cli-tools/cowork-settings/route.js
Normal file
@@ -0,0 +1,246 @@
|
||||
"use server";
|
||||
|
||||
import { NextResponse } from "next/server";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import os from "os";
|
||||
import crypto from "crypto";
|
||||
|
||||
const PROVIDER = "gateway";
|
||||
|
||||
// Candidate user-data roots — Cowork can run from either Claude-3p (3p mode) or Claude (1p mode w/ cowork features)
|
||||
// Candidate user-data roots, ordered by preference (Claude-3p first).
// Cowork can run from either Claude-3p (3p mode) or Claude (1p mode w/ cowork features).
const getCandidateRoots = () => {
  const home = os.homedir();
  switch (os.platform()) {
    case "darwin": {
      const appSupport = path.join(home, "Library", "Application Support");
      return [path.join(appSupport, "Claude-3p"), path.join(appSupport, "Claude")];
    }
    case "win32": {
      const local = process.env.LOCALAPPDATA || path.join(home, "AppData", "Local");
      const roaming = process.env.APPDATA || path.join(home, "AppData", "Roaming");
      return [
        path.join(local, "Claude-3p"),
        path.join(roaming, "Claude-3p"),
        path.join(local, "Claude"),
        path.join(roaming, "Claude"),
      ];
    }
    default:
      return [
        path.join(home, ".config", "Claude-3p"),
        path.join(home, ".config", "Claude"),
      ];
  }
};
|
||||
|
||||
// Claude.app/exe install paths — fallback detect when no user-data folder yet
|
||||
// Claude.app/exe install locations — fallback detection when no user-data folder exists yet.
// Returns an empty list on platforms with no known install path (Linux).
const getAppInstallPaths = () => {
  switch (os.platform()) {
    case "darwin":
      return [
        "/Applications/Claude.app",
        path.join(os.homedir(), "Applications", "Claude.app"),
      ];
    case "win32": {
      const local = process.env.LOCALAPPDATA || path.join(os.homedir(), "AppData", "Local");
      const programFiles = process.env["ProgramFiles"] || "C:\\Program Files";
      return [
        path.join(local, "AnthropicClaude"),
        path.join(programFiles, "Claude"),
        path.join(programFiles, "AnthropicClaude"),
      ];
    }
    default:
      return [];
  }
};
|
||||
|
||||
// For READ: prefer existing configLibrary (any root). For WRITE: always Claude-3p (first candidate).
|
||||
// Resolve the root to READ config from: the first candidate that already has a
// configLibrary folder, else the default write root (Claude-3p).
const resolveAppRootForRead = async () => {
  const roots = getCandidateRoots();
  for (const root of roots) {
    const hasLibrary = await fs
      .access(path.join(root, "configLibrary"))
      .then(() => true, () => false);
    if (hasLibrary) return root;
  }
  return roots[0];
};
|
||||
|
||||
// Path helpers. WRITE paths always target the first candidate (Claude-3p);
// READ paths resolve against whichever root currently has a configLibrary.
const getWriteRoot = () => getCandidateRoots()[0]; // always Claude-3p
const getWriteConfigDir = () => path.join(getWriteRoot(), "configLibrary");
const getWriteMetaPath = () => path.join(getWriteConfigDir(), "_meta.json");

const getConfigDir = async () => path.join(await resolveAppRootForRead(), "configLibrary");
const getMetaPath = async () => path.join(await getConfigDir(), "_meta.json");
|
||||
|
||||
// Locate Claude (1p) folder for claude_desktop_config.json bootstrap
|
||||
// Locate the Claude (1p) user-data folder that holds claude_desktop_config.json.
// Fix: removed an unused `localApp` local in the win32 branch — 1p always uses
// the roaming profile on Windows, so LOCALAPPDATA was never consulted.
const get1pRoot = () => {
  if (os.platform() === "darwin") {
    return path.join(os.homedir(), "Library", "Application Support", "Claude");
  }
  if (os.platform() === "win32") {
    const roaming = process.env.APPDATA || path.join(os.homedir(), "AppData", "Roaming");
    return path.join(roaming, "Claude"); // 1p uses roaming on Win
  }
  return path.join(os.homedir(), ".config", "Claude");
};
|
||||
|
||||
// Set deploymentMode="3p" in Claude/claude_desktop_config.json (preserve existing keys)
|
||||
// Force deploymentMode="3p" in Claude/claude_desktop_config.json, preserving all
// other keys. Returns true when the file was modified, false when already in 3p mode.
const bootstrapDeploymentMode = async () => {
  const root = get1pRoot();
  const cfgPath = path.join(root, "claude_desktop_config.json");
  let cfg = {};
  try {
    const raw = await fs.readFile(cfgPath, "utf-8");
    cfg = JSON.parse(raw);
  } catch (error) {
    // A missing file is fine (fresh install); other errors (perms, bad JSON) propagate.
    if (error.code !== "ENOENT") throw error;
  }
  if (cfg.deploymentMode === "3p") return false; // no change
  cfg.deploymentMode = "3p";
  await fs.mkdir(root, { recursive: true });
  await fs.writeFile(cfgPath, JSON.stringify(cfg, null, 2));
  return true;
};
|
||||
|
||||
// Cowork is available if either (a) any user-data root exists or (b) Claude app is installed
|
||||
// Cowork is available if any user-data root exists OR the Claude app is installed.
const checkInstalled = async () => {
  const probes = [...getCandidateRoots(), ...getAppInstallPaths()];
  for (const probe of probes) {
    const exists = await fs.access(probe).then(() => true, () => false);
    if (exists) return true;
  }
  return false;
};
|
||||
|
||||
/**
 * True when `url` points at a loopback host the Cowork sandbox cannot reach.
 * Fix: the old substring regex matched ANY string containing "localhost",
 * flagging public hosts like "my-localhost-proxy.example.com". We now parse the
 * hostname when the string is an absolute URL (also catching IPv6 `::1` and
 * `*.localhost`, which resolve to loopback) and only fall back to the original
 * regex for non-URL strings, so prior accepted/rejected inputs keep working.
 */
const isLocalhostUrl = (url) => {
  if (!url) return false;
  const text = String(url);
  try {
    const { hostname } = new URL(text);
    if (hostname) {
      return (
        hostname === "localhost" ||
        hostname.endsWith(".localhost") ||
        hostname === "127.0.0.1" ||
        hostname === "0.0.0.0" ||
        hostname === "[::1]"
      );
    }
  } catch {
    /* not an absolute URL — fall through to the substring heuristic */
  }
  return /localhost|127\.0\.0\.1|0\.0\.0\.0/i.test(text);
};
|
||||
|
||||
// Read and parse a JSON file; null when the file does not exist.
// Other I/O errors and JSON syntax errors propagate to the caller.
const readJson = async (filePath) => {
  let raw;
  try {
    raw = await fs.readFile(filePath, "utf-8");
  } catch (error) {
    if (error.code === "ENOENT") return null;
    throw error;
  }
  return JSON.parse(raw);
};
|
||||
|
||||
// Ensure meta exists in Claude-3p/configLibrary (write target). If meta already exists in Claude/ (1p), copy appliedId.
|
||||
// Ensure a meta file exists in the write root (Claude-3p/configLibrary).
// Inherits appliedId from any existing root when available; otherwise mints a
// fresh default entry. Returns the (possibly newly written) meta object.
const ensureMeta = async () => {
  const writeMetaPath = getWriteMetaPath();
  const existing = await readJson(writeMetaPath);
  if (existing?.appliedId) return existing;

  // Prefer meta inherited from a readable root, else create a new "Default" entry.
  const inherited = await readJson(await getMetaPath());
  let meta;
  if (inherited?.appliedId) {
    meta = inherited;
  } else {
    const id = crypto.randomUUID();
    meta = { appliedId: id, entries: [{ id, name: "Default" }] };
  }
  await fs.mkdir(getWriteConfigDir(), { recursive: true });
  await fs.writeFile(writeMetaPath, JSON.stringify(meta, null, 2));
  return meta;
};
|
||||
|
||||
// GET /api/cli-tools/cowork-settings — report install status and the currently
// applied gateway config (if any). Never throws; failures return a 500 JSON body.
export async function GET() {
  try {
    if (!(await checkInstalled())) {
      return NextResponse.json({
        installed: false,
        config: null,
        message: "Claude Desktop (Cowork mode) not detected",
      });
    }

    const meta = await readJson(await getMetaPath());
    const appliedId = meta?.appliedId || null;
    let configPath = null;
    let config = null;
    if (appliedId) {
      configPath = path.join(await getConfigDir(), `${appliedId}.json`);
      config = await readJson(configPath);
    }

    const provider = config?.inferenceProvider || null;
    const baseUrl = config?.inferenceGatewayBaseUrl || null;
    const rawModels = config?.inferenceModels;
    // Model entries may be plain strings or { name } objects — normalize to names.
    const models = Array.isArray(rawModels)
      ? rawModels.map((m) => (typeof m === "string" ? m : m?.name)).filter(Boolean)
      : [];

    return NextResponse.json({
      installed: true,
      config,
      has9Router: Boolean(provider === PROVIDER && baseUrl),
      configPath,
      cowork: { appliedId, baseUrl, models, provider },
    });
  } catch (error) {
    console.log("Error reading cowork settings:", error);
    return NextResponse.json({ error: "Failed to read cowork settings" }, { status: 500 });
  }
}
|
||||
|
||||
// POST /api/cli-tools/cowork-settings — validate and write the gateway config,
// bootstrapping 3p deployment mode in claude_desktop_config.json when needed.
// Body: { baseUrl, apiKey, models: string[] }.
export async function POST(request) {
  try {
    const { baseUrl, apiKey, models } = await request.json();

    if (!baseUrl || !apiKey) {
      return NextResponse.json({ error: "baseUrl and apiKey are required" }, { status: 400 });
    }
    // The Cowork sandbox cannot see the host's loopback interface.
    if (isLocalhostUrl(baseUrl)) {
      return NextResponse.json({
        error: "Claude Cowork sandbox cannot reach localhost. Enable Tunnel/Cloud Endpoint or use Tailscale/VPS.",
      }, { status: 400 });
    }

    const modelsArray = Array.isArray(models)
      ? models.filter((m) => typeof m === "string" && m.trim())
      : [];
    if (modelsArray.length === 0) {
      return NextResponse.json({ error: "At least one model is required" }, { status: 400 });
    }

    const bootstrapped = await bootstrapDeploymentMode();
    const meta = await ensureMeta();
    const configPath = path.join(getWriteConfigDir(), `${meta.appliedId}.json`);

    const newConfig = {
      inferenceProvider: PROVIDER,
      inferenceGatewayBaseUrl: baseUrl,
      inferenceGatewayApiKey: apiKey,
      inferenceModels: modelsArray.map((name) => ({ name })),
    };
    await fs.writeFile(configPath, JSON.stringify(newConfig, null, 2));

    return NextResponse.json({
      success: true,
      bootstrapped,
      message: bootstrapped
        ? "Cowork enabled (3p mode set). Quit & reopen Claude Desktop."
        : "Cowork settings applied. Quit & reopen Claude Desktop.",
      configPath,
    });
  } catch (error) {
    console.log("Error applying cowork settings:", error);
    return NextResponse.json({ error: "Failed to apply cowork settings" }, { status: 500 });
  }
}
|
||||
|
||||
// DELETE /api/cli-tools/cowork-settings — reset the active config by blanking
// the applied config file (kept as {} so the meta entry stays valid).
export async function DELETE() {
  try {
    const meta = await readJson(await getMetaPath());
    const appliedId = meta?.appliedId;
    if (!appliedId) {
      return NextResponse.json({ success: true, message: "No active config to reset" });
    }
    const configPath = path.join(await getConfigDir(), `${appliedId}.json`);
    try {
      await fs.writeFile(configPath, JSON.stringify({}, null, 2));
    } catch (error) {
      // File (or its directory) already gone — nothing to reset.
      if (error.code !== "ENOENT") throw error;
    }
    return NextResponse.json({ success: true, message: "Cowork config reset" });
  } catch (error) {
    console.log("Error resetting cowork settings:", error);
    return NextResponse.json({ error: "Failed to reset cowork settings" }, { status: 500 });
  }
}
|
||||
@@ -33,6 +33,7 @@ export async function GET(request) {
|
||||
|
||||
// ElevenLabs requires API key
|
||||
const raw = provider === "elevenlabs" ? await fetcher(apiKey) : await fetcher();
|
||||
const useElevenShape = provider === "elevenlabs" || provider === "gemini";
|
||||
let voices;
|
||||
|
||||
if (provider === "local-device") {
|
||||
@@ -46,7 +47,7 @@ export async function GET(request) {
|
||||
langName: langName(v.lang),
|
||||
gender: v.gender,
|
||||
}));
|
||||
} else if (provider === "elevenlabs") {
|
||||
} else if (useElevenShape) {
|
||||
voices = raw.map((v) => ({
|
||||
id: v.voice_id,
|
||||
name: v.name,
|
||||
|
||||
50
src/app/api/models/disabled/route.js
Normal file
@@ -0,0 +1,50 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { getDisabledModels, disableModels, enableModels } from "@/lib/disabledModelsDb";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
|
||||
// GET /api/models/disabled?providerAlias=xxx
|
||||
// GET /api/models/disabled?providerAlias=xxx
// With providerAlias: { ids: [...] } for that provider; without: { disabled: {...} } full map.
export async function GET(request) {
  try {
    const providerAlias = new URL(request.url).searchParams.get("providerAlias");
    const disabled = await getDisabledModels();
    if (providerAlias) {
      return NextResponse.json({ ids: disabled[providerAlias] || [] });
    }
    return NextResponse.json({ disabled });
  } catch (error) {
    console.log("Error fetching disabled models:", error);
    return NextResponse.json({ error: "Failed to fetch disabled models" }, { status: 500 });
  }
}
|
||||
|
||||
// POST /api/models/disabled body: { providerAlias, ids: [...] }
|
||||
// POST /api/models/disabled — body: { providerAlias, ids: [...] }.
// Adds the given model ids to the provider's disabled set.
export async function POST(request) {
  try {
    const { providerAlias, ids } = await request.json();
    const validBody = Boolean(providerAlias) && Array.isArray(ids);
    if (!validBody) {
      return NextResponse.json({ error: "providerAlias and ids[] required" }, { status: 400 });
    }
    await disableModels(providerAlias, ids);
    return NextResponse.json({ success: true });
  } catch (error) {
    console.log("Error disabling models:", error);
    return NextResponse.json({ error: "Failed to disable models" }, { status: 500 });
  }
}
|
||||
|
||||
// DELETE /api/models/disabled?providerAlias=xxx[&id=yyy]
|
||||
// DELETE /api/models/disabled?providerAlias=xxx[&id=yyy]
// With id: re-enable that single model; without: re-enable everything for the provider.
export async function DELETE(request) {
  try {
    const params = new URL(request.url).searchParams;
    const providerAlias = params.get("providerAlias");
    if (!providerAlias) {
      return NextResponse.json({ error: "providerAlias required" }, { status: 400 });
    }
    const id = params.get("id");
    await enableModels(providerAlias, id ? [id] : []);
    return NextResponse.json({ success: true });
  } catch (error) {
    console.log("Error enabling models:", error);
    return NextResponse.json({ error: "Failed to enable models" }, { status: 500 });
  }
}
|
||||
@@ -1,20 +1,29 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { getModelAliases, setModelAlias } from "@/models";
|
||||
import { getDisabledModels } from "@/lib/disabledModelsDb";
|
||||
import { AI_MODELS } from "@/shared/constants/config";
|
||||
import { getProviderAlias } from "@/shared/constants/providers";
|
||||
|
||||
// GET /api/models - Get models with aliases
|
||||
export async function GET() {
|
||||
try {
|
||||
const modelAliases = await getModelAliases();
|
||||
const disabled = await getDisabledModels();
|
||||
|
||||
const models = AI_MODELS.map((m) => {
|
||||
const fullModel = `${m.provider}/${m.model}`;
|
||||
return {
|
||||
...m,
|
||||
fullModel,
|
||||
alias: modelAliases[fullModel] || m.model,
|
||||
};
|
||||
});
|
||||
const models = AI_MODELS
|
||||
.filter((m) => {
|
||||
const alias = getProviderAlias(m.provider) || m.provider;
|
||||
const list = disabled[alias] || disabled[m.provider] || [];
|
||||
return !list.includes(m.model);
|
||||
})
|
||||
.map((m) => {
|
||||
const fullModel = `${m.provider}/${m.model}`;
|
||||
return {
|
||||
...m,
|
||||
fullModel,
|
||||
alias: modelAliases[fullModel] || m.model,
|
||||
};
|
||||
});
|
||||
|
||||
return NextResponse.json({ models });
|
||||
} catch (error) {
|
||||
|
||||
@@ -7,7 +7,13 @@ import {
|
||||
pollForToken
|
||||
} from "@/lib/oauth/providers";
|
||||
import { createProviderConnection } from "@/models";
|
||||
import { startCodexProxy, stopCodexProxy } from "@/lib/oauth/utils/server";
|
||||
import {
|
||||
startCodexProxy,
|
||||
stopCodexProxy,
|
||||
registerCodexSession,
|
||||
getCodexSessionStatus,
|
||||
clearCodexSession,
|
||||
} from "@/lib/oauth/utils/server";
|
||||
|
||||
/**
|
||||
* Dynamic OAuth API Route
|
||||
@@ -39,8 +45,34 @@ export async function GET(request, { params }) {
|
||||
if (!appPort) {
|
||||
return NextResponse.json({ error: "Missing app_port" }, { status: 400 });
|
||||
}
|
||||
// Optional server-side mode params: register session for auto-exchange
|
||||
const state = searchParams.get("state");
|
||||
const codeVerifier = searchParams.get("code_verifier");
|
||||
const redirectUri = searchParams.get("redirect_uri");
|
||||
const result = await startCodexProxy(Number(appPort));
|
||||
return NextResponse.json(result);
|
||||
let serverSide = false;
|
||||
if (result.success && state && codeVerifier && redirectUri) {
|
||||
serverSide = registerCodexSession({ state, codeVerifier, redirectUri });
|
||||
}
|
||||
return NextResponse.json({ ...result, serverSide });
|
||||
}
|
||||
|
||||
if (action === "poll-status") {
|
||||
if (provider !== "codex") {
|
||||
return NextResponse.json({ error: "Poll only supported for codex" }, { status: 400 });
|
||||
}
|
||||
const state = searchParams.get("state");
|
||||
if (!state) {
|
||||
return NextResponse.json({ error: "Missing state" }, { status: 400 });
|
||||
}
|
||||
const session = getCodexSessionStatus(state);
|
||||
if (!session) return NextResponse.json({ status: "unknown" });
|
||||
if (session.status === "done" || session.status === "error") {
|
||||
const payload = { ...session };
|
||||
clearCodexSession(state);
|
||||
return NextResponse.json(payload);
|
||||
}
|
||||
return NextResponse.json({ status: session.status });
|
||||
}
|
||||
|
||||
if (action === "stop-proxy") {
|
||||
|
||||
@@ -110,7 +110,7 @@ export async function POST(request) {
|
||||
if (!provider || !isValidProvider) {
|
||||
return NextResponse.json({ error: "Invalid provider" }, { status: 400 });
|
||||
}
|
||||
if (!apiKey) {
|
||||
if (!apiKey && provider !== "ollama-local") {
|
||||
return NextResponse.json({ error: `${isWebCookieProvider ? "Cookie value" : "API Key"} is required` }, { status: 400 });
|
||||
}
|
||||
if (!name) {
|
||||
@@ -185,7 +185,7 @@ export async function POST(request) {
|
||||
provider,
|
||||
authType: isWebCookieProvider ? "cookie" : "apikey",
|
||||
name,
|
||||
apiKey,
|
||||
apiKey: apiKey || "",
|
||||
priority: priority || 1,
|
||||
globalPriority: globalPriority || null,
|
||||
defaultModel: defaultModel || null,
|
||||
|
||||
@@ -49,7 +49,7 @@ async function probeMediaProvider(provider, apiKey) {
|
||||
const kinds = p.serviceKinds || ["llm"];
|
||||
const isMediaOnly = kinds.every((k) => MEDIA_KINDS.has(k));
|
||||
if (!isMediaOnly) return null;
|
||||
const cfg = p.ttsConfig || p.embeddingConfig || p.imageConfig || p.videoConfig || p.musicConfig;
|
||||
const cfg = p.ttsConfig || p.sttConfig || p.embeddingConfig || p.imageConfig || p.videoConfig || p.musicConfig;
|
||||
// No probe config → best-effort accept (validate at usage time)
|
||||
if (!cfg) return true;
|
||||
if (p.noAuth || cfg.authType === "none") return true;
|
||||
|
||||
@@ -5,6 +5,7 @@ import { getProviderConnectionById, updateProviderConnection } from "@/lib/local
|
||||
import { getUsageForProvider } from "open-sse/services/usage.js";
|
||||
import { getExecutor } from "open-sse/executors/index.js";
|
||||
import { resolveConnectionProxyConfig } from "@/lib/network/connectionProxy";
|
||||
import { USAGE_APIKEY_PROVIDERS } from "@/shared/constants/providers";
|
||||
|
||||
// Detect auth-expired messages returned by usage providers instead of throwing
|
||||
const AUTH_EXPIRED_PATTERNS = ["expired", "authentication", "unauthorized", "401", "re-authorize"];
|
||||
@@ -113,9 +114,14 @@ export async function GET(request, { params }) {
|
||||
return Response.json({ error: "Connection not found" }, { status: 404 });
|
||||
}
|
||||
|
||||
// Only OAuth connections have usage APIs
|
||||
if (connection.authType !== "oauth") {
|
||||
return Response.json({ message: "Usage not available for API key connections" });
|
||||
// Allow OAuth connections, plus whitelisted apikey providers (glm/minimax/...)
|
||||
const isOAuth = connection.authType === "oauth";
|
||||
const isApikeyEligible =
|
||||
connection.authType === "apikey" &&
|
||||
USAGE_APIKEY_PROVIDERS.includes(connection.provider);
|
||||
|
||||
if (!isOAuth && !isApikeyEligible) {
|
||||
return Response.json({ message: "Usage not available for this connection" });
|
||||
}
|
||||
|
||||
// Resolve connection proxy config; force strictProxy=false so quota/refresh fall back to direct on failure
|
||||
@@ -128,23 +134,25 @@ export async function GET(request, { params }) {
|
||||
strictProxy: false,
|
||||
};
|
||||
|
||||
// Refresh credentials if needed using executor
|
||||
try {
|
||||
const result = await refreshAndUpdateCredentials(connection, false, proxyOptions);
|
||||
connection = result.connection;
|
||||
} catch (refreshError) {
|
||||
console.error("[Usage API] Credential refresh failed:", refreshError);
|
||||
return Response.json({
|
||||
error: `Credential refresh failed: ${refreshError.message}`
|
||||
}, { status: 401 });
|
||||
// Refresh credentials only for OAuth connections (apikey has no token refresh)
|
||||
if (isOAuth) {
|
||||
try {
|
||||
const result = await refreshAndUpdateCredentials(connection, false, proxyOptions);
|
||||
connection = result.connection;
|
||||
} catch (refreshError) {
|
||||
console.error("[Usage API] Credential refresh failed:", refreshError);
|
||||
return Response.json({
|
||||
error: `Credential refresh failed: ${refreshError.message}`
|
||||
}, { status: 401 });
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch usage from provider API
|
||||
let usage = await getUsageForProvider(connection, proxyOptions);
|
||||
|
||||
// If provider returned an auth-expired message instead of throwing,
|
||||
// force-refresh token and retry once
|
||||
if (isAuthExpiredMessage(usage) && connection.refreshToken) {
|
||||
// force-refresh token and retry once (OAuth only)
|
||||
if (isOAuth && isAuthExpiredMessage(usage) && connection.refreshToken) {
|
||||
try {
|
||||
const retryResult = await refreshAndUpdateCredentials(connection, true, proxyOptions);
|
||||
connection = retryResult.connection;
|
||||
|
||||
19
src/app/api/v1/audio/transcriptions/route.js
Normal file
@@ -0,0 +1,19 @@
|
||||
import { handleStt } from "@/sse/handlers/stt.js";
|
||||
|
||||
// Allow large audio uploads — 5min for processing large files
|
||||
export const maxDuration = 300;
|
||||
|
||||
// CORS preflight for /v1/audio/transcriptions — allow POST from any origin.
export async function OPTIONS() {
  const corsHeaders = {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "POST, OPTIONS",
    "Access-Control-Allow-Headers": "*",
  };
  return new Response(null, { headers: corsHeaders });
}
|
||||
|
||||
/**
 * POST /v1/audio/transcriptions — OpenAI Whisper-compatible STT endpoint.
 * Delegates the full multipart request to the shared STT handler.
 */
export async function POST(request) {
  return handleStt(request);
}
|
||||
67
src/lib/disabledModelsDb.js
Normal file
@@ -0,0 +1,67 @@
|
||||
import { Low } from "lowdb";
|
||||
import { JSONFile } from "lowdb/node";
|
||||
import path from "node:path";
|
||||
import fs from "node:fs";
|
||||
import { DATA_DIR } from "@/lib/dataDir.js";
|
||||
|
||||
const DB_FILE = path.join(DATA_DIR, "disabledModels.json");
|
||||
|
||||
if (!fs.existsSync(DATA_DIR)) fs.mkdirSync(DATA_DIR, { recursive: true });
|
||||
|
||||
const defaultData = { disabled: {} };
|
||||
|
||||
let dbInstance = null;
|
||||
|
||||
/**
 * Lazily create the lowdb singleton backed by disabledModels.json.
 * A corrupt JSON file (SyntaxError on read) is reset to defaults instead of
 * crashing; other read errors propagate. The loaded data is normalized so
 * `data.disabled` always exists.
 */
async function getDb() {
  if (!dbInstance) {
    dbInstance = new Low(new JSONFile(DB_FILE), defaultData);
    try {
      await dbInstance.read();
    } catch (error) {
      if (!(error instanceof SyntaxError)) throw error;
      // Corrupt file on disk — start over with defaults.
      dbInstance.data = { ...defaultData };
      await dbInstance.write();
    }
    if (!dbInstance.data || typeof dbInstance.data !== "object") {
      dbInstance.data = { ...defaultData };
    }
    if (!dbInstance.data.disabled) dbInstance.data.disabled = {};
  }
  return dbInstance;
}
|
||||
|
||||
/** Full disabled-model map: { providerAlias: [modelId, ...] }. */
export async function getDisabledModels() {
  const { data } = await getDb();
  return data.disabled || {};
}
|
||||
|
||||
/** Disabled model ids for one provider alias (empty array when none). */
export async function getDisabledByProvider(providerAlias) {
  const map = await getDisabledModels();
  return map[providerAlias] || [];
}
|
||||
|
||||
/** Add model ids to a provider's disabled set (deduplicated); no-op on bad input. */
export async function disableModels(providerAlias, ids) {
  if (!providerAlias || !Array.isArray(ids)) return;
  const db = await getDb();
  const merged = new Set(db.data.disabled[providerAlias] || []);
  for (const id of ids) merged.add(id);
  db.data.disabled[providerAlias] = [...merged];
  await db.write();
}
|
||||
|
||||
/**
 * Remove model ids from a provider's disabled set.
 * With no ids (or an empty array) the provider's entire entry is cleared.
 * Empty entries are deleted rather than stored as empty arrays.
 */
export async function enableModels(providerAlias, ids) {
  if (!providerAlias) return;
  const db = await getDb();
  const clearAll = !Array.isArray(ids) || ids.length === 0;
  if (clearAll) {
    delete db.data.disabled[providerAlias];
  } else {
    const toRemove = new Set(ids);
    const remaining = (db.data.disabled[providerAlias] || []).filter((id) => !toRemove.has(id));
    if (remaining.length === 0) delete db.data.disabled[providerAlias];
    else db.data.disabled[providerAlias] = remaining;
  }
  await db.write();
}
|
||||
@@ -1,17 +1,23 @@
|
||||
import initializeApp from "@/shared/services/initializeApp";
|
||||
|
||||
let initialized = false;
|
||||
// Survive Next.js HMR — module-level flag resets on reload, globalThis persists
|
||||
const g = globalThis.__cloudSyncInit ??= { initialized: false, inProgress: null };
|
||||
|
||||
export async function ensureAppInitialized() {
|
||||
if (!initialized) {
|
||||
if (g.initialized) return true;
|
||||
if (g.inProgress) return g.inProgress;
|
||||
g.inProgress = (async () => {
|
||||
try {
|
||||
await initializeApp();
|
||||
initialized = true;
|
||||
g.initialized = true;
|
||||
} catch (error) {
|
||||
console.error("[ServerInit] Error initializing app:", error);
|
||||
} finally {
|
||||
g.inProgress = null;
|
||||
}
|
||||
}
|
||||
return initialized;
|
||||
return g.initialized;
|
||||
})();
|
||||
return g.inProgress;
|
||||
}
|
||||
|
||||
// Auto-initialize at runtime only, not during next build
|
||||
|
||||
@@ -119,41 +119,133 @@ let codexProxyServer = null;
|
||||
let codexProxyTimeout = null;
|
||||
|
||||
const CODEX_PROXY_TIMEOUT_MS = 300000; // 5 minutes
|
||||
const CODEX_PORT = 1455;
|
||||
|
||||
// Pending exchange sessions keyed by state — used by server-side exchange mode
|
||||
const pendingExchanges = new Map();
|
||||
|
||||
/**
|
||||
* Start a proxy server on Codex fixed port (1455) that redirects callback to the app port.
|
||||
* Returns { success: true } if started, or { success: false } if port is busy.
|
||||
* Register a pending exchange session for server-side mode.
|
||||
* Modal client calls this before opening popup.
|
||||
*/
|
||||
/**
 * Register a pending server-side exchange session keyed by OAuth state.
 * The modal client calls this before opening the auth popup.
 * Returns false when any required field is missing.
 */
export function registerCodexSession({ state, codeVerifier, redirectUri }) {
  const complete = Boolean(state && codeVerifier && redirectUri);
  if (!complete) return false;
  const session = {
    codeVerifier,
    redirectUri,
    status: "pending",
    createdAt: Date.now(),
  };
  pendingExchanges.set(state, session);
  return true;
}
|
||||
|
||||
/**
|
||||
* Read session status (modal polls this).
|
||||
*/
|
||||
/** Read a session by state (the modal polls this); null when unknown. */
export function getCodexSessionStatus(state) {
  const session = pendingExchanges.get(state);
  return session ?? null;
}
|
||||
|
||||
/**
|
||||
* Clear a session (called after modal consumes status).
|
||||
*/
|
||||
/** Drop a session after the modal has consumed its final status. */
export function clearCodexSession(state) {
  pendingExchanges.delete(state);
}
|
||||
|
||||
// Render the small self-closing HTML page shown in the OAuth popup after
// a server-side code exchange (green check on success, red cross on failure).
function renderCodexResultPage(success, message) {
  const theme = success
    ? { color: "#22c55e", icon: "✓", title: "Authentication Successful" }
    : { color: "#ef4444", icon: "✗", title: "Authentication Failed" };
  const { color, icon, title } = theme;
  return `<!DOCTYPE html>
<html><head><meta charset="utf-8"><title>${title}</title>
<style>body{font-family:system-ui;display:flex;justify-content:center;align-items:center;height:100vh;margin:0;background:#f5f5f5}.c{text-align:center;padding:2rem;background:#fff;border-radius:8px;box-shadow:0 2px 10px rgba(0,0,0,.1)}.i{color:${color};font-size:3rem}h1{margin:1rem 0}p{color:#666}</style>
</head><body><div class="c"><div class="i">${icon}</div><h1>${title}</h1><p>${message}</p><p>Closing in <span id="cd">3</span>s...</p>
<script>let n=3;const c=document.getElementById("cd");const t=setInterval(()=>{n--;c.textContent=n;if(n<=0){clearInterval(t);window.close();}},1000);</script>
</div></body></html>`;
}
|
||||
|
||||
/**
|
||||
* Start Codex proxy on fixed port 1455.
|
||||
* Mode A (server-side): if any session was registered, proxy auto-exchanges + saves DB.
|
||||
* Mode B (channel fallback): if no session, proxy 302 redirects to app port for legacy channel-based flow.
|
||||
*/
|
||||
export function startCodexProxy(appPort) {
|
||||
return new Promise((resolve) => {
|
||||
// Already running
|
||||
if (codexProxyServer) {
|
||||
resolve({ success: true });
|
||||
return;
|
||||
}
|
||||
|
||||
const CODEX_PORT = 1455;
|
||||
const server = http.createServer((req, res) => {
|
||||
const server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url, "http://localhost");
|
||||
|
||||
if (url.pathname === "/callback" || url.pathname === "/auth/callback") {
|
||||
// Redirect to app port with all query params preserved
|
||||
const redirectUrl = `http://localhost:${appPort}/callback${url.search}`;
|
||||
res.writeHead(302, { Location: redirectUrl });
|
||||
res.end();
|
||||
|
||||
// Auto-close after redirect
|
||||
stopCodexProxy();
|
||||
if (url.pathname !== "/callback" && url.pathname !== "/auth/callback") {
|
||||
res.writeHead(404);
|
||||
res.end("Not found");
|
||||
return;
|
||||
}
|
||||
|
||||
res.writeHead(404);
|
||||
res.end("Not found");
|
||||
const code = url.searchParams.get("code");
|
||||
const state = url.searchParams.get("state");
|
||||
const errorParam = url.searchParams.get("error");
|
||||
const session = state ? pendingExchanges.get(state) : null;
|
||||
|
||||
// Mode A: server-side exchange (session registered)
|
||||
if (session) {
|
||||
try {
|
||||
if (errorParam) {
|
||||
throw new Error(url.searchParams.get("error_description") || errorParam);
|
||||
}
|
||||
if (!code) throw new Error("No authorization code received");
|
||||
|
||||
// Lazy import to avoid circular deps
|
||||
const { exchangeTokens } = await import("../providers.js");
|
||||
const { createProviderConnection } = await import("@/models");
|
||||
|
||||
const tokenData = await exchangeTokens(
|
||||
"codex",
|
||||
code,
|
||||
session.redirectUri,
|
||||
session.codeVerifier,
|
||||
state
|
||||
);
|
||||
const connection = await createProviderConnection({
|
||||
provider: "codex",
|
||||
authType: "oauth",
|
||||
...tokenData,
|
||||
expiresAt: tokenData.expiresIn
|
||||
? new Date(Date.now() + tokenData.expiresIn * 1000).toISOString()
|
||||
: null,
|
||||
testStatus: "active",
|
||||
});
|
||||
|
||||
session.status = "done";
|
||||
session.connectionId = connection.id;
|
||||
session.email = connection.email;
|
||||
|
||||
res.writeHead(200, { "Content-Type": "text/html; charset=utf-8" });
|
||||
res.end(renderCodexResultPage(true, "You can close this window."));
|
||||
} catch (err) {
|
||||
session.status = "error";
|
||||
session.error = err.message;
|
||||
res.writeHead(200, { "Content-Type": "text/html; charset=utf-8" });
|
||||
res.end(renderCodexResultPage(false, err.message));
|
||||
} finally {
|
||||
stopCodexProxy();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Mode B: legacy channel fallback — 302 redirect to app /callback
|
||||
const redirectUrl = `http://localhost:${appPort}/callback${url.search}`;
|
||||
res.writeHead(302, { Location: redirectUrl });
|
||||
res.end();
|
||||
stopCodexProxy();
|
||||
});
|
||||
|
||||
server.listen(CODEX_PORT, "127.0.0.1", () => {
|
||||
codexProxyServer = server;
|
||||
// Auto-cleanup after timeout
|
||||
codexProxyTimeout = setTimeout(() => stopCodexProxy(), CODEX_PROXY_TIMEOUT_MS);
|
||||
resolve({ success: true });
|
||||
});
|
||||
|
||||
@@ -4,7 +4,32 @@ const path = require("path");
|
||||
const os = require("os");
|
||||
const { log, err } = require("../logger");
|
||||
const { TOOL_HOSTS } = require("../../shared/constants/mitmToolHosts");
|
||||
const { runElevatedPowerShell, quotePs, isAdmin } = require("../winElevated.js");
|
||||
const { runElevatedPowerShell, isAdmin } = require("../winElevated.js");
|
||||
|
||||
/**
|
||||
* Atomic-ish write for Windows hosts file with rollback on failure.
|
||||
* Strategy: write `.new` sibling → rename current to `.bak` → rename `.new` to target.
|
||||
* If anything fails mid-way, restore from `.bak`. Same-volume renames are atomic on NTFS.
|
||||
*/
|
||||
function atomicWriteHostsWin(target, originalContent, newContent) {
|
||||
const tmpNew = `${target}.9router.new`;
|
||||
const tmpBak = `${target}.9router.bak`;
|
||||
try {
|
||||
fs.writeFileSync(tmpNew, newContent, "utf8");
|
||||
try { fs.unlinkSync(tmpBak); } catch { /* none */ }
|
||||
fs.renameSync(target, tmpBak);
|
||||
try {
|
||||
fs.renameSync(tmpNew, target);
|
||||
} catch (e) {
|
||||
// Rollback: restore original
|
||||
try { fs.renameSync(tmpBak, target); } catch { fs.writeFileSync(target, originalContent, "utf8"); }
|
||||
throw e;
|
||||
}
|
||||
try { fs.unlinkSync(tmpBak); } catch { /* best effort */ }
|
||||
} finally {
|
||||
try { fs.unlinkSync(tmpNew); } catch { /* already moved or never created */ }
|
||||
}
|
||||
}
|
||||
|
||||
const IS_WIN = process.platform === "win32";
|
||||
const IS_MAC = process.platform === "darwin";
|
||||
@@ -130,16 +155,13 @@ async function addDNSEntry(tool, sudoPassword) {
|
||||
|
||||
try {
|
||||
if (IS_WIN) {
|
||||
// Read → trim → append → write (avoids stacked blank lines from Add-Content)
|
||||
// Read → trim → append → atomic write (Node-side, no CLI size limit)
|
||||
const current = fs.readFileSync(HOSTS_FILE, "utf8");
|
||||
const trimmed = current.replace(/[\r\n\s]+$/g, "");
|
||||
const toAppend = entriesToAdd.map(h => `127.0.0.1 ${h}`).join("\r\n");
|
||||
const next = `${trimmed}\r\n${toAppend}\r\n`;
|
||||
const script = `
|
||||
Set-Content -LiteralPath ${quotePs(HOSTS_FILE)} -Value ${quotePs(next)} -NoNewline
|
||||
ipconfig /flushdns | Out-Null
|
||||
`;
|
||||
await runElevatedPowerShell(script);
|
||||
atomicWriteHostsWin(HOSTS_FILE, current, next);
|
||||
await runElevatedPowerShell("ipconfig /flushdns | Out-Null");
|
||||
} else {
|
||||
const current = fs.readFileSync(HOSTS_FILE, "utf8");
|
||||
const trimmed = current.replace(/[\r\n\s]+$/g, "");
|
||||
@@ -175,11 +197,8 @@ async function removeDNSEntry(tool, sudoPassword) {
|
||||
const current = fs.readFileSync(HOSTS_FILE, "utf8");
|
||||
const filtered = current.split(/\r?\n/).filter(l => !entriesToRemove.some(h => l.includes(h))).join("\r\n");
|
||||
const next = filtered.replace(/[\r\n\s]+$/g, "") + "\r\n";
|
||||
const script = `
|
||||
Set-Content -LiteralPath ${quotePs(HOSTS_FILE)} -Value ${quotePs(next)} -NoNewline
|
||||
ipconfig /flushdns | Out-Null
|
||||
`;
|
||||
await runElevatedPowerShell(script);
|
||||
atomicWriteHostsWin(HOSTS_FILE, current, next);
|
||||
await runElevatedPowerShell("ipconfig /flushdns | Out-Null");
|
||||
} else {
|
||||
const current = fs.readFileSync(HOSTS_FILE, "utf8");
|
||||
const filtered = current.split(/\r?\n/).filter(l => !entriesToRemove.some(h => l.includes(h))).join("\n");
|
||||
|
||||
@@ -7,6 +7,7 @@ import PropTypes from "prop-types";
|
||||
import ProviderIcon from "@/shared/components/ProviderIcon";
|
||||
import HeaderMenu from "@/shared/components/HeaderMenu";
|
||||
import ThemeToggle from "@/shared/components/ThemeToggle";
|
||||
import { useHeaderSearchStore } from "@/store/headerSearchStore";
|
||||
import { OAUTH_PROVIDERS, APIKEY_PROVIDERS } from "@/shared/constants/config";
|
||||
import { MEDIA_PROVIDER_KINDS, AI_PROVIDERS } from "@/shared/constants/providers";
|
||||
import { translate } from "@/i18n/runtime";
|
||||
@@ -265,6 +266,7 @@ export default function Header({ onMenuClick, showMenuButton = true }) {
|
||||
|
||||
{/* Right actions */}
|
||||
<div className="flex items-center gap-1 shrink-0">
|
||||
<HeaderSearch />
|
||||
<ThemeToggle />
|
||||
<HeaderMenu onLogout={handleLogout} />
|
||||
</div>
|
||||
@@ -272,6 +274,40 @@ export default function Header({ onMenuClick, showMenuButton = true }) {
|
||||
);
|
||||
}
|
||||
|
||||
function HeaderSearch() {
|
||||
const visible = useHeaderSearchStore((s) => s.visible);
|
||||
const query = useHeaderSearchStore((s) => s.query);
|
||||
const placeholder = useHeaderSearchStore((s) => s.placeholder);
|
||||
const setQuery = useHeaderSearchStore((s) => s.setQuery);
|
||||
|
||||
if (!visible) return null;
|
||||
|
||||
return (
|
||||
<div className="relative w-[160px] sm:w-[220px]">
|
||||
<span className="material-symbols-outlined absolute left-2 top-1/2 -translate-y-1/2 text-text-muted text-[16px] pointer-events-none">
|
||||
search
|
||||
</span>
|
||||
<input
|
||||
type="text"
|
||||
value={query}
|
||||
onChange={(e) => setQuery(e.target.value)}
|
||||
placeholder={placeholder}
|
||||
className="w-full h-8 pl-7 pr-7 rounded-lg border border-border bg-surface/60 text-sm focus:outline-none focus:border-primary/50 transition-colors"
|
||||
/>
|
||||
{query && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setQuery("")}
|
||||
className="absolute right-1 top-1/2 -translate-y-1/2 text-text-muted hover:text-text-main p-0.5 rounded"
|
||||
aria-label="Clear search"
|
||||
>
|
||||
<span className="material-symbols-outlined text-[16px]">close</span>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
Header.propTypes = {
|
||||
onMenuClick: PropTypes.func,
|
||||
showMenuButton: PropTypes.bool,
|
||||
|
||||
@@ -40,6 +40,7 @@ export default function ModelSelectModal({
|
||||
const [combos, setCombos] = useState([]);
|
||||
const [providerNodes, setProviderNodes] = useState([]);
|
||||
const [customModels, setCustomModels] = useState([]);
|
||||
const [disabledModels, setDisabledModels] = useState({});
|
||||
|
||||
const fetchCombos = async () => {
|
||||
try {
|
||||
@@ -89,6 +90,22 @@ export default function ModelSelectModal({
|
||||
if (isOpen) fetchCustomModels();
|
||||
}, [isOpen]);
|
||||
|
||||
const fetchDisabledModels = async () => {
|
||||
try {
|
||||
const res = await fetch("/api/models/disabled");
|
||||
if (!res.ok) throw new Error(`Failed to fetch disabled models: ${res.status}`);
|
||||
const data = await res.json();
|
||||
setDisabledModels(data.disabled || {});
|
||||
} catch (error) {
|
||||
console.error("Error fetching disabled models:", error);
|
||||
setDisabledModels({});
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (isOpen) fetchDisabledModels();
|
||||
}, [isOpen]);
|
||||
|
||||
const allProviders = useMemo(() => ({ ...OAUTH_PROVIDERS, ...FREE_PROVIDERS, ...FREE_TIER_PROVIDERS, ...APIKEY_PROVIDERS }), []);
|
||||
|
||||
// Group models by provider with priority order
|
||||
@@ -104,7 +121,9 @@ export default function ModelSelectModal({
|
||||
|
||||
// Filter a models[] array by kindFilter (keep only matching m.type)
|
||||
const filterByKind = (models) => {
|
||||
if (!kindFilter || !TYPED_KINDS.has(kindFilter)) return models;
|
||||
// No kindFilter → LLM context: keep only LLM models (no type or type === "llm")
|
||||
if (!kindFilter) return models.filter((m) => m.isPlaceholder || !m.type || m.type === "llm");
|
||||
if (!TYPED_KINDS.has(kindFilter)) return models;
|
||||
return models.filter((m) => m.isPlaceholder || m.type === kindFilter);
|
||||
};
|
||||
|
||||
@@ -239,11 +258,18 @@ export default function ModelSelectModal({
|
||||
.filter((m) => m.providerAlias === alias && !hardcodedIds.has(m.id) && !customAliasIds.has(m.id))
|
||||
.map((m) => ({ id: m.id, name: m.name || m.id, value: `${alias}/${m.id}`, isCustom: true }));
|
||||
|
||||
let allModels = filterByKind([
|
||||
const merged = [
|
||||
...hardcodedModels.map((m) => ({ id: m.id, name: m.name, value: `${alias}/${m.id}`, type: m.type })),
|
||||
...customAliasModels,
|
||||
...customRegisteredModels,
|
||||
]);
|
||||
];
|
||||
// Dedupe by value (alias may equal hardcoded id, causing React key collision)
|
||||
const seen = new Set();
|
||||
let allModels = filterByKind(merged.filter((m) => {
|
||||
if (seen.has(m.value)) return false;
|
||||
seen.add(m.value);
|
||||
return true;
|
||||
}));
|
||||
|
||||
// Provider-as-model fallback: providers that support the kind but have no hardcoded models
|
||||
// can still be picked (value = providerAlias). Skips embedding (always needs model).
|
||||
@@ -265,8 +291,20 @@ export default function ModelSelectModal({
|
||||
}
|
||||
});
|
||||
|
||||
// Filter out disabled models per provider (disabled keyed by storage alias OR providerId)
|
||||
Object.entries(groups).forEach(([providerId, group]) => {
|
||||
const aliasKey = getProviderAlias(providerId);
|
||||
const disabledIds = new Set([
|
||||
...(disabledModels[aliasKey] || []),
|
||||
...(disabledModels[providerId] || []),
|
||||
]);
|
||||
if (disabledIds.size === 0) return;
|
||||
group.models = group.models.filter((m) => !disabledIds.has(m.id));
|
||||
if (group.models.length === 0) delete groups[providerId];
|
||||
});
|
||||
|
||||
return groups;
|
||||
}, [filteredActiveProviders, modelAliases, allProviders, providerNodes, customModels, kindFilter]);
|
||||
}, [filteredActiveProviders, modelAliases, allProviders, providerNodes, customModels, disabledModels, kindFilter, activeProviders]);
|
||||
|
||||
// Filter combos by search query (and hide combos when kindFilter is set — combos are LLM-only by design)
|
||||
const filteredCombos = useMemo(() => {
|
||||
|
||||
@@ -173,24 +173,13 @@ export default function OAuthModal({ isOpen, provider, providerInfo, onSuccess,
|
||||
// Authorization code flow - build redirect URI (some providers require fixed ports)
|
||||
const appPort = window.location.port || (window.location.protocol === "https:" ? "443" : "80");
|
||||
let redirectUri;
|
||||
let codexProxyActive = false;
|
||||
|
||||
if (provider === "codex") {
|
||||
// Try to start proxy on fixed port 1455 → redirect callback to app port
|
||||
try {
|
||||
const proxyRes = await fetch(`/api/oauth/codex/start-proxy?app_port=${appPort}`);
|
||||
const proxyData = await proxyRes.json();
|
||||
codexProxyActive = proxyData.success;
|
||||
} catch {
|
||||
codexProxyActive = false;
|
||||
}
|
||||
// Always use fixed port 1455 as redirect_uri (Codex requirement)
|
||||
redirectUri = "http://localhost:1455/auth/callback";
|
||||
} else {
|
||||
redirectUri = `http://localhost:${appPort}/callback`;
|
||||
}
|
||||
|
||||
// Build authorize URL, optionally passing provider-specific metadata (e.g. gitlab clientId)
|
||||
// Build authorize URL first to get codeVerifier/state for codex server-side mode
|
||||
const authorizeUrl = new URL(`/api/oauth/${provider}/authorize`, window.location.origin);
|
||||
authorizeUrl.searchParams.set("redirect_uri", redirectUri);
|
||||
if (oauthMeta) {
|
||||
@@ -200,10 +189,29 @@ export default function OAuthModal({ isOpen, provider, providerInfo, onSuccess,
|
||||
const data = await res.json();
|
||||
if (!res.ok) throw new Error(data.error);
|
||||
|
||||
setAuthData({ ...data, redirectUri });
|
||||
// Codex: start proxy with server-side session (auto-exchange) + fallback to channels
|
||||
let codexProxyActive = false;
|
||||
let codexServerSide = false;
|
||||
if (provider === "codex") {
|
||||
try {
|
||||
const proxyUrl = new URL(`/api/oauth/codex/start-proxy`, window.location.origin);
|
||||
proxyUrl.searchParams.set("app_port", appPort);
|
||||
proxyUrl.searchParams.set("state", data.state);
|
||||
proxyUrl.searchParams.set("code_verifier", data.codeVerifier);
|
||||
proxyUrl.searchParams.set("redirect_uri", redirectUri);
|
||||
const proxyRes = await fetch(proxyUrl.toString());
|
||||
const proxyData = await proxyRes.json();
|
||||
codexProxyActive = proxyData.success;
|
||||
codexServerSide = !!proxyData.serverSide;
|
||||
} catch {
|
||||
codexProxyActive = false;
|
||||
}
|
||||
}
|
||||
|
||||
setAuthData({ ...data, redirectUri, codexServerSide });
|
||||
|
||||
if (provider === "codex" && codexProxyActive) {
|
||||
// Proxy active: callback will redirect to app port automatically
|
||||
// Proxy active: callback will be handled server-side (auto-exchange) or via channels (fallback)
|
||||
setStep("waiting");
|
||||
popupRef.current = window.open(data.authUrl, "oauth_popup", "width=600,height=700");
|
||||
if (!popupRef.current) {
|
||||
@@ -247,6 +255,49 @@ export default function OAuthModal({ isOpen, provider, providerInfo, onSuccess,
|
||||
}
|
||||
}, [isOpen, provider, startOAuthFlow]);
|
||||
|
||||
// Codex server-side mode: poll status (proxy auto-exchanges + saves DB)
|
||||
useEffect(() => {
|
||||
if (!authData?.codexServerSide || !authData?.state) return;
|
||||
if (callbackProcessedRef.current) return;
|
||||
let cancelled = false;
|
||||
const POLL_INTERVAL_MS = 1500;
|
||||
const MAX_ATTEMPTS = 200; // ~5 minutes
|
||||
let attempts = 0;
|
||||
|
||||
const tick = async () => {
|
||||
if (cancelled || callbackProcessedRef.current) return;
|
||||
attempts += 1;
|
||||
try {
|
||||
const res = await fetch(`/api/oauth/codex/poll-status?state=${encodeURIComponent(authData.state)}`);
|
||||
const data = await res.json();
|
||||
if (cancelled || callbackProcessedRef.current) return;
|
||||
if (data.status === "done") {
|
||||
callbackProcessedRef.current = true;
|
||||
setStep("success");
|
||||
onSuccess?.();
|
||||
return;
|
||||
}
|
||||
if (data.status === "error") {
|
||||
callbackProcessedRef.current = true;
|
||||
setError(data.error || "Authentication failed");
|
||||
setStep("error");
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
// Network error, keep polling
|
||||
}
|
||||
if (attempts >= MAX_ATTEMPTS) {
|
||||
callbackProcessedRef.current = true;
|
||||
setError("Authentication timeout");
|
||||
setStep("error");
|
||||
return;
|
||||
}
|
||||
setTimeout(tick, POLL_INTERVAL_MS);
|
||||
};
|
||||
setTimeout(tick, POLL_INTERVAL_MS);
|
||||
return () => { cancelled = true; };
|
||||
}, [authData, onSuccess]);
|
||||
|
||||
// Listen for OAuth callback via multiple methods
|
||||
useEffect(() => {
|
||||
if (!authData) return;
|
||||
|
||||
@@ -12,7 +12,7 @@ import Button from "./Button";
|
||||
import { ConfirmModal } from "./Modal";
|
||||
|
||||
// const VISIBLE_MEDIA_KINDS = ["embedding", "image", "imageToText", "tts", "stt", "webSearch", "webFetch", "video", "music"];
|
||||
const VISIBLE_MEDIA_KINDS = ["embedding", "image", "tts"];
|
||||
const VISIBLE_MEDIA_KINDS = ["embedding", "image", "tts", "stt"];
|
||||
// Combined entry: webSearch + webFetch share one page at /dashboard/media-providers/web
|
||||
const COMBINED_WEB_ITEM = { id: "web", label: "Web Fetch & Search", icon: "travel_explore", href: "/dashboard/media-providers/web" };
|
||||
|
||||
|
||||
@@ -114,6 +114,22 @@ export const CLI_TOOLS = {
|
||||
description: "OpenCode AI Terminal Assistant",
|
||||
configType: "custom",
|
||||
},
|
||||
cowork: {
|
||||
id: "cowork",
|
||||
name: "Claude Cowork",
|
||||
image: "/providers/claude.png",
|
||||
color: "#D97757",
|
||||
description: "Claude Desktop Cowork (third-party inference)",
|
||||
configType: "custom",
|
||||
},
|
||||
hermes: {
|
||||
id: "hermes",
|
||||
name: "Hermes Agent",
|
||||
image: "/providers/hermes.png",
|
||||
color: "#8B5CF6",
|
||||
description: "Nous Research self-improving AI agent",
|
||||
configType: "custom",
|
||||
},
|
||||
droid: {
|
||||
id: "droid",
|
||||
name: "Factory Droid",
|
||||
@@ -212,14 +228,6 @@ export const CLI_TOOLS = {
|
||||
}`,
|
||||
},
|
||||
},
|
||||
hermes: {
|
||||
id: "hermes",
|
||||
name: "Hermes Agent",
|
||||
image: "/providers/hermes.png",
|
||||
color: "#8B5CF6",
|
||||
description: "Nous Research self-improving AI agent",
|
||||
configType: "custom",
|
||||
},
|
||||
// HIDDEN: gemini-cli
|
||||
// "gemini-cli": {
|
||||
// id: "gemini-cli",
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
// Free Providers (kiro first, iflow last)
|
||||
export const FREE_PROVIDERS = {
|
||||
kiro: { id: "kiro", alias: "kr", name: "Kiro AI", icon: "psychology_alt", color: "#FF6B35", website: "https://kiro.dev", notice: { signupUrl: "https://kiro.dev" } },
|
||||
qwen: { id: "qwen", alias: "qw", name: "Qwen Code", icon: "psychology", color: "#10B981", deprecated: true, deprecationNotice: "Qwen OAuth free tier was discontinued by Alibaba on 2026-04-15. New connections will not work.", website: "https://chat.qwen.ai", notice: { signupUrl: "https://chat.qwen.ai" }, serviceKinds: ["llm", "tts", "stt"], ttsConfig: { baseUrl: "http://localhost:8000/v1/audio/speech", authType: "none", authHeader: "none", format: "openai", models: [{ id: "qwen3-tts", name: "Qwen3 TTS" }] } },
|
||||
qwen: { id: "qwen", alias: "qw", name: "Qwen Code", icon: "psychology", color: "#10B981", mediaPriority: 999, deprecated: true, deprecationNotice: "Qwen OAuth free tier was discontinued by Alibaba on 2026-04-15. New connections will not work.", website: "https://chat.qwen.ai", notice: { signupUrl: "https://chat.qwen.ai" }, serviceKinds: ["llm", "tts"], ttsConfig: { baseUrl: "http://localhost:8000/v1/audio/speech", authType: "none", authHeader: "none", format: "openai", models: [{ id: "qwen3-tts", name: "Qwen3 TTS" }] } },
|
||||
"gemini-cli": { id: "gemini-cli", alias: "gc", name: "Gemini CLI", icon: "terminal", color: "#4285F4", deprecated: true, deprecationNotice: "Gemini CLI is designed exclusively for Gemini CLI. Using it with other tools (OpenClaw, Claude, Codex...) may result in account restrictions or bans.", website: "https://github.com/google-gemini/gemini-cli", notice: { signupUrl: "https://github.com/google-gemini/gemini-cli" } },
|
||||
// gitlab: { id: "gitlab", alias: "gl", name: "GitLab Duo", icon: "code", color: "#FC6D26" },
|
||||
// codebuddy: { id: "codebuddy", alias: "cb", name: "CodeBuddy", icon: "smart_toy", color: "#006EFF" },
|
||||
@@ -15,10 +15,10 @@ export const FREE_PROVIDERS = {
|
||||
// Free Tier Providers (has free access but may require account/API key)
|
||||
export const FREE_TIER_PROVIDERS = {
|
||||
openrouter: { id: "openrouter", alias: "openrouter", name: "OpenRouter", icon: "router", color: "#F97316", textIcon: "OR", website: "https://openrouter.ai", notice: { text: "Free tier: 27+ free models, no credit card needed, 200 req/day. After $10 credit: 1,000 req/day.", apiKeyUrl: "https://openrouter.ai/settings/keys" }, modelsFetcher: { url: "https://openrouter.ai/api/v1/models", type: "openrouter-free" }, passthroughModels: true, serviceKinds: ["llm", "embedding", "tts", "imageToText"], embeddingConfig: { baseUrl: "https://openrouter.ai/api/v1/embeddings", authType: "apikey", authHeader: "bearer", models: [{ id: "openai/text-embedding-3-small", name: "Text Embedding 3 Small (OpenRouter)", dimensions: 1536 }, { id: "openai/text-embedding-3-large", name: "Text Embedding 3 Large (OpenRouter)", dimensions: 3072 }, { id: "openai/text-embedding-ada-002", name: "Text Embedding Ada 002 (OpenRouter)", dimensions: 1536 }] } },
|
||||
nvidia: { id: "nvidia", alias: "nvidia", name: "NVIDIA NIM", icon: "developer_board", color: "#76B900", textIcon: "NV", website: "https://developer.nvidia.com/nim", notice: { text: "Free access for NVIDIA Developer Program members (prototyping & testing).", apiKeyUrl: "https://build.nvidia.com/settings/api-keys" }, serviceKinds: ["llm", "tts", "embedding", "stt"], ttsConfig: { baseUrl: "https://integrate.api.nvidia.com/v1/audio/speech", authType: "apikey", authHeader: "bearer", format: "nvidia-tts", models: [{ id: "fastpitch", name: "FastPitch" }, { id: "tacotron2", name: "Tacotron2" }] }, embeddingConfig: { baseUrl: "https://integrate.api.nvidia.com/v1/embeddings", authType: "apikey", authHeader: "bearer", models: [{ id: "nvidia/nv-embedqa-e5-v5", name: "NV EmbedQA E5 v5", dimensions: 1024 }] } },
|
||||
nvidia: { id: "nvidia", alias: "nvidia", name: "NVIDIA NIM", icon: "developer_board", color: "#76B900", textIcon: "NV", website: "https://developer.nvidia.com/nim", notice: { text: "Free access for NVIDIA Developer Program members (prototyping & testing).", apiKeyUrl: "https://build.nvidia.com/settings/api-keys" }, serviceKinds: ["llm", "tts", "embedding"], ttsConfig: { baseUrl: "https://integrate.api.nvidia.com/v1/audio/speech", authType: "apikey", authHeader: "bearer", format: "nvidia-tts", models: [{ id: "fastpitch", name: "FastPitch" }, { id: "tacotron2", name: "Tacotron2" }] }, embeddingConfig: { baseUrl: "https://integrate.api.nvidia.com/v1/embeddings", authType: "apikey", authHeader: "bearer", models: [{ id: "nvidia/nv-embedqa-e5-v5", name: "NV EmbedQA E5 v5", dimensions: 1024 }] } },
|
||||
ollama: { id: "ollama", alias: "ollama", name: "Ollama Cloud", icon: "cloud", color: "#ffffffff", textIcon: "OL", website: "https://ollama.com", notice: { text: "Free tier: light usage, 1 cloud model at a time (limits reset every 5h & 7d). Pro $20/mo · Max $100/mo.", apiKeyUrl: "https://ollama.com/settings/keys" } },
|
||||
vertex: { id: "vertex", alias: "vx", name: "Vertex AI", icon: "cloud", color: "#4285F4", textIcon: "VX", website: "https://cloud.google.com/vertex-ai", notice: { text: "New Google Cloud accounts get $300 free credits. Requires GCP project + Service Account with Vertex AI API enabled.", apiKeyUrl: "https://console.cloud.google.com/iam-admin/serviceaccounts" } },
|
||||
gemini: { id: "gemini", alias: "gemini", name: "Gemini", icon: "diamond", color: "#4285F4", textIcon: "GE", website: "https://ai.google.dev", notice: { apiKeyUrl: "https://aistudio.google.com/app/apikey" }, serviceKinds: ["llm", "embedding", "image", "imageToText", "webSearch"], searchViaChat: { defaultModel: "gemini-2.5-flash", pricingUrl: "https://ai.google.dev/pricing", freeTier: "Free tier: 15 RPM, 1M tokens/day on gemini-2.5-flash via AI Studio." }, embeddingConfig: { baseUrl: "https://generativelanguage.googleapis.com/v1beta/models", authType: "apikey", authHeader: "key", models: [{ id: "text-embedding-004", name: "Text Embedding 004", dimensions: 768 }, { id: "embedding-001", name: "Embedding 001", dimensions: 768 }] } },
|
||||
gemini: { id: "gemini", alias: "gemini", name: "Gemini", icon: "diamond", color: "#4285F4", textIcon: "GE", mediaPriority: 1, website: "https://ai.google.dev", notice: { apiKeyUrl: "https://aistudio.google.com/app/apikey" }, serviceKinds: ["llm", "embedding", "image", "imageToText", "webSearch", "tts", "stt"], sttConfig: { baseUrl: "https://generativelanguage.googleapis.com/v1beta/models", authType: "apikey", authHeader: "key", format: "gemini-stt", models: [{ id: "gemini-2.5-pro", name: "Gemini 2.5 Pro (Best)" }, { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash" }, { id: "gemini-2.5-flash-lite", name: "Gemini 2.5 Flash Lite (Cheapest)" }, { id: "gemini-2.0-flash", name: "Gemini 2.0 Flash" }] }, searchViaChat: { defaultModel: "gemini-2.5-flash", pricingUrl: "https://ai.google.dev/pricing", freeTier: "Free tier: 15 RPM, 1M tokens/day on gemini-2.5-flash via AI Studio." }, embeddingConfig: { baseUrl: "https://generativelanguage.googleapis.com/v1beta/models", authType: "apikey", authHeader: "key", models: [{ id: "text-embedding-004", name: "Text Embedding 004", dimensions: 768 }, { id: "embedding-001", name: "Embedding 001", dimensions: 768 }] }, ttsConfig: { baseUrl: "https://generativelanguage.googleapis.com/v1beta/models", authType: "apikey", authHeader: "key", format: "gemini-tts", models: [{ id: "gemini-2.5-flash-preview-tts", name: "Gemini 2.5 Flash TTS" }, { id: "gemini-2.5-pro-preview-tts", name: "Gemini 2.5 Pro TTS" }] } },
|
||||
"cloudflare-ai": { id: "cloudflare-ai", alias: "cf", name: "Cloudflare", icon: "cloud", color: "#F38020", textIcon: "CF", website: "https://developers.cloudflare.com/workers-ai/", notice: { text: "Workers AI free tier. Requires a Cloudflare API token and Account ID.", apiKeyUrl: "https://dash.cloudflare.com/profile/api-tokens" }, serviceKinds: ["llm"], hasProviderSpecificData: true },
|
||||
byteplus: { id: "byteplus", alias: "bpm", name: "BytePlus ModelArk", icon: "cloud", color: "#2563EB", textIcon: "BP", website: "https://console.byteplus.com/ark", notice: { text: "Free credits for new accounts. Access to Seed 2.0, Kimi K2 Thinking, GLM 4.7, GPT-OSS-120B models.", apiKeyUrl: "https://console.byteplus.com/ark/region:ark+ap-southeast-1/apiKey" }, serviceKinds: ["llm"] },
|
||||
};
|
||||
@@ -63,13 +63,13 @@ export const APIKEY_PROVIDERS = {
|
||||
"alicode-intl": { id: "alicode-intl", alias: "alicode-intl", name: "Alibaba Intl", icon: "cloud", color: "#FF6A00", textIcon: "ALi", website: "https://modelstudio.console.alibabacloud.com", notice: { apiKeyUrl: "https://modelstudio.console.alibabacloud.com/?apiKey=1" } },
|
||||
"xiaomi-mimo": { id: "xiaomi-mimo", alias: "mimo", name: "Xiaomi MiMo", icon: "smart_toy", color: "#FF6900", textIcon: "XM", website: "https://xiaomimimo.com", notice: { apiKeyUrl: "https://xiaomimimo.com" } },
|
||||
"volcengine-ark": { id: "volcengine-ark", alias: "ark", name: "Volcengine Ark", icon: "cloud", color: "#1677FF", textIcon: "ARK", website: "https://ark.cn-beijing.volces.com", notice: { apiKeyUrl: "https://console.volcengine.com/ark/region:ark+cn-beijing/apiKey" } },
|
||||
openai: { id: "openai", alias: "openai", name: "OpenAI", icon: "auto_awesome", color: "#10A37F", textIcon: "OA", website: "https://platform.openai.com", notice: { apiKeyUrl: "https://platform.openai.com/api-keys" }, serviceKinds: ["llm", "embedding", "tts", "image", "imageToText", "webSearch"], thinkingConfig: THINKING_CONFIG.effort, searchViaChat: { defaultModel: "gpt-4o-mini", pricingUrl: "https://openai.com/api/pricing" }, ttsConfig: { baseUrl: "https://api.openai.com/v1/audio/speech", authType: "apikey", authHeader: "bearer", format: "openai", models: [{ id: "tts-1", name: "TTS-1" }, { id: "tts-1-hd", name: "TTS-1 HD" }, { id: "gpt-4o-mini-tts", name: "GPT-4o Mini TTS" }] }, embeddingConfig: { baseUrl: "https://api.openai.com/v1/embeddings", authType: "apikey", authHeader: "bearer", models: [{ id: "text-embedding-3-small", name: "Text Embedding 3 Small", dimensions: 1536 }, { id: "text-embedding-3-large", name: "Text Embedding 3 Large", dimensions: 3072 }, { id: "text-embedding-ada-002", name: "Text Embedding Ada 002", dimensions: 1536 }] } },
|
||||
openai: { id: "openai", alias: "openai", name: "OpenAI", icon: "auto_awesome", color: "#10A37F", textIcon: "OA", website: "https://platform.openai.com", notice: { apiKeyUrl: "https://platform.openai.com/api-keys" }, serviceKinds: ["llm", "embedding", "tts", "stt", "image", "imageToText", "webSearch"], thinkingConfig: THINKING_CONFIG.effort, searchViaChat: { defaultModel: "gpt-4o-mini", pricingUrl: "https://openai.com/api/pricing" }, ttsConfig: { baseUrl: "https://api.openai.com/v1/audio/speech", authType: "apikey", authHeader: "bearer", format: "openai", models: [{ id: "tts-1", name: "TTS-1" }, { id: "tts-1-hd", name: "TTS-1 HD" }, { id: "gpt-4o-mini-tts", name: "GPT-4o Mini TTS" }] }, sttConfig: { baseUrl: "https://api.openai.com/v1/audio/transcriptions", authType: "apikey", authHeader: "bearer", format: "openai", models: [{ id: "whisper-1", name: "Whisper 1" }, { id: "gpt-4o-transcribe", name: "GPT-4o Transcribe" }, { id: "gpt-4o-mini-transcribe", name: "GPT-4o Mini Transcribe" }] }, embeddingConfig: { baseUrl: "https://api.openai.com/v1/embeddings", authType: "apikey", authHeader: "bearer", models: [{ id: "text-embedding-3-small", name: "Text Embedding 3 Small", dimensions: 1536 }, { id: "text-embedding-3-large", name: "Text Embedding 3 Large", dimensions: 3072 }, { id: "text-embedding-ada-002", name: "Text Embedding Ada 002", dimensions: 1536 }] } },
|
||||
anthropic: { id: "anthropic", alias: "anthropic", name: "Anthropic", icon: "smart_toy", color: "#D97757", textIcon: "AN", website: "https://console.anthropic.com", notice: { apiKeyUrl: "https://console.anthropic.com/settings/keys" }, serviceKinds: ["llm", "imageToText"] },
|
||||
"opencode-go": { id: "opencode-go", alias: "ocg", name: "OpenCode Go", icon: "terminal", color: "#E87040", textIcon: "OC", website: "https://opencode.ai/auth", notice: { text: "OpenCode Go subscription: $5/mo (then $10/mo). Access to Kimi, GLM, Qwen, MiMo, MiniMax models.", apiKeyUrl: "https://opencode.ai/auth" } },
|
||||
azure: { id: "azure", alias: "azure", name: "Azure OpenAI", icon: "cloud", color: "#0078D4", textIcon: "AZ", website: "https://azure.microsoft.com/en-us/products/ai-services/openai-service", notice: { apiKeyUrl: "https://portal.azure.com/#view/Microsoft_Azure_ProjectOxford/CognitiveServicesHub/~/OpenAI" }, hasProviderSpecificData: true },
|
||||
|
||||
deepseek: { id: "deepseek", alias: "ds", name: "DeepSeek", icon: "bolt", color: "#4D6BFE", textIcon: "DS", website: "https://deepseek.com", notice: { apiKeyUrl: "https://platform.deepseek.com/api_keys" } },
|
||||
groq: { id: "groq", alias: "groq", name: "Groq", icon: "speed", color: "#F55036", textIcon: "GQ", website: "https://groq.com", notice: { apiKeyUrl: "https://console.groq.com/keys" }, serviceKinds: ["llm", "imageToText"] },
|
||||
groq: { id: "groq", alias: "groq", name: "Groq", icon: "speed", color: "#F55036", textIcon: "GQ", website: "https://groq.com", notice: { apiKeyUrl: "https://console.groq.com/keys" }, serviceKinds: ["llm", "imageToText", "stt"], sttConfig: { baseUrl: "https://api.groq.com/openai/v1/audio/transcriptions", authType: "apikey", authHeader: "bearer", format: "openai", models: [{ id: "whisper-large-v3", name: "Whisper Large v3" }, { id: "whisper-large-v3-turbo", name: "Whisper Large v3 Turbo" }, { id: "distil-whisper-large-v3-en", name: "Distil Whisper Large v3 EN" }] } },
|
||||
xai: { id: "xai", alias: "xai", name: "xAI (Grok)", icon: "auto_awesome", color: "#1DA1F2", textIcon: "XA", website: "https://x.ai", notice: { apiKeyUrl: "https://console.x.ai" }, serviceKinds: ["llm", "imageToText", "webSearch"], searchViaChat: { defaultModel: "grok-4.20-reasoning", pricingUrl: "https://x.ai/api#pricing" } },
|
||||
mistral: { id: "mistral", alias: "mistral", name: "Mistral", icon: "air", color: "#FF7000", textIcon: "MI", website: "https://mistral.ai", notice: { apiKeyUrl: "https://console.mistral.ai/api-keys" }, serviceKinds: ["llm", "imageToText", "embedding"], embeddingConfig: { baseUrl: "https://api.mistral.ai/v1/embeddings", authType: "apikey", authHeader: "bearer", models: [{ id: "mistral-embed", name: "Mistral Embed", dimensions: 1024 }] } },
|
||||
perplexity: { id: "perplexity", alias: "pplx", name: "Perplexity", icon: "search", color: "#20808D", textIcon: "PP", website: "https://www.perplexity.ai", notice: { apiKeyUrl: "https://www.perplexity.ai/settings/api" }, serviceKinds: ["llm", "webSearch"], searchConfig: { baseUrl: "https://api.perplexity.ai/search", method: "POST", authType: "apikey", authHeader: "bearer", costPerQuery: 0.005, freeMonthlyQuota: 0, searchTypes: ["web"], defaultMaxResults: 5, maxMaxResults: 20, timeoutMs: 10000, cacheTTLMs: 300000 } },
|
||||
@@ -80,22 +80,22 @@ export const APIKEY_PROVIDERS = {
|
||||
nebius: { id: "nebius", alias: "nebius", name: "Nebius AI", icon: "cloud", color: "#6C5CE7", textIcon: "NB", website: "https://nebius.com", notice: { apiKeyUrl: "https://studio.nebius.com/settings/api-keys" }, serviceKinds: ["llm", "embedding"], embeddingConfig: { baseUrl: "https://api.tokenfactory.nebius.com/v1/embeddings", authType: "apikey", authHeader: "bearer", models: [{ id: "Qwen/Qwen3-Embedding-8B", name: "Qwen3 Embedding 8B", dimensions: 4096 }] } },
|
||||
siliconflow: { id: "siliconflow", alias: "siliconflow", name: "SiliconFlow", icon: "cloud_queue", color: "#5B6EF5", textIcon: "SF", website: "https://cloud.siliconflow.com", notice: { apiKeyUrl: "https://cloud.siliconflow.com/account/ak" } },
|
||||
hyperbolic: { id: "hyperbolic", alias: "hyp", name: "Hyperbolic", icon: "bolt", color: "#00D4FF", textIcon: "HY", website: "https://hyperbolic.xyz", notice: { apiKeyUrl: "https://app.hyperbolic.xyz/settings" }, serviceKinds: ["llm", "tts"], ttsConfig: { baseUrl: "https://api.hyperbolic.xyz/v1/audio/generation", authType: "apikey", authHeader: "bearer", format: "hyperbolic", models: [{ id: "melo-tts", name: "Melo TTS" }] } },
|
||||
deepgram: { id: "deepgram", alias: "dg", name: "Deepgram", icon: "mic", color: "#13EF93", textIcon: "DG", website: "https://deepgram.com", notice: { text: "$200 free credit on signup (no card required). Aura-1: $0.015/1k chars, Aura-2: $0.030/1k chars (Pay-As-You-Go).", apiKeyUrl: "https://console.deepgram.com/api-keys" }, serviceKinds: ["stt", "imageToText", "tts"], ttsConfig: { baseUrl: "https://api.deepgram.com/v1/speak", authType: "apikey", authHeader: "token", format: "deepgram", models: [] } },
|
||||
assemblyai: { id: "assemblyai", alias: "aai", name: "AssemblyAI", icon: "record_voice_over", color: "#0062FF", textIcon: "AA", website: "https://assemblyai.com", notice: { apiKeyUrl: "https://www.assemblyai.com/app/api-keys" }, serviceKinds: ["stt"] },
|
||||
deepgram: { id: "deepgram", alias: "dg", name: "Deepgram", icon: "mic", color: "#13EF93", textIcon: "DG", website: "https://deepgram.com", notice: { text: "$200 free credit on signup (no card required). Aura-1: $0.015/1k chars, Aura-2: $0.030/1k chars (Pay-As-You-Go).", apiKeyUrl: "https://console.deepgram.com/api-keys" }, serviceKinds: ["stt", "imageToText", "tts"], ttsConfig: { baseUrl: "https://api.deepgram.com/v1/speak", authType: "apikey", authHeader: "token", format: "deepgram", models: [] }, sttConfig: { baseUrl: "https://api.deepgram.com/v1/listen", authType: "apikey", authHeader: "token", format: "deepgram", models: [{ id: "nova-3", name: "Nova 3" }, { id: "nova-2", name: "Nova 2" }, { id: "whisper-large", name: "Whisper Large" }] } },
|
||||
assemblyai: { id: "assemblyai", alias: "aai", name: "AssemblyAI", icon: "record_voice_over", color: "#0062FF", textIcon: "AA", website: "https://assemblyai.com", notice: { apiKeyUrl: "https://www.assemblyai.com/app/api-keys" }, serviceKinds: ["stt"], sttConfig: { baseUrl: "https://api.assemblyai.com/v2/transcript", authType: "apikey", authHeader: "bearer", format: "assemblyai", async: true, models: [{ id: "universal-3-pro", name: "Universal 3 Pro" }, { id: "universal-2", name: "Universal 2" }] } },
|
||||
nanobanana: { id: "nanobanana", alias: "nb", name: "NanoBanana API", icon: "extension", color: "#FFD700", textIcon: "🍌", website: "https://nanobananaapi.ai", notice: { text: "3rd-party proxy for Google Nano Banana (Gemini 2.5/3 Flash Image). For official, use Gemini provider.", apiKeyUrl: "https://nanobananaapi.ai/dashboard" }, serviceKinds: ["image"] },
|
||||
elevenlabs: { id: "elevenlabs", alias: "el", name: "ElevenLabs", icon: "record_voice_over", color: "#6C47FF", textIcon: "EL", website: "https://elevenlabs.io", notice: { apiKeyUrl: "https://elevenlabs.io/app/settings/api-keys" }, serviceKinds: ["tts"], ttsConfig: { baseUrl: "https://api.elevenlabs.io/v1/text-to-speech", authType: "apikey", authHeader: "xi-api-key", format: "elevenlabs", models: [{ id: "eleven_multilingual_v2", name: "Eleven Multilingual v2" }, { id: "eleven_turbo_v2_5", name: "Eleven Turbo v2.5" }] } },
|
||||
cartesia: { id: "cartesia", alias: "cartesia", name: "Cartesia", icon: "spatial_audio", color: "#FF4F8B", textIcon: "CA", website: "https://cartesia.ai", notice: { apiKeyUrl: "https://play.cartesia.ai/keys" }, serviceKinds: ["tts"], hidden: true, ttsConfig: { baseUrl: "https://api.cartesia.ai/tts/bytes", authType: "apikey", authHeader: "x-api-key", format: "cartesia", models: [{ id: "sonic-2", name: "Sonic 2" }, { id: "sonic-3", name: "Sonic 3" }] } },
|
||||
playht: { id: "playht", alias: "playht", name: "PlayHT", icon: "play_circle", color: "#00B4D8", textIcon: "PH", website: "https://play.ht", notice: { apiKeyUrl: "https://play.ht/studio/api-access" }, serviceKinds: ["tts"], hidden: true, ttsConfig: { baseUrl: "https://api.play.ht/api/v2/tts/stream", authType: "apikey", authHeader: "playht", format: "playht", models: [{ id: "PlayDialog", name: "PlayDialog" }, { id: "Play3.0-mini", name: "Play 3.0 Mini" }] } },
|
||||
"local-device": { id: "local-device", alias: "local-device", name: "Local Device", icon: "speaker", color: "#64748B", textIcon: "LD", serviceKinds: ["tts"], noAuth: true, ttsConfig: { baseUrl: "local-device", authType: "none", authHeader: "none", format: "local-device", models: [] } },
|
||||
"google-tts": { id: "google-tts", alias: "google-tts", name: "Google TTS", icon: "record_voice_over", color: "#4285F4", textIcon: "GT", serviceKinds: ["tts"], noAuth: true, ttsConfig: { baseUrl: "google-tts", authType: "none", authHeader: "none", format: "google-tts", models: [] } },
|
||||
"edge-tts": { id: "edge-tts", alias: "edge-tts", name: "Edge TTS", icon: "record_voice_over", color: "#0078D4", textIcon: "ET", serviceKinds: ["tts"], noAuth: true, ttsConfig: { baseUrl: "edge-tts", authType: "none", authHeader: "none", format: "edge-tts", models: [] } },
|
||||
"local-device": { id: "local-device", alias: "local-device", name: "Local Device", icon: "speaker", color: "#64748B", textIcon: "LD", mediaPriority: 5, serviceKinds: ["tts"], noAuth: true, ttsConfig: { baseUrl: "local-device", authType: "none", authHeader: "none", format: "local-device", models: [] } },
|
||||
"google-tts": { id: "google-tts", alias: "google-tts", name: "Google TTS", icon: "record_voice_over", color: "#4285F4", textIcon: "GT", mediaPriority: 5, serviceKinds: ["tts"], noAuth: true, ttsConfig: { baseUrl: "google-tts", authType: "none", authHeader: "none", format: "google-tts", models: [] } },
|
||||
"edge-tts": { id: "edge-tts", alias: "edge-tts", name: "Edge TTS", icon: "record_voice_over", color: "#0078D4", textIcon: "ET", mediaPriority: 5, serviceKinds: ["tts"], noAuth: true, ttsConfig: { baseUrl: "edge-tts", authType: "none", authHeader: "none", format: "edge-tts", models: [] } },
|
||||
coqui: { id: "coqui", alias: "coqui", name: "Coqui TTS", icon: "record_voice_over", color: "#10B981", textIcon: "CQ", website: "https://github.com/coqui-ai/TTS", serviceKinds: ["tts"], hidden: true, noAuth: true, ttsConfig: { baseUrl: "http://localhost:5002/api/tts", authType: "none", authHeader: "none", format: "coqui", models: [{ id: "tts_models/en/ljspeech/tacotron2-DDC", name: "Tacotron2 DDC (LJSpeech)" }] } },
|
||||
tortoise: { id: "tortoise", alias: "tortoise", name: "Tortoise TTS", icon: "record_voice_over", color: "#7C3AED", textIcon: "TT", website: "https://github.com/neonbjb/tortoise-tts", serviceKinds: ["tts"], hidden: true, noAuth: true, ttsConfig: { baseUrl: "http://localhost:5000/api/tts", authType: "none", authHeader: "none", format: "tortoise", models: [{ id: "tortoise-v2", name: "Tortoise v2" }] } },
|
||||
inworld: { id: "inworld", alias: "inworld", name: "Inworld TTS", icon: "record_voice_over", color: "#FF6B6B", textIcon: "IW", website: "https://inworld.ai", notice: { text: "Free tier: 40 minutes/month TTS. Paid: TTS-1.5 Mini $0.01/min ($15/1M chars), TTS-1.5 Max $0.025/min ($30/1M chars). 270+ voices, 15 languages.", apiKeyUrl: "https://platform.inworld.ai/api-keys" }, serviceKinds: ["tts"], ttsConfig: { baseUrl: "https://api.inworld.ai/tts/v1/voice", authType: "apikey", authHeader: "basic", format: "inworld", models: [{ id: "inworld-tts-1.5-mini", name: "Inworld TTS 1.5 Mini ($0.01/min)" }, { id: "inworld-tts-1.5-max", name: "Inworld TTS 1.5 Max ($0.025/min)" }] } },
|
||||
"voyage-ai": { id: "voyage-ai", alias: "voyage", name: "Voyage AI", icon: "data_array", color: "#0EA5E9", textIcon: "VG", website: "https://www.voyageai.com", notice: { apiKeyUrl: "https://dash.voyageai.com/api-keys" }, serviceKinds: ["embedding"], embeddingConfig: { baseUrl: "https://api.voyageai.com/v1/embeddings", authType: "apikey", authHeader: "bearer", models: [{ id: "voyage-3-large", name: "Voyage 3 Large", dimensions: 1024 }, { id: "voyage-3.5", name: "Voyage 3.5", dimensions: 1024 }, { id: "voyage-3.5-lite", name: "Voyage 3.5 Lite", dimensions: 1024 }, { id: "voyage-code-3", name: "Voyage Code 3", dimensions: 1024 }, { id: "voyage-finance-2", name: "Voyage Finance 2", dimensions: 1024 }, { id: "voyage-law-2", name: "Voyage Law 2", dimensions: 1024 }, { id: "voyage-multilingual-2", name: "Voyage Multilingual 2", dimensions: 1024 }] } },
|
||||
sdwebui: { id: "sdwebui", alias: "sdwebui", name: "SD WebUI", icon: "brush", color: "#FF7043", textIcon: "SD", website: "https://github.com/AUTOMATIC1111/stable-diffusion-webui", serviceKinds: ["image"] },
|
||||
comfyui: { id: "comfyui", alias: "comfyui", name: "ComfyUI", icon: "account_tree", color: "#4CAF50", textIcon: "CF", website: "https://github.com/comfyanonymous/ComfyUI", serviceKinds: ["image"] },
|
||||
huggingface: { id: "huggingface", alias: "hf", name: "HuggingFace", icon: "face", color: "#FFD21E", textIcon: "HF", website: "https://huggingface.co", notice: { apiKeyUrl: "https://huggingface.co/settings/tokens" }, serviceKinds: ["image", "imageToText", "tts"], hiddenKinds: ["tts"], ttsConfig: { baseUrl: "https://api-inference.huggingface.co/models", authType: "apikey", authHeader: "bearer", format: "huggingface-tts", models: [{ id: "facebook/mms-tts-eng", name: "MMS TTS English" }, { id: "microsoft/speecht5_tts", name: "SpeechT5 TTS" }] } },
|
||||
huggingface: { id: "huggingface", alias: "hf", name: "HuggingFace", icon: "face", color: "#FFD21E", textIcon: "HF", website: "https://huggingface.co", notice: { apiKeyUrl: "https://huggingface.co/settings/tokens" }, serviceKinds: ["image", "imageToText", "tts", "stt"], hiddenKinds: ["tts"], ttsConfig: { baseUrl: "https://api-inference.huggingface.co/models", authType: "apikey", authHeader: "bearer", format: "huggingface-tts", models: [{ id: "facebook/mms-tts-eng", name: "MMS TTS English" }, { id: "microsoft/speecht5_tts", name: "SpeechT5 TTS" }] }, sttConfig: { baseUrl: "https://api-inference.huggingface.co/models", authType: "apikey", authHeader: "bearer", format: "huggingface-asr", models: [{ id: "openai/whisper-large-v3", name: "Whisper Large v3 (HF)" }, { id: "openai/whisper-small", name: "Whisper Small (HF)" }] } },
|
||||
blackbox: { id: "blackbox", alias: "bb", name: "Blackbox AI", icon: "smart_toy", color: "#5B5FEF", textIcon: "BB", website: "https://blackbox.ai", notice: { apiKeyUrl: "https://www.blackbox.ai/api-management" }, serviceKinds: ["llm"] },
|
||||
chutes: { id: "chutes", alias: "ch", name: "Chutes AI", icon: "water_drop", color: "#ffffffff", textIcon: "CH", website: "https://chutes.ai", notice: { apiKeyUrl: "https://chutes.ai/app/api" } },
|
||||
"ollama-local": { id: "ollama-local", alias: "ollama-local", name: "Ollama Local", icon: "cloud", color: "#ffffffff", textIcon: "OL", website: "https://ollama.com" },
|
||||
@@ -133,7 +133,7 @@ export const MEDIA_PROVIDER_KINDS = [
|
||||
{ id: "image", label: "Text to Image", icon: "brush", endpoint: { method: "POST", path: "/v1/images/generations" } },
|
||||
{ id: "imageToText", label: "Image to Text", icon: "image_search", endpoint: { method: "POST", path: "/v1/images/understanding" } },
|
||||
{ id: "tts", label: "Text To Speech", icon: "record_voice_over", endpoint: { method: "POST", path: "/v1/audio/speech" } },
|
||||
{ id: "stt", label: "STT", icon: "mic", endpoint: { method: "POST", path: "/v1/audio/transcriptions" } },
|
||||
{ id: "stt", label: "Speech To Text", icon: "mic", endpoint: { method: "POST", path: "/v1/audio/transcriptions" } },
|
||||
{ id: "webSearch", label: "Web Search", icon: "travel_explore", endpoint: { method: "POST", path: "/v1/search" } },
|
||||
{ id: "webFetch", label: "Web Fetch", icon: "language", endpoint: { method: "POST", path: "/v1/web/fetch" } },
|
||||
{ id: "video", label: "Video", icon: "movie", endpoint: { method: "POST", path: "/v1/video/generations" } },
|
||||
@@ -203,13 +203,15 @@ export const ID_TO_ALIAS = Object.values(AI_PROVIDERS).reduce((acc, p) => {
|
||||
// Helper: Get providers by service kind (e.g. "tts", "embedding", "image")
|
||||
// Providers without serviceKinds default to ["llm"]
|
||||
export function getProvidersByKind(kind) {
  // NOTE: the diff residue here contained two bodies; the first `return`
  // made the mediaPriority-sorted version unreachable. Resolved to the
  // sorted implementation introduced by this change.
  return Object.values(AI_PROVIDERS)
    .filter((p) => {
      // Providers that declare no serviceKinds are plain LLM providers.
      const kinds = p.serviceKinds ?? ["llm"];
      if (!kinds.includes(kind)) return false;
      if (p.hidden) return false; // globally hidden
      if (p.hiddenKinds?.includes(kind)) return false; // hidden for this specific kind
      return true;
    })
    // Lower mediaPriority surfaces first; providers without one sort last (100).
    // Array.prototype.sort is stable, so equal priorities keep declaration order.
    .sort((a, b) => (a.mediaPriority ?? 100) - (b.mediaPriority ?? 100));
}
|
||||
|
||||
// Providers that support usage/quota API
|
||||
@@ -221,4 +223,17 @@ export const USAGE_SUPPORTED_PROVIDERS = [
|
||||
"codex",
|
||||
"kimi-coding",
|
||||
"ollama",
|
||||
"gemini-cli",
|
||||
"glm",
|
||||
"glm-cn",
|
||||
"minimax",
|
||||
"minimax-cn",
|
||||
];
|
||||
|
||||
// Subset that uses apikey auth (still surfaced on quota page)
export const USAGE_APIKEY_PROVIDERS = ["glm", "glm-cn", "minimax", "minimax-cn"];
|
||||
|
||||
@@ -39,6 +39,13 @@ export const SKILLS = [
|
||||
endpoint: "/v1/audio/speech",
|
||||
icon: "record_voice_over",
|
||||
},
|
||||
{
|
||||
id: "9router-stt",
|
||||
name: "Speech-to-Text",
|
||||
description: "Transcribe audio via OpenAI Whisper, Groq, Gemini, Deepgram, AssemblyAI…",
|
||||
endpoint: "/v1/audio/transcriptions",
|
||||
icon: "mic",
|
||||
},
|
||||
{
|
||||
id: "9router-embeddings",
|
||||
name: "Embeddings",
|
||||
|
||||
@@ -109,4 +109,14 @@ export const TTS_PROVIDER_CONFIG = {
|
||||
hasVoiceIdInput: true,
|
||||
voiceSource: "config",
|
||||
},
|
||||
"gemini": {
|
||||
hasLanguageDropdown: false,
|
||||
hasLanguageHint: true, // sends body.language to guide TTS pronunciation
|
||||
hasModelSelector: true,
|
||||
hasBrowseButton: false,
|
||||
voiceSource: "hardcoded",
|
||||
modelKey: "gemini-tts-models",
|
||||
voiceKey: "gemini-tts-voices",
|
||||
voicesPerModel: true,
|
||||
},
|
||||
};
|
||||
|
||||
88
src/sse/handlers/stt.js
Normal file
@@ -0,0 +1,88 @@
|
||||
import {
|
||||
extractApiKey, isValidApiKey,
|
||||
getProviderCredentials, markAccountUnavailable,
|
||||
} from "../services/auth.js";
|
||||
import { getSettings } from "@/lib/localDb";
|
||||
import { getModelInfo } from "../services/model.js";
|
||||
import { handleSttCore } from "open-sse/handlers/sttCore.js";
|
||||
import { errorResponse, unavailableResponse } from "open-sse/utils/error.js";
|
||||
import { HTTP_STATUS } from "open-sse/config/runtimeConfig.js";
|
||||
import { AI_PROVIDERS } from "@/shared/constants/providers";
|
||||
import * as log from "../utils/logger.js";
|
||||
|
||||
// Providers requiring credentials for STT: declared stt-capable, not marked
// noAuth, and whose sttConfig does not opt out of auth ("none").
const CREDENTIALED_PROVIDERS = new Set(
  Object.keys(AI_PROVIDERS).filter((id) => {
    const p = AI_PROVIDERS[id];
    if (!p.serviceKinds?.includes("stt")) return false;
    if (p.noAuth) return false;
    return p.sttConfig?.authType !== "none";
  })
);
||||
|
||||
/**
 * Handle POST /v1/audio/transcriptions (OpenAI-compatible STT).
 *
 * Parses the multipart body, optionally enforces the gateway API key,
 * resolves the requested model to a provider, then either calls the
 * provider directly (noAuth) or iterates through stored credentials with
 * automatic fallback when an account fails or is rate limited.
 *
 * @param {Request} request - Incoming fetch-style request with multipart form data.
 * @returns {Promise<Response>} Provider transcription response, or a JSON error response.
 */
export async function handleStt(request) {
  let formData;
  try {
    formData = await request.formData();
  } catch {
    // Body was not valid multipart/form-data at all.
    return errorResponse(HTTP_STATUS.BAD_REQUEST, "Invalid multipart form data");
  }

  const modelStr = formData.get("model");
  log.request("POST", `/v1/audio/transcriptions | ${modelStr}`);

  // Gateway-level auth: only enforced when the admin enabled requireApiKey.
  const settings = await getSettings();
  if (settings.requireApiKey) {
    const apiKey = extractApiKey(request);
    if (!apiKey) return errorResponse(HTTP_STATUS.UNAUTHORIZED, "Missing API key");
    const valid = await isValidApiKey(apiKey);
    if (!valid) return errorResponse(HTTP_STATUS.UNAUTHORIZED, "Invalid API key");
  }

  // OpenAI-compatible contract: both `model` and `file` fields are required.
  if (!modelStr) return errorResponse(HTTP_STATUS.BAD_REQUEST, "Missing model");
  if (!formData.get("file")) return errorResponse(HTTP_STATUS.BAD_REQUEST, "Missing required field: file");

  // Resolve "provider/model" (or alias) into a concrete provider + model id.
  const modelInfo = await getModelInfo(modelStr);
  if (!modelInfo.provider) return errorResponse(HTTP_STATUS.BAD_REQUEST, "Invalid model format");

  const { provider, model } = modelInfo;
  log.info("ROUTING", `Provider: ${provider}, Model: ${model}`);

  // noAuth providers: single direct call, no credential lookup or fallback.
  if (!CREDENTIALED_PROVIDERS.has(provider)) {
    const result = await handleSttCore({ provider, model, formData });
    if (result.success) return result.response;
    return errorResponse(result.status || HTTP_STATUS.BAD_GATEWAY, result.error || "STT failed");
  }

  // Credentialed — fallback loop: try each stored account in turn, excluding
  // ones that already failed, until success or no usable account remains.
  const excludeConnectionIds = new Set();
  let lastError = null; // most recent upstream error message, surfaced on exhaustion
  let lastStatus = null; // most recent upstream status code, surfaced on exhaustion

  while (true) {
    const credentials = await getProviderCredentials(provider, excludeConnectionIds, model);

    if (!credentials || credentials.allRateLimited) {
      // Every account is rate limited: report retry-after info to the caller.
      if (credentials?.allRateLimited) {
        const msg = lastError || credentials.lastError || "Unavailable";
        const status = lastStatus || Number(credentials.lastErrorCode) || HTTP_STATUS.SERVICE_UNAVAILABLE;
        return unavailableResponse(status, `[${provider}/${model}] ${msg}`, credentials.retryAfter, credentials.retryAfterHuman);
      }
      // Nothing excluded yet => the provider was never configured at all.
      if (excludeConnectionIds.size === 0) return errorResponse(HTTP_STATUS.BAD_REQUEST, `No credentials for provider: ${provider}`);
      // Otherwise we exhausted all accounts via fallback; report the last failure.
      return errorResponse(lastStatus || HTTP_STATUS.SERVICE_UNAVAILABLE, lastError || "All accounts unavailable");
    }

    log.info("AUTH", `\x1b[32mUsing ${provider} account: ${credentials.connectionName}\x1b[0m`);

    const result = await handleSttCore({ provider, model, formData, credentials });

    if (result.success) return result.response;

    // Record the failure against this account; the service decides whether the
    // error is transient enough to justify trying the next account.
    const { shouldFallback } = await markAccountUnavailable(credentials.connectionId, result.status, result.error, provider, model);
    if (shouldFallback) {
      excludeConnectionIds.add(credentials.connectionId);
      lastError = result.error;
      lastStatus = result.status;
      continue;
    }
    // Non-retriable failure: pass the upstream response through if we have one.
    return result.response || errorResponse(result.status, result.error);
  }
}
|
||||
@@ -29,7 +29,8 @@ export async function handleTts(request) {
|
||||
const url = new URL(request.url);
|
||||
const modelStr = body.model;
|
||||
const responseFormat = url.searchParams.get("response_format") || "mp3"; // mp3 (default) | json
|
||||
log.request("POST", `${url.pathname} | ${modelStr} | format=${responseFormat}`);
|
||||
const language = body.language || ""; // Optional language hint (currently used by Gemini)
|
||||
log.request("POST", `${url.pathname} | ${modelStr} | format=${responseFormat}${language ? ` | lang=${language}` : ""}`);
|
||||
|
||||
const settings = await getSettings();
|
||||
if (settings.requireApiKey) {
|
||||
@@ -52,7 +53,7 @@ export async function handleTts(request) {
|
||||
return handleComboChat({
|
||||
body,
|
||||
models: comboModels,
|
||||
handleSingleModel: (b, m) => handleSingleModelTts(b, m, responseFormat),
|
||||
handleSingleModel: (b, m) => handleSingleModelTts(b, m, responseFormat, language),
|
||||
log,
|
||||
comboName: modelStr,
|
||||
comboStrategy,
|
||||
@@ -60,10 +61,10 @@ export async function handleTts(request) {
|
||||
});
|
||||
}
|
||||
|
||||
return handleSingleModelTts(body, modelStr, responseFormat);
|
||||
return handleSingleModelTts(body, modelStr, responseFormat, language);
|
||||
}
|
||||
|
||||
async function handleSingleModelTts(body, modelStr, responseFormat) {
|
||||
async function handleSingleModelTts(body, modelStr, responseFormat, language) {
|
||||
const modelInfo = await getModelInfo(modelStr);
|
||||
if (!modelInfo.provider) return errorResponse(HTTP_STATUS.BAD_REQUEST, "Invalid model format");
|
||||
|
||||
@@ -72,7 +73,7 @@ async function handleSingleModelTts(body, modelStr, responseFormat) {
|
||||
|
||||
// noAuth providers — no credential needed
|
||||
if (!CREDENTIALED_PROVIDERS.has(provider)) {
|
||||
const result = await handleTtsCore({ provider, model, input: body.input, responseFormat });
|
||||
const result = await handleTtsCore({ provider, model, input: body.input, responseFormat, language });
|
||||
if (result.success) return result.response;
|
||||
return errorResponse(result.status || HTTP_STATUS.BAD_GATEWAY, result.error || "TTS failed");
|
||||
}
|
||||
@@ -97,7 +98,7 @@ async function handleSingleModelTts(body, modelStr, responseFormat) {
|
||||
|
||||
log.info("AUTH", `\x1b[32mUsing ${provider} account: ${credentials.connectionName}\x1b[0m`);
|
||||
|
||||
const result = await handleTtsCore({ provider, model, input: body.input, credentials, responseFormat });
|
||||
const result = await handleTtsCore({ provider, model, input: body.input, credentials, responseFormat, language });
|
||||
|
||||
if (result.success) return result.response;
|
||||
|
||||
|
||||
19
src/store/headerSearchStore.js
Normal file
@@ -0,0 +1,19 @@
|
||||
/**
|
||||
* Header Search Store — Zustand-based reusable search input in Header.
|
||||
* Pages register placeholder on mount, read query, unregister on unmount.
|
||||
*/
|
||||
|
||||
import { create } from "zustand";
|
||||
|
||||
// State used both as the initial value and when a page unregisters.
const HIDDEN_STATE = { visible: false, placeholder: "", query: "" };

export const useHeaderSearchStore = create((set) => ({
  ...HIDDEN_STATE,

  // Update the live search text typed into the Header input.
  setQuery: (query) => set({ query }),

  // Called by a page on mount: show the input with its placeholder, reset text.
  register: (placeholder = "Search...") =>
    set({ visible: true, placeholder, query: "" }),

  // Called on unmount: hide the input and clear all search state.
  unregister: () => set(HIDDEN_STATE),
}));
|
||||