- Cap maximum cooldown for rate limit handling in account unavailability and single-model chat flows

- Dynamic custom model fetching for model selection
This commit is contained in:
decolua
2026-04-24 16:14:18 +07:00
parent c42c0146ab
commit cca615eaff
19 changed files with 108 additions and 60 deletions

View File

@@ -1,3 +1,9 @@
# v0.4.5 (2026-04-24)
## Improvements
- Cap maximum cooldown for rate limit handling in account unavailability and single-model chat flows
- Dynamic custom model fetching for model selection
# v0.4.3 (2026-04-24)
## Improvements

View File

@@ -2,6 +2,7 @@ import { getModelInfoCore } from "open-sse/services/model.js";
import { handleChatCore } from "open-sse/handlers/chatCore.js"; import { handleChatCore } from "open-sse/handlers/chatCore.js";
import { errorResponse } from "open-sse/utils/error.js"; import { errorResponse } from "open-sse/utils/error.js";
import { checkFallbackError, isAccountUnavailable, getUnavailableUntil, getEarliestRateLimitedUntil, formatRetryAfter } from "open-sse/services/accountFallback.js"; import { checkFallbackError, isAccountUnavailable, getUnavailableUntil, getEarliestRateLimitedUntil, formatRetryAfter } from "open-sse/services/accountFallback.js";
import { MAX_RATE_LIMIT_COOLDOWN_MS } from "open-sse/config/errorConfig.js";
import { getComboModelsFromData, handleComboChat } from "open-sse/services/combo.js"; import { getComboModelsFromData, handleComboChat } from "open-sse/services/combo.js";
import { HTTP_STATUS } from "open-sse/config/runtimeConfig.js"; import { HTTP_STATUS } from "open-sse/config/runtimeConfig.js";
import * as log from "../utils/logger.js"; import * as log from "../utils/logger.js";
@@ -253,7 +254,7 @@ async function markAccountUnavailable(machineId, connectionId, status, errorText
// Provider-specific precise cooldown (e.g. codex usage_limit_reached) overrides backoff // Provider-specific precise cooldown (e.g. codex usage_limit_reached) overrides backoff
let cooldownMs, newBackoffLevel; let cooldownMs, newBackoffLevel;
if (resetsAtMs && resetsAtMs > Date.now()) { if (resetsAtMs && resetsAtMs > Date.now()) {
cooldownMs = resetsAtMs - Date.now(); cooldownMs = Math.min(resetsAtMs - Date.now(), MAX_RATE_LIMIT_COOLDOWN_MS);
newBackoffLevel = 0; newBackoffLevel = 0;
} else { } else {
({ cooldownMs, newBackoffLevel } = checkFallbackError(status, errorText, backoffLevel)); ({ cooldownMs, newBackoffLevel } = checkFallbackError(status, errorText, backoffLevel));

View File

@@ -38,6 +38,9 @@ export const BACKOFF_CONFIG = {
// Default cooldown for transient/unknown errors // Default cooldown for transient/unknown errors
export const TRANSIENT_COOLDOWN_MS = 30 * 1000; export const TRANSIENT_COOLDOWN_MS = 30 * 1000;
// Hard cap for provider-reported rate limit cooldown (e.g. codex resets_at can be 5-6h)
export const MAX_RATE_LIMIT_COOLDOWN_MS = 30 * 60 * 1000;
// Cooldown durations (ms) // Cooldown durations (ms)
const COOLDOWN = { const COOLDOWN = {
long: 2 * 60 * 1000, long: 2 * 60 * 1000,

View File

@@ -219,7 +219,6 @@ export const PROVIDER_MODELS = {
// Gemini 3.1 series // Gemini 3.1 series
{ id: "gemini-3.1-pro-preview", name: "Gemini 3.1 Pro Preview" }, { id: "gemini-3.1-pro-preview", name: "Gemini 3.1 Pro Preview" },
{ id: "gemini-3.1-flash-lite-preview", name: "Gemini 3.1 Flash Lite Preview" }, { id: "gemini-3.1-flash-lite-preview", name: "Gemini 3.1 Flash Lite Preview" },
{ id: "gemini-3.1-flash-image-preview", name: "Gemini 3.1 Flash Image Preview" },
// Gemini 3 series // Gemini 3 series
{ id: "gemini-3-flash-preview", name: "Gemini 3 Flash Preview" }, { id: "gemini-3-flash-preview", name: "Gemini 3 Flash Preview" },
// Gemini 2.5 series // Gemini 2.5 series
@@ -342,6 +341,7 @@ export const PROVIDER_MODELS = {
{ id: "DeepSeek-V3.2", name: "DeepSeek-V3.2" }, { id: "DeepSeek-V3.2", name: "DeepSeek-V3.2" },
], ],
deepseek: [ deepseek: [
{ id: "deepseek-v4-flash", name: "DeepSeek V4 Flash" },
{ id: "deepseek-chat", name: "DeepSeek V3.2 Chat" }, { id: "deepseek-chat", name: "DeepSeek V3.2 Chat" },
{ id: "deepseek-reasoner", name: "DeepSeek V3.2 Reasoner" }, { id: "deepseek-reasoner", name: "DeepSeek V3.2 Reasoner" },
], ],

View File

@@ -24,7 +24,7 @@ import { detectClientTool, isNativePassthrough } from "../utils/clientDetector.j
* @param {object} options.credentials - Provider credentials * @param {object} options.credentials - Provider credentials
* @param {string} options.sourceFormatOverride - Override detected source format (e.g. "openai-responses") * @param {string} options.sourceFormatOverride - Override detected source format (e.g. "openai-responses")
*/ */
export async function handleChatCore({ body, modelInfo, credentials, log, onCredentialsRefreshed, onRequestSuccess, onDisconnect, clientRawRequest, connectionId, userAgent, apiKey, ccFilterNaming, sourceFormatOverride, providerThinking }) { export async function handleChatCore({ body, modelInfo, credentials, log, onCredentialsRefreshed, onRequestSuccess, onDisconnect, clientRawRequest, connectionId, userAgent, apiKey, ccFilterNaming, rtkEnabled, sourceFormatOverride, providerThinking }) {
const { provider, model } = modelInfo; const { provider, model } = modelInfo;
const requestStartTime = Date.now(); const requestStartTime = Date.now();
@@ -82,7 +82,7 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
log?.debug?.("PASSTHROUGH", `${clientTool}${provider} | native lossless`); log?.debug?.("PASSTHROUGH", `${clientTool}${provider} | native lossless`);
translatedBody = { ...body, model }; translatedBody = { ...body, model };
} else { } else {
translatedBody = translateRequest(sourceFormat, targetFormat, model, body, stream, credentials, provider, reqLogger, stripList, connectionId); translatedBody = translateRequest(sourceFormat, targetFormat, model, body, stream, credentials, provider, reqLogger, stripList, connectionId, rtkEnabled);
if (!translatedBody) { if (!translatedBody) {
trackPendingRequest(model, provider, connectionId, false, true); trackPendingRequest(model, provider, connectionId, false, true);
return createErrorResult(HTTP_STATUS.BAD_REQUEST, `Failed to translate request for ${sourceFormat}${targetFormat}`); return createErrorResult(HTTP_STATUS.BAD_REQUEST, `Failed to translate request for ${sourceFormat}${targetFormat}`);

View File

@@ -1,11 +0,0 @@
// Synchronous RTK toggle cache. Updated by /api/settings PATCH handler
// and initialized from DB on server boot.
let enabled = false;
export function setRtkEnabled(value) {
enabled = Boolean(value);
}
export function isRtkEnabled() {
return enabled;
}

View File

@@ -3,21 +3,38 @@
import { RAW_CAP, MIN_COMPRESS_SIZE } from "./constants.js"; import { RAW_CAP, MIN_COMPRESS_SIZE } from "./constants.js";
import { autoDetectFilter } from "./autodetect.js"; import { autoDetectFilter } from "./autodetect.js";
import { safeApply } from "./applyFilter.js"; import { safeApply } from "./applyFilter.js";
import { isRtkEnabled } from "./flag.js";
export { isRtkEnabled, setRtkEnabled } from "./flag.js";
// Compress tool_result content in-place. Returns stats or null if disabled/failed. // Compress tool_result content in-place. Returns stats or null if disabled/failed.
export function compressMessages(body) { export function compressMessages(body, enabled) {
if (!isRtkEnabled()) return null; if (!enabled) return null;
if (!body || !Array.isArray(body.messages)) return null; if (!body) return null;
// Support both OpenAI/Claude "messages" and OpenAI Responses "input"
const items = Array.isArray(body.messages) ? body.messages
: Array.isArray(body.input) ? body.input
: null;
if (!items) return null;
const stats = { bytesBefore: 0, bytesAfter: 0, hits: [] }; const stats = { bytesBefore: 0, bytesAfter: 0, hits: [] };
try { try {
for (let i = 0; i < body.messages.length; i++) { for (let i = 0; i < items.length; i++) {
const msg = body.messages[i]; const msg = items[i];
if (!msg) continue; if (!msg) continue;
// Shape 4: OpenAI Responses — top-level { type:"function_call_output", output: string | [{type:"input_text", text}] }
if (msg.type === "function_call_output") {
if (typeof msg.output === "string") {
msg.output = compressText(msg.output, stats, "openai-responses-string");
} else if (Array.isArray(msg.output)) {
for (let k = 0; k < msg.output.length; k++) {
const part = msg.output[k];
if (part && part.type === "input_text" && typeof part.text === "string") {
part.text = compressText(part.text, stats, "openai-responses-array");
}
}
}
continue;
}
// Shape 1: OpenAI tool message — { role:"tool", content: "string" } // Shape 1: OpenAI tool message — { role:"tool", content: "string" }
if (msg.role === "tool" && typeof msg.content === "string") { if (msg.role === "tool" && typeof msg.content === "string") {
msg.content = compressText(msg.content, stats, "openai-tool"); msg.content = compressText(msg.content, stats, "openai-tool");

View File

@@ -39,6 +39,15 @@ export function getRotatedModels(models, comboName, strategy) {
return rotatedModels; return rotatedModels;
} }
/**
* Reset in-memory rotation state when combo/settings change
* @param {string} [comboName] - Combo name to reset; omit to clear all
*/
export function resetComboRotation(comboName) {
if (comboName) comboRotationState.delete(comboName);
else comboRotationState.clear();
}
/** /**
* Get combo models from combos data * Get combo models from combos data
* @param {string} modelStr - Model string to check * @param {string} modelStr - Model string to check

View File

@@ -71,12 +71,12 @@ function stripContentTypes(body, stripList = []) {
} }
// Translate request: source -> openai -> target // Translate request: source -> openai -> target
export function translateRequest(sourceFormat, targetFormat, model, body, stream = true, credentials = null, provider = null, reqLogger = null, stripList = [], connectionId = null) { export function translateRequest(sourceFormat, targetFormat, model, body, stream = true, credentials = null, provider = null, reqLogger = null, stripList = [], connectionId = null, rtkEnabled = false) {
ensureInitialized(); ensureInitialized();
let result = body; let result = body;
// RTK: compress tool_result content before any translation (shape-agnostic) // RTK: compress tool_result content before any translation (shape-agnostic)
const rtkStats = compressMessages(result); const rtkStats = compressMessages(result, rtkEnabled);
if (rtkStats) { if (rtkStats) {
const line = formatRtkLog(rtkStats); const line = formatRtkLog(rtkStats);
if (line) console.log(line); if (line) console.log(line);

View File

@@ -1,6 +1,6 @@
{ {
"name": "9router-app", "name": "9router-app",
"version": "0.4.3", "version": "0.4.5",
"description": "9Router web dashboard", "description": "9Router web dashboard",
"private": true, "private": true,
"scripts": { "scripts": {

View File

@@ -174,6 +174,7 @@ model = "${effectiveSubagentModel}"
`; `;
const authContent = JSON.stringify({ const authContent = JSON.stringify({
auth_mode: "apikey",
OPENAI_API_KEY: keyToUse OPENAI_API_KEY: keyToUse
}, null, 2); }, null, 2);

View File

@@ -159,7 +159,9 @@ export async function POST(request) {
authData = JSON.parse(existingAuth); authData = JSON.parse(existingAuth);
} catch { /* No existing auth */ } } catch { /* No existing auth */ }
// Force apikey mode (keep existing tokens untouched for ChatGPT login reuse)
authData.OPENAI_API_KEY = apiKey; authData.OPENAI_API_KEY = apiKey;
authData.auth_mode = "apikey";
await fs.writeFile(authPath, JSON.stringify(authData, null, 2)); await fs.writeFile(authPath, JSON.stringify(authData, null, 2));
return NextResponse.json({ return NextResponse.json({
@@ -215,6 +217,7 @@ export async function DELETE() {
const existingAuth = await fs.readFile(authPath, "utf-8"); const existingAuth = await fs.readFile(authPath, "utf-8");
const authData = JSON.parse(existingAuth); const authData = JSON.parse(existingAuth);
delete authData.OPENAI_API_KEY; delete authData.OPENAI_API_KEY;
delete authData.auth_mode;
// Write back or delete if empty // Write back or delete if empty
if (Object.keys(authData).length === 0) { if (Object.keys(authData).length === 0) {

View File

@@ -1,5 +1,6 @@
import { NextResponse } from "next/server"; import { NextResponse } from "next/server";
import { getComboById, updateCombo, deleteCombo, getComboByName } from "@/lib/localDb"; import { getComboById, updateCombo, deleteCombo, getComboByName } from "@/lib/localDb";
import { resetComboRotation } from "open-sse/services/combo.js";
// Validate combo name: only a-z, A-Z, 0-9, -, _ // Validate combo name: only a-z, A-Z, 0-9, -, _
const VALID_NAME_REGEX = /^[a-zA-Z0-9_.\-]+$/; const VALID_NAME_REGEX = /^[a-zA-Z0-9_.\-]+$/;
@@ -40,12 +41,18 @@ export async function PUT(request, { params }) {
} }
} }
// Capture previous name to invalidate rotation state on rename
const prev = await getComboById(id);
const combo = await updateCombo(id, body); const combo = await updateCombo(id, body);
if (!combo) { if (!combo) {
return NextResponse.json({ error: "Combo not found" }, { status: 404 }); return NextResponse.json({ error: "Combo not found" }, { status: 404 });
} }
// Invalidate rotation state (models/strategy/name may have changed)
if (prev?.name) resetComboRotation(prev.name);
if (combo.name && combo.name !== prev?.name) resetComboRotation(combo.name);
return NextResponse.json(combo); return NextResponse.json(combo);
} catch (error) { } catch (error) {
console.log("Error updating combo:", error); console.log("Error updating combo:", error);
@@ -57,12 +64,15 @@ export async function PUT(request, { params }) {
export async function DELETE(request, { params }) { export async function DELETE(request, { params }) {
try { try {
const { id } = await params; const { id } = await params;
const prev = await getComboById(id);
const success = await deleteCombo(id); const success = await deleteCombo(id);
if (!success) { if (!success) {
return NextResponse.json({ error: "Combo not found" }, { status: 404 }); return NextResponse.json({ error: "Combo not found" }, { status: 404 });
} }
if (prev?.name) resetComboRotation(prev.name);
return NextResponse.json({ success: true }); return NextResponse.json({ success: true });
} catch (error) { } catch (error) {
console.log("Error deleting combo:", error); console.log("Error deleting combo:", error);

View File

@@ -1,7 +1,7 @@
import { NextResponse } from "next/server"; import { NextResponse } from "next/server";
import { getSettings, updateSettings } from "@/lib/localDb"; import { getSettings, updateSettings } from "@/lib/localDb";
import { applyOutboundProxyEnv } from "@/lib/network/outboundProxy"; import { applyOutboundProxyEnv } from "@/lib/network/outboundProxy";
import { setRtkEnabled } from "open-sse/rtk/flag.js"; import { resetComboRotation } from "open-sse/services/combo.js";
import bcrypt from "bcryptjs"; import bcrypt from "bcryptjs";
export async function GET() { export async function GET() {
@@ -67,10 +67,14 @@ export async function PATCH(request) {
applyOutboundProxyEnv(settings); applyOutboundProxyEnv(settings);
} }
// Sync RTK toggle immediately (sync cache for request hot path) // Invalidate combo rotation state when strategy settings change
if (Object.prototype.hasOwnProperty.call(body, "rtkEnabled")) { if (
setRtkEnabled(settings.rtkEnabled); Object.prototype.hasOwnProperty.call(body, "comboStrategy") ||
Object.prototype.hasOwnProperty.call(body, "comboStrategies")
) {
resetComboRotation();
} }
const { password, ...safeSettings } = settings; const { password, ...safeSettings } = settings;
return NextResponse.json(safeSettings); return NextResponse.json(safeSettings);
} catch (error) { } catch (error) {

View File

@@ -3,7 +3,6 @@ import "./globals.css";
import { ThemeProvider } from "@/shared/components/ThemeProvider"; import { ThemeProvider } from "@/shared/components/ThemeProvider";
import "@/lib/initCloudSync"; // Auto-initialize cloud sync import "@/lib/initCloudSync"; // Auto-initialize cloud sync
import "@/lib/network/initOutboundProxy"; // Auto-initialize outbound proxy env import "@/lib/network/initOutboundProxy"; // Auto-initialize outbound proxy env
import "@/lib/rtk/initRtk"; // Auto-initialize RTK toggle from DB
import { initConsoleLogCapture } from "@/lib/consoleLogBuffer"; import { initConsoleLogCapture } from "@/lib/consoleLogBuffer";
import { RuntimeI18nProvider } from "@/i18n/RuntimeI18nProvider"; import { RuntimeI18nProvider } from "@/i18n/RuntimeI18nProvider";

View File

@@ -1,20 +0,0 @@
import { getSettings } from "@/lib/localDb";
import { setRtkEnabled } from "open-sse/rtk/flag.js";
let initialized = false;
export async function ensureRtkInitialized() {
if (initialized) return true;
try {
const settings = await getSettings();
setRtkEnabled(settings.rtkEnabled === true);
initialized = true;
} catch (error) {
console.error("[ServerInit] Error initializing RTK flag:", error);
}
return initialized;
}
ensureRtkInitialized().catch(console.log);
export default ensureRtkInitialized;

View File

@@ -3,8 +3,8 @@
import { useState, useMemo, useEffect } from "react"; import { useState, useMemo, useEffect } from "react";
import PropTypes from "prop-types"; import PropTypes from "prop-types";
import Modal from "./Modal"; import Modal from "./Modal";
import { getModelsByProviderId, PROVIDER_ID_TO_ALIAS } from "@/shared/constants/models"; import { getModelsByProviderId } from "@/shared/constants/models";
import { OAUTH_PROVIDERS, APIKEY_PROVIDERS, FREE_PROVIDERS, FREE_TIER_PROVIDERS, isOpenAICompatibleProvider, isAnthropicCompatibleProvider } from "@/shared/constants/providers"; import { OAUTH_PROVIDERS, APIKEY_PROVIDERS, FREE_PROVIDERS, FREE_TIER_PROVIDERS, isOpenAICompatibleProvider, isAnthropicCompatibleProvider, getProviderAlias } from "@/shared/constants/providers";
// Provider order: OAuth first, then Free Tier, then API Key (matches dashboard/providers) // Provider order: OAuth first, then Free Tier, then API Key (matches dashboard/providers)
const PROVIDER_ORDER = [ const PROVIDER_ORDER = [
@@ -29,6 +29,7 @@ export default function ModelSelectModal({
const [searchQuery, setSearchQuery] = useState(""); const [searchQuery, setSearchQuery] = useState("");
const [combos, setCombos] = useState([]); const [combos, setCombos] = useState([]);
const [providerNodes, setProviderNodes] = useState([]); const [providerNodes, setProviderNodes] = useState([]);
const [customModels, setCustomModels] = useState([]);
const fetchCombos = async () => { const fetchCombos = async () => {
try { try {
@@ -62,6 +63,22 @@ export default function ModelSelectModal({
if (isOpen) fetchProviderNodes(); if (isOpen) fetchProviderNodes();
}, [isOpen]); }, [isOpen]);
const fetchCustomModels = async () => {
try {
const res = await fetch("/api/models/custom");
if (!res.ok) throw new Error(`Failed to fetch custom models: ${res.status}`);
const data = await res.json();
setCustomModels(data.models || []);
} catch (error) {
console.error("Error fetching custom models:", error);
setCustomModels([]);
}
};
useEffect(() => {
if (isOpen) fetchCustomModels();
}, [isOpen]);
const allProviders = useMemo(() => ({ ...OAUTH_PROVIDERS, ...FREE_PROVIDERS, ...FREE_TIER_PROVIDERS, ...APIKEY_PROVIDERS }), []); const allProviders = useMemo(() => ({ ...OAUTH_PROVIDERS, ...FREE_PROVIDERS, ...FREE_TIER_PROVIDERS, ...APIKEY_PROVIDERS }), []);
// Group models by provider with priority order // Group models by provider with priority order
@@ -85,7 +102,7 @@ export default function ModelSelectModal({
}); });
sortedProviderIds.forEach((providerId) => { sortedProviderIds.forEach((providerId) => {
const alias = PROVIDER_ID_TO_ALIAS[providerId] || providerId; const alias = getProviderAlias(providerId);
const providerInfo = allProviders[providerId] || { name: providerId, color: "#666" }; const providerInfo = allProviders[providerId] || { name: providerId, color: "#666" };
const isCustomProvider = isOpenAICompatibleProvider(providerId) || isAnthropicCompatibleProvider(providerId); const isCustomProvider = isOpenAICompatibleProvider(providerId) || isAnthropicCompatibleProvider(providerId);
@@ -151,7 +168,7 @@ export default function ModelSelectModal({
// Custom models: if no hardcoded models (e.g. openrouter), show all aliases for this provider // Custom models: if no hardcoded models (e.g. openrouter), show all aliases for this provider
// Otherwise only show aliases where aliasName === modelId ("Add Model" button pattern) // Otherwise only show aliases where aliasName === modelId ("Add Model" button pattern)
const hasHardcoded = hardcodedModels.length > 0; const hasHardcoded = hardcodedModels.length > 0;
const customModels = Object.entries(modelAliases) const customAliasModels = Object.entries(modelAliases)
.filter(([aliasName, fullModel]) => .filter(([aliasName, fullModel]) =>
fullModel.startsWith(`${alias}/`) && fullModel.startsWith(`${alias}/`) &&
(hasHardcoded ? aliasName === fullModel.replace(`${alias}/`, "") : true) && (hasHardcoded ? aliasName === fullModel.replace(`${alias}/`, "") : true) &&
@@ -162,9 +179,16 @@ export default function ModelSelectModal({
return { id: modelId, name: aliasName, value: fullModel, isCustom: true }; return { id: modelId, name: aliasName, value: fullModel, isCustom: true };
}); });
// Custom models registered via /api/models/custom (provider "Add Model" button)
const customAliasIds = new Set(customAliasModels.map((m) => m.id));
const customRegisteredModels = customModels
.filter((m) => m.providerAlias === alias && !hardcodedIds.has(m.id) && !customAliasIds.has(m.id))
.map((m) => ({ id: m.id, name: m.name || m.id, value: `${alias}/${m.id}`, isCustom: true }));
const allModels = [ const allModels = [
...hardcodedModels.map((m) => ({ id: m.id, name: m.name, value: `${alias}/${m.id}` })), ...hardcodedModels.map((m) => ({ id: m.id, name: m.name, value: `${alias}/${m.id}` })),
...customModels, ...customAliasModels,
...customRegisteredModels,
]; ];
if (allModels.length > 0) { if (allModels.length > 0) {
@@ -179,7 +203,7 @@ export default function ModelSelectModal({
}); });
return groups; return groups;
}, [activeProviders, modelAliases, allProviders, providerNodes]); }, [activeProviders, modelAliases, allProviders, providerNodes, customModels]);
// Filter combos by search query // Filter combos by search query
const filteredCombos = useMemo(() => { const filteredCombos = useMemo(() => {
@@ -304,7 +328,7 @@ export default function ModelSelectModal({
const isPlaceholder = model.isPlaceholder; const isPlaceholder = model.isPlaceholder;
return ( return (
<button <button
key={model.id} key={model.value}
onClick={() => handleSelect(model)} onClick={() => handleSelect(model)}
title={isPlaceholder ? "Select to pre-fill, then edit model ID in the input" : undefined} title={isPlaceholder ? "Select to pre-fill, then edit model ID in the input" : undefined}
className={` className={`

View File

@@ -206,6 +206,7 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null, re
userAgent, userAgent,
apiKey, apiKey,
ccFilterNaming: !!chatSettings.ccFilterNaming, ccFilterNaming: !!chatSettings.ccFilterNaming,
rtkEnabled: !!chatSettings.rtkEnabled,
providerThinking, providerThinking,
// Detect source format by endpoint + body // Detect source format by endpoint + body
sourceFormatOverride: request?.url ? detectFormatByEndpoint(new URL(request.url).pathname, body) : null, sourceFormatOverride: request?.url ? detectFormatByEndpoint(new URL(request.url).pathname, body) : null,

View File

@@ -1,6 +1,7 @@
import { getProviderConnections, validateApiKey, updateProviderConnection, getSettings } from "@/lib/localDb"; import { getProviderConnections, validateApiKey, updateProviderConnection, getSettings } from "@/lib/localDb";
import { resolveConnectionProxyConfig } from "@/lib/network/connectionProxy"; import { resolveConnectionProxyConfig } from "@/lib/network/connectionProxy";
import { formatRetryAfter, checkFallbackError, isModelLockActive, buildModelLockUpdate, getEarliestModelLockUntil } from "open-sse/services/accountFallback.js"; import { formatRetryAfter, checkFallbackError, isModelLockActive, buildModelLockUpdate, getEarliestModelLockUntil } from "open-sse/services/accountFallback.js";
import { MAX_RATE_LIMIT_COOLDOWN_MS } from "open-sse/config/errorConfig.js";
import { resolveProviderId, FREE_PROVIDERS } from "@/shared/constants/providers.js"; import { resolveProviderId, FREE_PROVIDERS } from "@/shared/constants/providers.js";
import * as log from "../utils/logger.js"; import * as log from "../utils/logger.js";
@@ -179,7 +180,7 @@ export async function markAccountUnavailable(connectionId, status, errorText, pr
let shouldFallback, cooldownMs, newBackoffLevel; let shouldFallback, cooldownMs, newBackoffLevel;
if (resetsAtMs && resetsAtMs > Date.now()) { if (resetsAtMs && resetsAtMs > Date.now()) {
shouldFallback = true; shouldFallback = true;
cooldownMs = resetsAtMs - Date.now(); cooldownMs = Math.min(resetsAtMs - Date.now(), MAX_RATE_LIMIT_COOLDOWN_MS);
newBackoffLevel = 0; newBackoffLevel = 0;
} else { } else {
({ shouldFallback, cooldownMs, newBackoffLevel } = checkFallbackError(status, errorText, backoffLevel)); ({ shouldFallback, cooldownMs, newBackoffLevel } = checkFallbackError(status, errorText, backoffLevel));