refactor: streamline provider interactions and enhance error handling

This commit is contained in:
decolua
2026-01-11 21:45:01 +07:00
parent f302c88dfb
commit f46ff42cb3
21 changed files with 859 additions and 170 deletions

View File

@@ -75,11 +75,12 @@ export const PROVIDERS = {
antigravity: {
baseUrls: [
"https://daily-cloudcode-pa.sandbox.googleapis.com",
"https://daily-cloudcode-pa.googleapis.com",
"https://cloudcode-pa.googleapis.com"
],
format: "antigravity",
headers: {
"User-Agent": "antigravity/1.11.5 windows/amd64"
"User-Agent": "antigravity/1.104.0 darwin/arm64"
},
clientId: "1071006060591-tmhssin2h21lcre235vtolojh4g403ep.apps.googleusercontent.com",
clientSecret: "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"

View File

@@ -0,0 +1,90 @@
import crypto from "crypto";
import { BaseExecutor } from "./base.js";
import { PROVIDERS, OAUTH_ENDPOINTS } from "../config/constants.js";

/**
 * Executor for the Antigravity provider (Google Cloud Code sandbox endpoints).
 * Wraps outgoing requests in the Cloud Code "agent" envelope and refreshes
 * Google OAuth credentials when the access token expires.
 */
export class AntigravityExecutor extends BaseExecutor {
  constructor() {
    super("antigravity", PROVIDERS.antigravity);
  }

  /**
   * Build the endpoint URL for the given fallback index.
   * @param {string} model - Unused; the model travels in the request body.
   * @param {boolean} stream - Selects the SSE streaming endpoint when true.
   * @param {number} [urlIndex=0] - Index into the configured fallback base URLs.
   * @returns {string} Fully-qualified v1internal endpoint URL.
   */
  buildUrl(model, stream, urlIndex = 0) {
    const baseUrls = this.getBaseUrls();
    const baseUrl = baseUrls[urlIndex] || baseUrls[0];
    const path = stream ? "/v1internal:streamGenerateContent?alt=sse" : "/v1internal:generateContent";
    return `${baseUrl}${path}`;
  }

  /**
   * Build per-request headers: OAuth bearer token plus the Antigravity
   * client User-Agent expected by the sandbox endpoints.
   */
  buildHeaders(credentials, stream = true) {
    return {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${credentials.accessToken}`,
      "User-Agent": this.config.headers?.["User-Agent"] || "antigravity/1.104.0 darwin/arm64",
      ...(stream && { "Accept": "text/event-stream" })
    };
  }

  /**
   * Wrap the translated body in the Cloud Code "agent" envelope.
   * Ensures project/session ids exist, drops safetySettings (keys set to
   * `undefined` are omitted by JSON.stringify), and forces VALIDATED
   * function calling whenever tools are present.
   */
  transformRequest(model, body, stream, credentials) {
    const projectId = credentials?.projectId || this.generateProjectId();
    return {
      ...body,
      project: projectId,
      model: model,
      userAgent: "antigravity",
      requestType: "agent",
      requestId: `agent-${crypto.randomUUID()}`,
      request: {
        ...body.request,
        sessionId: body.request?.sessionId || this.generateSessionId(),
        safetySettings: undefined,
        toolConfig: body.request?.tools?.length > 0
          ? { functionCallingConfig: { mode: "VALIDATED" } }
          : body.request?.toolConfig
      }
    };
  }

  /**
   * Refresh the Google OAuth access token using the stored refresh token.
   * @returns {Promise<object|null>} New credential fields, or null on failure.
   */
  async refreshCredentials(credentials, log) {
    if (!credentials.refreshToken) return null;
    try {
      const response = await fetch(OAUTH_ENDPOINTS.google.token, {
        method: "POST",
        headers: { "Content-Type": "application/x-www-form-urlencoded", "Accept": "application/json" },
        body: new URLSearchParams({
          grant_type: "refresh_token",
          refresh_token: credentials.refreshToken,
          client_id: this.config.clientId,
          client_secret: this.config.clientSecret
        })
      });
      if (!response.ok) return null;
      const tokens = await response.json();
      log?.info?.("TOKEN", "Antigravity refreshed");
      return {
        accessToken: tokens.access_token,
        refreshToken: tokens.refresh_token || credentials.refreshToken,
        expiresIn: tokens.expires_in,
        projectId: credentials.projectId
      };
    } catch (error) {
      log?.error?.("TOKEN", `Antigravity refresh error: ${error.message}`);
      return null;
    }
  }

  // Synthesize a plausible project id such as "bright-wave-1a2b3".
  generateProjectId() {
    const adj = ["useful", "bright", "swift", "calm", "bold"][Math.floor(Math.random() * 5)];
    const noun = ["fuze", "wave", "spark", "flow", "core"][Math.floor(Math.random() * 5)];
    return `${adj}-${noun}-${crypto.randomUUID().slice(0, 5)}`;
  }

  /**
   * Generate a negative int64-style session id string.
   * Fix: the previous `Math.floor(Math.random() * 9e18)` exceeds
   * Number.MAX_SAFE_INTEGER and silently loses precision; draw 63 uniform
   * random bits via crypto and format them through BigInt instead.
   */
  generateSessionId() {
    const value = crypto.randomBytes(8).readBigUInt64BE(0) >> 1n;
    return `-${value}`;
  }
}
export default AntigravityExecutor;

109
open-sse/executors/base.js Normal file
View File

@@ -0,0 +1,109 @@
/**
 * BaseExecutor - Base class for provider executors.
 * Subclasses override URL/header construction, request transformation, and
 * credential refresh to supply provider-specific behavior; this class
 * implements the shared fetch-with-fallback execution loop.
 */
export class BaseExecutor {
  constructor(provider, config) {
    this.provider = provider;
    this.config = config;
  }

  /** @returns {string} Provider identifier this executor serves. */
  getProvider() {
    return this.provider;
  }

  /** Normalize config into a URL list; `baseUrls` wins over single `baseUrl`. */
  getBaseUrls() {
    if (this.config.baseUrls) return this.config.baseUrls;
    return this.config.baseUrl ? [this.config.baseUrl] : [];
  }

  /** Number of URLs available for fallback attempts (at least 1). */
  getFallbackCount() {
    const count = this.getBaseUrls().length;
    return count === 0 ? 1 : count;
  }

  /** Pick the URL at urlIndex, falling back to the first, then config.baseUrl. */
  buildUrl(model, stream, urlIndex = 0) {
    const candidates = this.getBaseUrls();
    return candidates[urlIndex] || candidates[0] || this.config.baseUrl;
  }

  /** Default headers: JSON content type, config extras, bearer auth, SSE accept. */
  buildHeaders(credentials, stream = true) {
    const headers = { "Content-Type": "application/json", ...this.config.headers };
    // Access tokens take precedence over API keys, matching provider OAuth flows.
    const bearer = credentials.accessToken || credentials.apiKey;
    if (bearer) {
      headers["Authorization"] = `Bearer ${bearer}`;
    }
    if (stream) {
      headers["Accept"] = "text/event-stream";
    }
    return headers;
  }

  /** Hook for provider-specific body transformation (identity by default). */
  transformRequest(model, body, stream, credentials) {
    return body;
  }

  /** Retry only rate limits (429), and only while another fallback URL remains. */
  shouldRetry(status, urlIndex) {
    if (status !== 429) return false;
    return urlIndex + 1 < this.getFallbackCount();
  }

  /** Hook for provider-specific token refresh; the base class cannot refresh. */
  async refreshCredentials(credentials, log) {
    return null;
  }

  /** True when the credentials expire within the next five minutes. */
  needsRefresh(credentials) {
    if (!credentials.expiresAt) return false;
    const remainingMs = new Date(credentials.expiresAt).getTime() - Date.now();
    return remainingMs < 5 * 60 * 1000;
  }

  /** Normalize an upstream failure into a `{ status, message }` record. */
  parseError(response, bodyText) {
    return { status: response.status, message: bodyText || `HTTP ${response.status}` };
  }

  /**
   * POST the (transformed) body to the provider, walking the fallback URL
   * list on rate limits and on network errors.
   * @returns {Promise<{response, url, headers, transformedBody}>}
   * @throws The last network error, or an Error when every URL was exhausted.
   */
  async execute({ model, body, stream, credentials, signal, log }) {
    const total = this.getFallbackCount();
    let finalError = null;
    let finalStatus = 0;
    for (let attempt = 0; attempt < total; attempt++) {
      const url = this.buildUrl(model, stream, attempt);
      const headers = this.buildHeaders(credentials, stream);
      const transformedBody = this.transformRequest(model, body, stream, credentials);
      let response;
      try {
        response = await fetch(url, {
          method: "POST",
          headers,
          body: JSON.stringify(transformedBody),
          signal
        });
      } catch (error) {
        finalError = error;
        // Out of fallbacks: surface the network error to the caller.
        if (attempt + 1 >= total) throw error;
        log?.debug?.("RETRY", `Error on ${url}, trying fallback ${attempt + 1}`);
        continue;
      }
      if (!this.shouldRetry(response.status, attempt)) {
        return { response, url, headers, transformedBody };
      }
      log?.debug?.("RETRY", `${response.status} on ${url}, trying fallback ${attempt + 1}`);
      finalStatus = response.status;
    }
    throw finalError || new Error(`All ${total} URLs failed with status ${finalStatus}`);
  }
}
export default BaseExecutor;

View File

@@ -0,0 +1,116 @@
import { BaseExecutor } from "./base.js";
import { PROVIDERS, OAUTH_ENDPOINTS } from "../config/constants.js";

/**
 * Fallback executor for providers without a specialized subclass.
 * Routes URL/header construction and token refresh by provider id.
 */
export class DefaultExecutor extends BaseExecutor {
  constructor(provider) {
    // Unknown providers fall back to the OpenAI-compatible configuration.
    super(provider, PROVIDERS[provider] || PROVIDERS.openai);
  }

  /**
   * Build the endpoint URL per provider family.
   * @param {string} model - Only Gemini embeds the model in the path.
   */
  buildUrl(model, stream, urlIndex = 0) {
    switch (this.provider) {
      case "claude":
      case "glm":
      case "kimi":
      case "minimax":
        return `${this.config.baseUrl}?beta=true`;
      case "gemini":
        return `${this.config.baseUrl}/${model}:${stream ? "streamGenerateContent?alt=sse" : "generateContent"}`;
      default:
        return this.config.baseUrl;
    }
  }

  /**
   * Build auth headers per provider. API keys take precedence over OAuth
   * access tokens where both schemes are supported.
   * (Fix: replaced side-effecting ternary expressions with explicit if/else.)
   */
  buildHeaders(credentials, stream = true) {
    const headers = { "Content-Type": "application/json", ...this.config.headers };
    switch (this.provider) {
      case "gemini":
        if (credentials.apiKey) {
          headers["x-goog-api-key"] = credentials.apiKey;
        } else {
          headers["Authorization"] = `Bearer ${credentials.accessToken}`;
        }
        break;
      case "claude":
        if (credentials.apiKey) {
          headers["x-api-key"] = credentials.apiKey;
        } else {
          headers["Authorization"] = `Bearer ${credentials.accessToken}`;
        }
        break;
      case "glm":
      case "kimi":
      case "minimax":
        headers["x-api-key"] = credentials.apiKey;
        break;
      default:
        headers["Authorization"] = `Bearer ${credentials.apiKey || credentials.accessToken}`;
    }
    if (stream) headers["Accept"] = "text/event-stream";
    return headers;
  }

  /**
   * Refresh credentials using the provider's OAuth flow.
   * Providers without a registered refresher return null.
   * @returns {Promise<object|null>} New tokens, or null when refresh is
   *   unsupported or fails (failures are logged, never thrown).
   */
  async refreshCredentials(credentials, log) {
    if (!credentials.refreshToken) return null;
    // Dispatch table: each provider's refresh endpoint and payload encoding.
    const refreshers = {
      claude: () => this.refreshWithJSON(OAUTH_ENDPOINTS.anthropic.token, { grant_type: "refresh_token", refresh_token: credentials.refreshToken, client_id: PROVIDERS.claude.clientId }),
      codex: () => this.refreshWithForm(OAUTH_ENDPOINTS.openai.token, { grant_type: "refresh_token", refresh_token: credentials.refreshToken, client_id: PROVIDERS.codex.clientId, scope: "openid profile email offline_access" }),
      qwen: () => this.refreshWithForm(OAUTH_ENDPOINTS.qwen.token, { grant_type: "refresh_token", refresh_token: credentials.refreshToken, client_id: PROVIDERS.qwen.clientId }),
      iflow: () => this.refreshIflow(credentials.refreshToken),
      gemini: () => this.refreshGoogle(credentials.refreshToken)
    };
    const refresher = refreshers[this.provider];
    if (!refresher) return null;
    try {
      const result = await refresher();
      if (result) log?.info?.("TOKEN", `${this.provider} refreshed`);
      return result;
    } catch (error) {
      log?.error?.("TOKEN", `${this.provider} refresh error: ${error.message}`);
      return null;
    }
  }

  /** Refresh via a JSON-encoded token request (Anthropic style). */
  async refreshWithJSON(url, body) {
    const response = await fetch(url, {
      method: "POST",
      headers: { "Content-Type": "application/json", "Accept": "application/json" },
      body: JSON.stringify(body)
    });
    if (!response.ok) return null;
    const tokens = await response.json();
    return { accessToken: tokens.access_token, refreshToken: tokens.refresh_token || body.refresh_token, expiresIn: tokens.expires_in };
  }

  /** Refresh via a form-encoded token request (standard OAuth style). */
  async refreshWithForm(url, params) {
    const response = await fetch(url, {
      method: "POST",
      headers: { "Content-Type": "application/x-www-form-urlencoded", "Accept": "application/json" },
      body: new URLSearchParams(params)
    });
    if (!response.ok) return null;
    const tokens = await response.json();
    return { accessToken: tokens.access_token, refreshToken: tokens.refresh_token || params.refresh_token, expiresIn: tokens.expires_in };
  }

  /** iFlow refresh: requires HTTP Basic auth in addition to the form body. */
  async refreshIflow(refreshToken) {
    const basicAuth = btoa(`${PROVIDERS.iflow.clientId}:${PROVIDERS.iflow.clientSecret}`);
    const response = await fetch(OAUTH_ENDPOINTS.iflow.token, {
      method: "POST",
      headers: { "Content-Type": "application/x-www-form-urlencoded", "Accept": "application/json", "Authorization": `Basic ${basicAuth}` },
      body: new URLSearchParams({ grant_type: "refresh_token", refresh_token: refreshToken, client_id: PROVIDERS.iflow.clientId, client_secret: PROVIDERS.iflow.clientSecret })
    });
    if (!response.ok) return null;
    const tokens = await response.json();
    return { accessToken: tokens.access_token, refreshToken: tokens.refresh_token || refreshToken, expiresIn: tokens.expires_in };
  }

  /** Google refresh using this provider's configured OAuth client. */
  async refreshGoogle(refreshToken) {
    const response = await fetch(OAUTH_ENDPOINTS.google.token, {
      method: "POST",
      headers: { "Content-Type": "application/x-www-form-urlencoded", "Accept": "application/json" },
      body: new URLSearchParams({ grant_type: "refresh_token", refresh_token: refreshToken, client_id: this.config.clientId, client_secret: this.config.clientSecret })
    });
    if (!response.ok) return null;
    const tokens = await response.json();
    return { accessToken: tokens.access_token, refreshToken: tokens.refresh_token || refreshToken, expiresIn: tokens.expires_in };
  }
}
export default DefaultExecutor;

View File

@@ -0,0 +1,62 @@
import { BaseExecutor } from "./base.js";
import { PROVIDERS, OAUTH_ENDPOINTS } from "../config/constants.js";

/**
 * Executor for the Gemini CLI provider (Cloud Code generateContent API).
 * Injects the credential's project id and refreshes Google OAuth tokens.
 */
export class GeminiCLIExecutor extends BaseExecutor {
  constructor() {
    super("gemini-cli", PROVIDERS["gemini-cli"]);
  }

  /**
   * Build the endpoint URL; streaming selects the SSE action.
   * @param {string} model - Unused; the model travels in the request body.
   */
  buildUrl(model, stream, urlIndex = 0) {
    const action = stream ? "streamGenerateContent?alt=sse" : "generateContent";
    return `${this.config.baseUrl}:${action}`;
  }

  /** Bearer-token headers; SSE accept header only when streaming. */
  buildHeaders(credentials, stream = true) {
    return {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${credentials.accessToken}`,
      ...(stream && { "Accept": "text/event-stream" })
    };
  }

  /**
   * Ensure the Cloud Code envelope carries a project id.
   * Fix: returns a shallow copy instead of mutating the caller-owned body.
   */
  transformRequest(model, body, stream, credentials) {
    if (!body.project && credentials?.projectId) {
      return { ...body, project: credentials.projectId };
    }
    return body;
  }

  /**
   * Refresh the Google OAuth access token using the stored refresh token.
   * @returns {Promise<object|null>} New credential fields, or null on failure.
   */
  async refreshCredentials(credentials, log) {
    if (!credentials.refreshToken) return null;
    try {
      const response = await fetch(OAUTH_ENDPOINTS.google.token, {
        method: "POST",
        headers: { "Content-Type": "application/x-www-form-urlencoded", "Accept": "application/json" },
        body: new URLSearchParams({
          grant_type: "refresh_token",
          refresh_token: credentials.refreshToken,
          client_id: this.config.clientId,
          client_secret: this.config.clientSecret
        })
      });
      if (!response.ok) return null;
      const tokens = await response.json();
      log?.info?.("TOKEN", "Gemini CLI refreshed");
      return {
        accessToken: tokens.access_token,
        refreshToken: tokens.refresh_token || credentials.refreshToken,
        expiresIn: tokens.expires_in,
        projectId: credentials.projectId
      };
    } catch (error) {
      log?.error?.("TOKEN", `Gemini CLI refresh error: ${error.message}`);
      return null;
    }
  }
}
export default GeminiCLIExecutor;

View File

@@ -0,0 +1,97 @@
import crypto from "crypto";
import { BaseExecutor } from "./base.js";
import { PROVIDERS, OAUTH_ENDPOINTS } from "../config/constants.js";
/**
 * Executor for GitHub Copilot chat completions.
 * Uses a short-lived Copilot token (minted from the GitHub OAuth access
 * token) and mimics the VS Code Copilot Chat client headers.
 */
export class GithubExecutor extends BaseExecutor {
  constructor() {
    super("github", PROVIDERS.github);
  }

  /** Single fixed endpoint; no fallback URLs for GitHub. */
  buildUrl(model, stream, urlIndex = 0) {
    return this.config.baseUrl;
  }

  /**
   * Copilot-specific headers. Prefers the short-lived copilotToken over the
   * raw GitHub access token.
   * Fix: uses the imported node:crypto module instead of relying on the
   * global Web Crypto object, which is absent on older Node versions.
   */
  buildHeaders(credentials, stream = true) {
    const token = credentials.copilotToken || credentials.accessToken;
    return {
      "Authorization": `Bearer ${token}`,
      "Content-Type": "application/json",
      "copilot-integration-id": "vscode-chat",
      "editor-version": "vscode/1.107.1",
      "editor-plugin-version": "copilot-chat/0.26.7",
      "user-agent": "GitHubCopilotChat/0.26.7",
      "openai-intent": "conversation-panel",
      "x-github-api-version": "2025-04-01",
      "x-request-id": crypto.randomUUID(),
      "x-vscode-user-agent-library-version": "electron-fetch",
      "X-Initiator": "user",
      "Accept": stream ? "text/event-stream" : "application/json"
    };
  }

  /**
   * Mint a fresh Copilot token from a GitHub access token.
   * @returns {Promise<{token: string, expiresAt: *}|null>} null on failure.
   */
  async refreshCopilotToken(githubAccessToken, log) {
    try {
      const response = await fetch("https://api.github.com/copilot_internal/v2/token", {
        headers: { "Authorization": `Bearer ${githubAccessToken}`, "User-Agent": "GitHub-Copilot/1.0", "Accept": "*/*" }
      });
      if (!response.ok) return null;
      const data = await response.json();
      log?.info?.("TOKEN", "Copilot token refreshed");
      return { token: data.token, expiresAt: data.expires_at };
    } catch (error) {
      log?.error?.("TOKEN", `Copilot refresh error: ${error.message}`);
      return null;
    }
  }

  /**
   * Refresh the underlying GitHub OAuth access token.
   * @returns {Promise<object|null>} New tokens, or null on failure.
   */
  async refreshGitHubToken(refreshToken, log) {
    try {
      const response = await fetch(OAUTH_ENDPOINTS.github.token, {
        method: "POST",
        headers: { "Content-Type": "application/x-www-form-urlencoded", "Accept": "application/json" },
        body: new URLSearchParams({
          grant_type: "refresh_token",
          refresh_token: refreshToken,
          client_id: this.config.clientId,
          client_secret: this.config.clientSecret
        })
      });
      if (!response.ok) return null;
      const tokens = await response.json();
      log?.info?.("TOKEN", "GitHub token refreshed");
      return { accessToken: tokens.access_token, refreshToken: tokens.refresh_token || refreshToken, expiresIn: tokens.expires_in };
    } catch (error) {
      log?.error?.("TOKEN", `GitHub refresh error: ${error.message}`);
      return null;
    }
  }

  /**
   * Two-stage refresh: first try minting a Copilot token from the current
   * access token; if that fails, refresh the GitHub token and retry.
   */
  async refreshCredentials(credentials, log) {
    let copilotResult = await this.refreshCopilotToken(credentials.accessToken, log);
    if (!copilotResult && credentials.refreshToken) {
      const githubTokens = await this.refreshGitHubToken(credentials.refreshToken, log);
      if (githubTokens?.accessToken) {
        copilotResult = await this.refreshCopilotToken(githubTokens.accessToken, log);
        if (copilotResult) {
          return { ...githubTokens, copilotToken: copilotResult.token, copilotTokenExpiresAt: copilotResult.expiresAt };
        }
        // Copilot mint failed, but the GitHub tokens are still worth saving.
        return githubTokens;
      }
    }
    if (copilotResult) {
      return { accessToken: credentials.accessToken, refreshToken: credentials.refreshToken, copilotToken: copilotResult.token, copilotTokenExpiresAt: copilotResult.expiresAt };
    }
    return null;
  }

  /** Also refresh when the short-lived Copilot token is within 5 minutes of expiry. */
  needsRefresh(credentials) {
    if (credentials.copilotTokenExpiresAt) {
      if (new Date(credentials.copilotTokenExpiresAt).getTime() - Date.now() < 5 * 60 * 1000) return true;
    }
    return super.needsRefresh(credentials);
  }
}
export default GithubExecutor;

View File

@@ -0,0 +1,28 @@
import { AntigravityExecutor } from "./antigravity.js";
import { GeminiCLIExecutor } from "./gemini-cli.js";
import { GithubExecutor } from "./github.js";
import { DefaultExecutor } from "./default.js";

// Providers with dedicated executor subclasses (shared singletons).
const executors = {
  antigravity: new AntigravityExecutor(),
  "gemini-cli": new GeminiCLIExecutor(),
  github: new GithubExecutor()
};

// Lazily-created DefaultExecutor instances, one per remaining provider.
const defaultCache = new Map();

/**
 * Resolve the executor for a provider, preferring specialized implementations
 * and memoizing a DefaultExecutor for everything else.
 * Fix: own-property check so prototype keys (e.g. "constructor") never leak
 * an inherited object out of the plain-object lookup table.
 * @param {string} provider - Provider identifier.
 * @returns {BaseExecutor} Executor instance for the provider.
 */
export function getExecutor(provider) {
  if (Object.hasOwn(executors, provider)) return executors[provider];
  if (!defaultCache.has(provider)) defaultCache.set(provider, new DefaultExecutor(provider));
  return defaultCache.get(provider);
}

/** @returns {boolean} True when the provider has a dedicated executor subclass. */
export function hasSpecializedExecutor(provider) {
  return Object.hasOwn(executors, provider);
}

export { BaseExecutor } from "./base.js";
export { AntigravityExecutor } from "./antigravity.js";
export { GeminiCLIExecutor } from "./gemini-cli.js";
export { GithubExecutor } from "./github.js";
export { DefaultExecutor } from "./default.js";

View File

@@ -1,14 +1,15 @@
import { detectFormat, getTargetFormat, buildProviderUrl, buildProviderHeaders } from "../services/provider.js";
import { detectFormat, getTargetFormat } from "../services/provider.js";
import { translateRequest, needsTranslation } from "../translator/index.js";
import { FORMATS } from "../translator/formats.js";
import { createSSETransformStreamWithLogger, createPassthroughStreamWithLogger, COLORS } from "../utils/stream.js";
import { createStreamController, pipeWithDisconnect } from "../utils/streamHandler.js";
import { refreshTokenByProvider, refreshWithRetry } from "../services/tokenRefresh.js";
import { refreshWithRetry } from "../services/tokenRefresh.js";
import { createRequestLogger } from "../utils/requestLogger.js";
import { getModelTargetFormat, PROVIDER_ID_TO_ALIAS } from "../config/providerModels.js";
import { createErrorResult, parseUpstreamError, formatProviderError } from "../utils/error.js";
import { handleBypassRequest } from "../utils/bypassHandler.js";
import { saveRequestUsage, trackPendingRequest, appendRequestLog } from "@/lib/usageDb.js";
import { getExecutor } from "../executors/index.js";
/**
* Extract usage from non-streaming response body
@@ -118,9 +119,8 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
// Update model in body
translatedBody.model = model;
// Build provider URL and headers
const providerUrl = buildProviderUrl(provider, model, stream);
const providerHeaders = buildProviderHeaders(provider, credentials, stream, translatedBody);
// Get executor for this provider
const executor = getExecutor(provider);
// Track pending request
trackPendingRequest(model, provider, connectionId, true);
@@ -128,38 +128,39 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
// Log start
appendRequestLog({ model, provider, connectionId, status: "PENDING" }).catch(() => {});
// 2. Log converted request to provider
reqLogger.logConvertedRequest(providerUrl, providerHeaders, translatedBody);
const msgCount = translatedBody.messages?.length
|| translatedBody.contents?.length
|| translatedBody.request?.contents?.length
|| 0;
log?.debug?.("REQUEST", `${provider.toUpperCase()} | ${model} | ${msgCount} msgs`);
// Log headers (mask sensitive values)
const safeHeaders = {};
for (const [key, value] of Object.entries(providerHeaders)) {
if (key.toLowerCase().includes("auth") || key.toLowerCase().includes("key") || key.toLowerCase().includes("token")) {
safeHeaders[key] = value ? `${value.slice(0, 10)}...` : "";
} else {
safeHeaders[key] = value;
}
}
log?.debug?.("HEADERS", JSON.stringify(safeHeaders));
// Create stream controller for disconnect detection
const streamController = createStreamController({ onDisconnect, log, provider, model });
// Make request to provider with abort signal
// Execute request using executor (handles URL building, headers, fallback, transform)
let providerResponse;
let providerUrl;
let providerHeaders;
let finalBody;
try {
providerResponse = await fetch(providerUrl, {
method: "POST",
headers: providerHeaders,
body: JSON.stringify(translatedBody),
signal: streamController.signal
const result = await executor.execute({
model,
body: translatedBody,
stream,
credentials,
signal: streamController.signal,
log
});
providerResponse = result.response;
providerUrl = result.url;
providerHeaders = result.headers;
finalBody = result.transformedBody;
// Log converted request
reqLogger.logConvertedRequest(providerUrl, providerHeaders, finalBody);
} catch (error) {
trackPendingRequest(model, provider, connectionId, false);
appendRequestLog({ model, provider, connectionId, status: `FAILED ${error.name === "AbortError" ? 499 : 502}` }).catch(() => {});
@@ -172,65 +173,30 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
return createErrorResult(502, errMsg);
}
// Handle 401/403 - try token refresh
if (providerResponse.status === 401 || providerResponse.status === 403) {
let newCredentials = null;
// GitHub needs special handling - refresh copilotToken using accessToken
if (provider === "github") {
const { refreshCopilotToken, refreshGitHubToken } = await import("../services/tokenRefresh.js");
// First try refreshing copilotToken using existing accessToken
let copilotResult = await refreshCopilotToken(credentials.accessToken, log);
// If that fails, refresh GitHub accessToken first, then get new copilotToken
if (!copilotResult && credentials.refreshToken) {
const githubTokens = await refreshGitHubToken(credentials.refreshToken, log);
if (githubTokens?.accessToken) {
credentials.accessToken = githubTokens.accessToken;
if (githubTokens.refreshToken) {
credentials.refreshToken = githubTokens.refreshToken;
}
copilotResult = await refreshCopilotToken(githubTokens.accessToken, log);
}
}
if (copilotResult?.token) {
credentials.copilotToken = copilotResult.token;
newCredentials = {
accessToken: credentials.accessToken,
refreshToken: credentials.refreshToken,
providerSpecificData: {
...credentials.providerSpecificData,
copilotToken: copilotResult.token,
copilotTokenExpiresAt: copilotResult.expiresAt
}
};
log?.info?.("TOKEN", `${provider.toUpperCase()} | copilotToken refreshed`);
}
// Log headers (mask sensitive values)
const safeHeaders = {};
for (const [key, value] of Object.entries(providerHeaders || {})) {
if (key.toLowerCase().includes("auth") || key.toLowerCase().includes("key") || key.toLowerCase().includes("token")) {
safeHeaders[key] = value ? `${value.slice(0, 10)}...` : "";
} else {
newCredentials = await refreshWithRetry(
() => refreshTokenByProvider(provider, credentials, log),
3,
log
);
safeHeaders[key] = value;
}
}
log?.debug?.("HEADERS", JSON.stringify(safeHeaders));
if (newCredentials?.accessToken || (provider === "github" && credentials.copilotToken)) {
if (newCredentials?.accessToken) {
log?.info?.("TOKEN", `${provider.toUpperCase()} | refreshed`);
credentials.accessToken = newCredentials.accessToken;
}
if (newCredentials?.refreshToken) {
credentials.refreshToken = newCredentials.refreshToken;
}
if (newCredentials?.providerSpecificData) {
credentials.providerSpecificData = {
...credentials.providerSpecificData,
...newCredentials.providerSpecificData
};
}
// Handle 401/403 - try token refresh using executor
if (providerResponse.status === 401 || providerResponse.status === 403) {
const newCredentials = await refreshWithRetry(
() => executor.refreshCredentials(credentials, log),
3,
log
);
if (newCredentials?.accessToken || newCredentials?.copilotToken) {
log?.info?.("TOKEN", `${provider.toUpperCase()} | refreshed`);
// Update credentials
Object.assign(credentials, newCredentials);
// Notify caller about refreshed credentials
if (onCredentialsRefreshed && newCredentials) {
@@ -238,16 +204,22 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
}
// Retry with new credentials
const newHeaders = buildProviderHeaders(provider, credentials, stream, translatedBody);
const retryResponse = await fetch(providerUrl, {
method: "POST",
headers: newHeaders,
body: JSON.stringify(translatedBody),
signal: streamController.signal
});
try {
const retryResult = await executor.execute({
model,
body: translatedBody,
stream,
credentials,
signal: streamController.signal,
log
});
if (retryResponse.ok) {
providerResponse = retryResponse;
if (retryResult.response.ok) {
providerResponse = retryResult.response;
providerUrl = retryResult.url;
}
} catch (retryError) {
log?.warn?.("TOKEN", `${provider.toUpperCase()} | retry after refresh failed`);
}
} else {
log?.warn?.("TOKEN", `${provider.toUpperCase()} | refresh failed`);
@@ -263,7 +235,7 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
console.log(`${COLORS.red}[ERROR] ${errMsg}${COLORS.reset}`);
// Log error with full request body for debugging
reqLogger.logError(new Error(message), translatedBody);
reqLogger.logError(new Error(message), finalBody || translatedBody);
return createErrorResult(statusCode, errMsg);
}

View File

@@ -79,8 +79,14 @@ export function getProviderConfig(provider) {
return PROVIDERS[provider] || PROVIDERS.openai;
}
// Get number of fallback URLs for provider (for retry logic)
export function getProviderFallbackCount(provider) {
const config = getProviderConfig(provider);
return config.baseUrls?.length || 1;
}
// Build provider URL
export function buildProviderUrl(provider, model, stream = true) {
export function buildProviderUrl(provider, model, stream = true, options = {}) {
const config = getProviderConfig(provider);
switch (provider) {
@@ -98,7 +104,9 @@ export function buildProviderUrl(provider, model, stream = true) {
}
case "antigravity": {
const baseUrl = config.baseUrls[0];
// Use baseUrlIndex from options or default to 0
const urlIndex = options?.baseUrlIndex || 0;
const baseUrl = config.baseUrls[urlIndex] || config.baseUrls[0];
const path = stream ? "/v1internal:streamGenerateContent?alt=sse" : "/v1internal:generateContent";
return `${baseUrl}${path}`;
}

View File

@@ -51,7 +51,7 @@ export function translateRequest(sourceFormat, targetFormat, model, body, stream
if (targetFormat !== FORMATS.OPENAI) {
const fromOpenAI = requestRegistry.get(`${FORMATS.OPENAI}:${targetFormat}`);
if (fromOpenAI) {
result = fromOpenAI(model, result, stream);
result = fromOpenAI(model, result, stream, credentials);
}
}
}

View File

@@ -1,6 +1,10 @@
import { register } from "../index.js";
import { FORMATS } from "../formats.js";
import { DEFAULT_THINKING_GEMINI_SIGNATURE } from "../../config/defaultThinkingSignature.js";
function generateUUID() {
return crypto.randomUUID();
}
import {
DEFAULT_SAFETY_SETTINGS,
convertOpenAIContentToParts,
@@ -215,29 +219,45 @@ function openaiToGeminiCLIRequest(model, body, stream) {
}
// Wrap Gemini CLI format in Cloud Code wrapper
function wrapInCloudCodeEnvelope(model, geminiCLI, credentials = null) {
function wrapInCloudCodeEnvelope(model, geminiCLI, credentials = null, isAntigravity = false) {
const projectId = credentials?.projectId || generateProjectId();
return {
const envelope = {
project: projectId,
model: model,
userAgent: "gemini-cli",
requestId: generateRequestId(),
userAgent: isAntigravity ? "antigravity" : "gemini-cli",
requestId: isAntigravity ? `agent-${generateUUID()}` : generateRequestId(),
request: {
sessionId: generateSessionId(),
contents: geminiCLI.contents,
systemInstruction: geminiCLI.systemInstruction,
generationConfig: geminiCLI.generationConfig,
safetySettings: geminiCLI.safetySettings,
tools: geminiCLI.tools,
}
};
// Antigravity specific fields
if (isAntigravity) {
envelope.requestType = "agent";
// Remove safetySettings for Antigravity
// Add toolConfig for Antigravity
if (geminiCLI.tools?.length > 0) {
envelope.request.toolConfig = {
functionCallingConfig: { mode: "VALIDATED" }
};
}
} else {
// Keep safetySettings for Gemini CLI
envelope.request.safetySettings = geminiCLI.safetySettings;
}
return envelope;
}
// OpenAI -> Antigravity (Sandbox Cloud Code with wrapper)
function openaiToAntigravityRequest(model, body, stream, credentials = null) {
const geminiCLI = openaiToGeminiCLIRequest(model, body, stream);
return wrapInCloudCodeEnvelope(model, geminiCLI, credentials);
return wrapInCloudCodeEnvelope(model, geminiCLI, credentials, true);
}
// Register

View File

@@ -28,7 +28,6 @@ function claudeToOpenAIResponse(chunk, state) {
state.messageId = chunk.message?.id || `msg_${Date.now()}`;
state.model = chunk.message?.model;
state.toolCallIndex = 0;
console.log("🔍 ----------- toolCallIndex", state.toolCallIndex);
results.push(createChunk(state, { role: "assistant" }));
break;
}
@@ -114,6 +113,13 @@ function claudeToOpenAIResponse(chunk, state) {
case "message_stop": {
if (!state.finishReasonSent) {
const finishReason = state.finishReason || (state.toolCalls?.size > 0 ? "tool_calls" : "stop");
const usageObj = state.usage ? {
usage: {
prompt_tokens: state.usage.input_tokens || 0,
completion_tokens: state.usage.output_tokens || 0,
total_tokens: (state.usage.input_tokens || 0) + (state.usage.output_tokens || 0)
}
} : {};
results.push({
id: `chatcmpl-${state.messageId}`,
object: "chat.completion.chunk",
@@ -124,13 +130,7 @@ function claudeToOpenAIResponse(chunk, state) {
delta: {},
finish_reason: finishReason
}],
...(state.usage && {
usage: {
prompt_tokens: state.usage.input_tokens || 0,
completion_tokens: state.usage.output_tokens || 0,
total_tokens: (state.usage.input_tokens || 0) + (state.usage.output_tokens || 0)
}
})
...usageObj
});
state.finishReasonSent = true;
}

View File

@@ -30,7 +30,9 @@ export function handleBypassRequest(body, model) {
// Check warmup: first message "Warmup"
const firstText = getText(messages[0]?.content);
if (firstText === "Warmup") shouldBypass = true;
if (firstText === "Warmup") {
shouldBypass = true;
}
// Check count pattern: [{"role":"user","content":"count"}]
// if (!shouldBypass &&
@@ -40,10 +42,15 @@ export function handleBypassRequest(body, model) {
// shouldBypass = true;
// }
// Check skip patterns
// Check skip patterns - only check user messages, not system prompt
if (!shouldBypass && SKIP_PATTERNS?.length) {
const allText = messages.map(m => getText(m.content)).join(" ");
shouldBypass = SKIP_PATTERNS.some(p => allText.includes(p));
// Only check user messages, skip system/assistant messages to avoid matching system prompts
const userMessages = messages.filter(m => m.role === "user");
const userText = userMessages.map(m => getText(m.content)).join(" ");
const matchedPattern = SKIP_PATTERNS.find(p => userText.includes(p));
if (matchedPattern) {
shouldBypass = true;
}
}
if (!shouldBypass) return null;
@@ -54,10 +61,8 @@ export function handleBypassRequest(body, model) {
// Create bypass response using translator
if (stream) {
console.log("createStreamingResponse", sourceFormat, model);
return createStreamingResponse(sourceFormat, model);
} else {
console.log("createNonStreamingResponse", sourceFormat, model);
return createNonStreamingResponse(sourceFormat, model);
}
}

View File

@@ -106,18 +106,34 @@ function parseSSELine(line) {
* @returns {string} SSE formatted string
*/
export function formatSSE(data, sourceFormat) {
if (data.done) return "data: [DONE]\n\n";
// Handle null/undefined
if (data === null || data === undefined) {
return "data: null\n\n";
}
if (data && data.done) return "data: [DONE]\n\n";
// OpenAI Responses API format: has event field
if (data.event && data.data) {
if (data && data.event && data.data) {
return `event: ${data.event}\ndata: ${JSON.stringify(data.data)}\n\n`;
}
// Claude format: include event prefix
if (sourceFormat === FORMATS.CLAUDE && data.type) {
if (sourceFormat === FORMATS.CLAUDE && data && data.type) {
// If perf_metrics is null, remove it to avoid serialization issues
if (data.usage && typeof data.usage === 'object' && data.usage.perf_metrics === null) {
const { perf_metrics, ...usageWithoutPerf } = data.usage;
data = { ...data, usage: usageWithoutPerf };
}
return `event: ${data.type}\ndata: ${JSON.stringify(data)}\n\n`;
}
// If perf_metrics is null, remove it to avoid serialization issues
if (data?.usage && typeof data.usage === 'object' && data.usage.perf_metrics === null) {
const { perf_metrics, ...usageWithoutPerf } = data.usage;
data = { ...data, usage: usageWithoutPerf };
}
return `data: ${JSON.stringify(data)}\n\n`;
}
@@ -199,7 +215,7 @@ export function createSSEStream(options = {}) {
const parsed = parseSSELine(trimmed);
if (!parsed) continue;
if (parsed.done) {
if (parsed && parsed.done) {
const output = "data: [DONE]\n\n";
reqLogger?.appendConvertedChunk?.(output);
controller.enqueue(encoder.encode(output));

View File

@@ -4,7 +4,7 @@
"description": "9Router web dashboard",
"private": true,
"scripts": {
"dev": "next dev --port 3091",
"dev": "next dev",
"build": "next build",
"start": "next start"
},

View File

@@ -102,7 +102,7 @@ export default function PricingSettingsPage() {
<div className="space-y-3 text-sm text-text-muted">
<p>
<strong>Cost Calculation:</strong> Costs are calculated based on token usage and pricing rates.
Each request's cost is determined by: (input_tokens × input_rate) + (output_tokens × output_rate) + (cached_tokens × cached_rate)
Each request&apos;s cost is determined by: (input_tokens × input_rate) + (output_tokens × output_rate) + (cached_tokens × cached_rate)
</p>
<p>
<strong>Pricing Format:</strong> All rates are in <strong>dollars per million tokens</strong> ($/1M tokens).

View File

@@ -4,6 +4,9 @@ import { v4 as uuidv4 } from "uuid";
import path from "path";
import os from "os";
import fs from "fs";
// Detect a Workers/edge runtime by the presence of the global `caches`.
// NOTE: the old second clause (`typeof caches === 'object'`) was redundant —
// `typeof` always yields a string, and 'object' already implies !== 'undefined'.
const isCloud = typeof caches !== 'undefined';
// Get app name - fixed constant to avoid Windows path issues in standalone build
function getAppName() {
return "9router";
@@ -11,10 +14,12 @@ function getAppName() {
// Get user data directory based on platform
function getUserDataDir() {
if (isCloud) return "/tmp"; // Fallback for Workers
const platform = process.platform;
const homeDir = os.homedir();
const appName = getAppName();
if (platform === "win32") {
return path.join(process.env.APPDATA || path.join(homeDir, "AppData", "Roaming"), appName);
} else {
@@ -25,10 +30,10 @@ function getUserDataDir() {
// Data file path - stored in user home directory
const DATA_DIR = getUserDataDir();
const DB_FILE = path.join(DATA_DIR, "db.json");
const DB_FILE = isCloud ? null : path.join(DATA_DIR, "db.json");
// Ensure data directory exists
if (!fs.existsSync(DATA_DIR)) {
if (!isCloud && !fs.existsSync(DATA_DIR)) {
fs.mkdirSync(DATA_DIR, { recursive: true });
}
@@ -52,10 +57,19 @@ let dbInstance = null;
* Get database instance (singleton)
*/
export async function getDb() {
if (isCloud) {
// Return in-memory DB for Workers
if (!dbInstance) {
dbInstance = new Low({ read: async () => {}, write: async () => {} }, defaultData);
dbInstance.data = defaultData;
}
return dbInstance;
}
if (!dbInstance) {
const adapter = new JSONFile(DB_FILE);
dbInstance = new Low(adapter, defaultData);
// Try to read DB with error recovery for corrupt JSON
try {
await dbInstance.read();
@@ -68,7 +82,7 @@ export async function getDb() {
throw error;
}
}
// Initialize with default data if empty
if (!dbInstance.data) {
dbInstance.data = defaultData;

View File

@@ -77,7 +77,10 @@ export const ANTIGRAVITY_CONFIG = {
"https://www.googleapis.com/auth/experimentsandconfigs",
],
// Antigravity specific
apiEndpoint: "https://cloudcode-pa.googleapis.com",
apiVersion: "v1internal",
loadCodeAssistEndpoint: "https://cloudcode-pa.googleapis.com/v1internal:loadCodeAssist",
onboardUserEndpoint: "https://cloudcode-pa.googleapis.com/v1internal:onboardUser",
loadCodeAssistUserAgent: "google-api-nodejs-client/9.15.1",
loadCodeAssistApiClient: "google-cloud-sdk vscode_cloudshelleditor/0.1",
loadCodeAssistClientMetadata: `{"ideType":"IDE_UNSPECIFIED","platform":"PLATFORM_UNSPECIFIED","pluginType":"GEMINI"}`,

View File

@@ -245,34 +245,73 @@ const PROVIDERS = {
return await response.json();
},
postExchange: async (tokens) => {
const headers = {
Authorization: `Bearer ${tokens.access_token}`,
"Content-Type": "application/json",
"User-Agent": ANTIGRAVITY_CONFIG.loadCodeAssistUserAgent,
"X-Goog-Api-Client": ANTIGRAVITY_CONFIG.loadCodeAssistApiClient,
"Client-Metadata": ANTIGRAVITY_CONFIG.loadCodeAssistClientMetadata,
};
const metadata = { ideType: "IDE_UNSPECIFIED", platform: "PLATFORM_UNSPECIFIED", pluginType: "GEMINI" };
// Fetch user info
const userInfoRes = await fetch(`${ANTIGRAVITY_CONFIG.userInfoUrl}?alt=json`, {
headers: { Authorization: `Bearer ${tokens.access_token}` },
});
const userInfo = userInfoRes.ok ? await userInfoRes.json() : {};
// Fetch project ID from loadCodeAssist
// Load Code Assist to get project ID and tier
let projectId = "";
let tierId = "legacy-tier";
try {
const projectRes = await fetch(ANTIGRAVITY_CONFIG.loadCodeAssistEndpoint, {
const loadRes = await fetch(ANTIGRAVITY_CONFIG.loadCodeAssistEndpoint, {
method: "POST",
headers: {
Authorization: `Bearer ${tokens.access_token}`,
"Content-Type": "application/json",
"User-Agent": ANTIGRAVITY_CONFIG.loadCodeAssistUserAgent,
"X-Goog-Api-Client": ANTIGRAVITY_CONFIG.loadCodeAssistApiClient,
"Client-Metadata": ANTIGRAVITY_CONFIG.loadCodeAssistClientMetadata,
},
body: JSON.stringify({
metadata: { ideType: "IDE_UNSPECIFIED", platform: "PLATFORM_UNSPECIFIED", pluginType: "GEMINI" },
}),
headers,
body: JSON.stringify({ metadata }),
});
if (projectRes.ok) {
const data = await projectRes.json();
if (loadRes.ok) {
const data = await loadRes.json();
projectId = data.cloudaicompanionProject?.id || data.cloudaicompanionProject || "";
// Extract tier ID
if (Array.isArray(data.allowedTiers)) {
for (const tier of data.allowedTiers) {
if (tier.isDefault && tier.id) {
tierId = tier.id.trim();
break;
}
}
}
}
} catch (e) {
console.log("Failed to fetch project ID:", e);
console.log("Failed to load code assist:", e);
}
// Onboard user to enable Gemini Code Assist
if (projectId) {
try {
for (let i = 0; i < 10; i++) {
const onboardRes = await fetch(ANTIGRAVITY_CONFIG.onboardUserEndpoint, {
method: "POST",
headers,
body: JSON.stringify({ tierId, metadata, cloudaicompanionProject: projectId }),
});
if (onboardRes.ok) {
const result = await onboardRes.json();
if (result.done === true) {
// Extract final project ID from response
if (result.response?.cloudaicompanionProject) {
const respProject = result.response.cloudaicompanionProject;
projectId = typeof respProject === 'string' ? respProject.trim() : (respProject.id || projectId);
}
break;
}
}
// Wait 5 seconds before retry
await new Promise(resolve => setTimeout(resolve, 5000));
}
} catch (e) {
console.log("Failed to onboard user:", e);
}
}
return { userInfo, projectId };

View File

@@ -78,45 +78,124 @@ export class AntigravityService {
}
/**
* Fetch Project ID from loadCodeAssist API
* Get common headers for Antigravity API calls
*/
async fetchProjectId(accessToken) {
const loadReqBody = {
metadata: {
ideType: "IDE_UNSPECIFIED",
platform: "PLATFORM_UNSPECIFIED",
pluginType: "GEMINI",
},
getApiHeaders(accessToken) {
return {
"Authorization": `Bearer ${accessToken}`,
"Content-Type": "application/json",
"User-Agent": this.config.loadCodeAssistUserAgent,
"X-Goog-Api-Client": this.config.loadCodeAssistApiClient,
"Client-Metadata": this.config.loadCodeAssistClientMetadata,
};
}
/**
* Get metadata object for API calls
*/
getMetadata() {
return {
ideType: "IDE_UNSPECIFIED",
platform: "PLATFORM_UNSPECIFIED",
pluginType: "GEMINI",
};
}
/**
* Fetch Project ID and Tier from loadCodeAssist API
*/
async loadCodeAssist(accessToken) {
const response = await fetch(this.config.loadCodeAssistEndpoint, {
method: "POST",
headers: {
"Authorization": `Bearer ${accessToken}`,
"Content-Type": "application/json",
"User-Agent": this.config.loadCodeAssistUserAgent,
"X-Goog-Api-Client": this.config.loadCodeAssistApiClient,
"Client-Metadata": this.config.loadCodeAssistClientMetadata,
},
body: JSON.stringify(loadReqBody),
headers: this.getApiHeaders(accessToken),
body: JSON.stringify({ metadata: this.getMetadata() }),
});
if (!response.ok) {
const errorText = await response.text();
throw new Error(`Failed to fetch project ID: ${errorText}`);
throw new Error(`Failed to load code assist: ${errorText}`);
}
const loadResp = await response.json();
let projectId = loadResp.cloudaicompanionProject;
const data = await response.json();
// Extract project ID
let projectId = data.cloudaicompanionProject;
if (typeof projectId === 'object' && projectId !== null && projectId.id) {
projectId = projectId.id;
}
// Extract tier ID (default to legacy-tier)
let tierId = "legacy-tier";
if (Array.isArray(data.allowedTiers)) {
for (const tier of data.allowedTiers) {
if (tier.isDefault && tier.id) {
tierId = tier.id.trim();
break;
}
}
}
return { projectId, tierId, raw: data };
}
/**
* Onboard user to enable Gemini Code Assist for the project
*/
async onboardUser(accessToken, projectId, tierId) {
const response = await fetch(this.config.onboardUserEndpoint, {
method: "POST",
headers: this.getApiHeaders(accessToken),
body: JSON.stringify({
tierId,
metadata: this.getMetadata(),
cloudaicompanionProject: projectId,
}),
});
if (!response.ok) {
const errorText = await response.text();
throw new Error(`Failed to onboard user: ${errorText}`);
}
return await response.json();
}
/**
* Complete onboarding flow with retry
*/
async completeOnboarding(accessToken, projectId, tierId, maxRetries = 10) {
for (let i = 0; i < maxRetries; i++) {
const result = await this.onboardUser(accessToken, projectId, tierId);
if (result.done === true) {
// Extract final project ID from response
let finalProjectId = projectId;
if (result.response?.cloudaicompanionProject) {
const respProject = result.response.cloudaicompanionProject;
if (typeof respProject === 'string') {
finalProjectId = respProject.trim();
} else if (respProject.id) {
finalProjectId = respProject.id.trim();
}
}
return { success: true, projectId: finalProjectId };
}
// Wait 5 seconds before retry
await new Promise(resolve => setTimeout(resolve, 5000));
}
throw new Error("Onboarding timeout - please try again");
}
/**
* Fetch Project ID from loadCodeAssist API (legacy method for compatibility)
*/
async fetchProjectId(accessToken) {
const { projectId } = await this.loadCodeAssist(accessToken);
if (!projectId) {
throw new Error("No cloudaicompanionProject found in response");
}
return projectId;
}
@@ -218,17 +297,27 @@ export class AntigravityService {
// Get user info
const userInfo = await this.getUserInfo(tokens.access_token);
spinner.text = "Fetching Google Cloud Project ID...";
spinner.text = "Loading Code Assist configuration...";
// Fetch Project ID
const projectId = await this.fetchProjectId(tokens.access_token);
// Load Code Assist to get project ID and tier
const { projectId, tierId } = await this.loadCodeAssist(tokens.access_token);
if (!projectId) {
throw new Error("No Google Cloud Project found. Please ensure you have a GCP project with Gemini Code Assist enabled.");
}
spinner.text = "Onboarding to Gemini Code Assist...";
// Complete onboarding to enable Gemini Code Assist
const onboardResult = await this.completeOnboarding(tokens.access_token, projectId, tierId);
const finalProjectId = onboardResult.projectId || projectId;
spinner.text = "Saving tokens to server...";
// Save tokens to server
await this.saveTokens(tokens, userInfo, projectId);
await this.saveTokens(tokens, userInfo, finalProjectId);
spinner.succeed(`Antigravity connected successfully! (${userInfo.email}, Project: ${projectId})`);
spinner.succeed(`Antigravity connected successfully! (${userInfo.email}, Project: ${finalProjectId})`);
return true;
} catch (error) {
spinner.fail(`Failed: ${error.message}`);

View File

@@ -5,8 +5,12 @@ import os from "os";
import fs from "fs";
import { fileURLToPath } from "url";
// Detect a Workers/edge runtime by the presence of the global `caches`.
// NOTE: the old second clause (`typeof caches === 'object'`) was redundant —
// 'object' already implies the typeof result is not 'undefined'.
const isCloud = typeof caches !== 'undefined';
// Get app name from root package.json config
function getAppName() {
if (isCloud) return "9router"; // Skip file system access in Workers
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Look for root package.json (monorepo root)
const rootPkgPath = path.resolve(__dirname, "../../../package.json");
@@ -20,6 +24,8 @@ function getAppName() {
// Get user data directory based on platform
function getUserDataDir() {
if (isCloud) return "/tmp"; // Fallback for Workers
const platform = process.platform;
const homeDir = os.homedir();
const appName = getAppName();
@@ -34,11 +40,11 @@ function getUserDataDir() {
// Data file path - stored in user home directory
const DATA_DIR = getUserDataDir();
const DB_FILE = path.join(DATA_DIR, "usage.json");
const LOG_FILE = path.join(DATA_DIR, "log.txt");
const DB_FILE = isCloud ? null : path.join(DATA_DIR, "usage.json");
const LOG_FILE = isCloud ? null : path.join(DATA_DIR, "log.txt");
// Ensure data directory exists
if (!fs.existsSync(DATA_DIR)) {
if (!isCloud && !fs.existsSync(DATA_DIR)) {
fs.mkdirSync(DATA_DIR, { recursive: true });
}
@@ -83,6 +89,15 @@ export function trackPendingRequest(model, provider, connectionId, started) {
* Get usage database instance (singleton)
*/
export async function getUsageDb() {
if (isCloud) {
// Return in-memory DB for Workers
if (!dbInstance) {
dbInstance = new Low({ read: async () => {}, write: async () => {} }, defaultData);
dbInstance.data = defaultData;
}
return dbInstance;
}
if (!dbInstance) {
const adapter = new JSONFile(DB_FILE);
dbInstance = new Low(adapter, defaultData);
@@ -114,6 +129,8 @@ export async function getUsageDb() {
* @param {object} entry - Usage entry { provider, model, tokens: { prompt_tokens, completion_tokens, ... }, connectionId? }
*/
export async function saveRequestUsage(entry) {
if (isCloud) return; // Skip saving in Workers
try {
const db = await getUsageDb();
@@ -187,6 +204,8 @@ function formatLogDate(date = new Date()) {
* Format: datetime(dd-mm-yyyy h:m:s) | model | provider | account | tokens sent | tokens received | status
*/
export async function appendRequestLog({ model, provider, connectionId, tokens, status }) {
if (isCloud) return; // Skip logging in Workers
try {
const timestamp = formatLogDate();
const p = provider?.toUpperCase() || "-";
@@ -218,6 +237,7 @@ export async function appendRequestLog({ model, provider, connectionId, tokens,
* Get last N lines of log.txt
*/
export async function getRecentLogs(limit = 200) {
if (isCloud) return []; // Skip in Workers
if (!fs.existsSync(LOG_FILE)) return [];
try {
const content = fs.readFileSync(LOG_FILE, "utf-8");