Fix AG MITM

This commit is contained in:
decolua
2026-03-01 18:40:55 +07:00
parent 7076108550
commit 50990e84b4
6 changed files with 27 additions and 24 deletions

View File

@@ -12,20 +12,19 @@ export const FORMATS = {
CURSOR: "cursor"
};
// Map endpoint suffix → source format (takes priority over body-based detection)
const ENDPOINT_FORMAT_MAP = {
"/v1/responses": FORMATS.OPENAI_RESPONSES,
"/v1/chat/completions": FORMATS.OPENAI,
};
/**
* Detect source format from request URL pathname.
* Returns null if no matching endpoint found.
* Detect source format from request URL pathname + body.
* Returns null to fall back to body-based detection.
*/
export function detectFormatByEndpoint(pathname) {
for (const [segment, format] of Object.entries(ENDPOINT_FORMAT_MAP)) {
if (pathname.includes(segment)) return format;
/**
 * Detect the source format from the request URL pathname plus the parsed body.
 * Returns null when nothing matches, so the caller can fall back to
 * body-based detection.
 */
export function detectFormatByEndpoint(pathname, body) {
  // The /v1/responses endpoint always carries the openai-responses format.
  if (pathname.includes("/v1/responses")) {
    return FORMATS.OPENAI_RESPONSES;
  }
  // Cursor CLI posts a Responses-style body (input[]) to the chat endpoint;
  // treat that combination as plain openai.
  const isChatEndpoint = pathname.includes("/v1/chat/completions");
  const hasResponsesBody = Array.isArray(body?.input);
  if (isChatEndpoint && hasResponsesBody) {
    return FORMATS.OPENAI;
  }
  return null;
}

View File

@@ -84,8 +84,10 @@ export function createSSEStream(options = {}) {
// Ensure OpenAI-required fields are present on streaming chunks (Letta compat)
let fieldsInjected = false;
if (!parsed.object) { parsed.object = "chat.completion.chunk"; fieldsInjected = true; }
if (!parsed.created) { parsed.created = Math.floor(Date.now() / 1000); fieldsInjected = true; }
if (parsed.choices !== undefined) {
if (!parsed.object) { parsed.object = "chat.completion.chunk"; fieldsInjected = true; }
if (!parsed.created) { parsed.created = Math.floor(Date.now() / 1000); fieldsInjected = true; }
}
// Strip Azure-specific non-standard fields from streaming chunks
if (parsed.prompt_filter_results !== undefined) {

View File

@@ -196,13 +196,15 @@ export function extractUsage(chunk) {
}
// Gemini format (Antigravity)
if (chunk.usageMetadata && typeof chunk.usageMetadata === "object") {
// Antigravity wraps usageMetadata inside response: { response: { usageMetadata: {...} } }
const usageMeta = chunk.usageMetadata || chunk.response?.usageMetadata;
if (usageMeta && typeof usageMeta === "object") {
return normalizeUsage({
prompt_tokens: chunk.usageMetadata?.promptTokenCount || 0,
completion_tokens: chunk.usageMetadata?.candidatesTokenCount || 0,
total_tokens: chunk.usageMetadata?.totalTokenCount,
cached_tokens: chunk.usageMetadata?.cachedContentTokenCount,
reasoning_tokens: chunk.usageMetadata?.thoughtsTokenCount
prompt_tokens: usageMeta.promptTokenCount || 0,
completion_tokens: usageMeta.candidatesTokenCount || 0,
total_tokens: usageMeta.totalTokenCount,
cached_tokens: usageMeta.cachedContentTokenCount,
reasoning_tokens: usageMeta.thoughtsTokenCount
});
}

View File

@@ -1,6 +1,6 @@
{
"name": "9router-app",
"version": "0.3.19",
"version": "0.3.23",
"description": "9Router web dashboard",
"private": true,
"scripts": {

View File

@@ -15,8 +15,8 @@ export const OAUTH_PROVIDERS = {
codex: { id: "codex", alias: "cx", name: "OpenAI Codex", icon: "code", color: "#3B82F6" },
github: { id: "github", alias: "gh", name: "GitHub Copilot", icon: "code", color: "#333333" },
cursor: { id: "cursor", alias: "cu", name: "Cursor IDE", icon: "edit_note", color: "#00D4AA" },
// kilocode: { id: "kilocode", alias: "kc", name: "Kilo Code", icon: "code", color: "#FF6B35", textIcon: "KC" },
// cline: { id: "cline", alias: "cl", name: "Cline", icon: "smart_toy", color: "#5B9BD5", textIcon: "CL" },
kilocode: { id: "kilocode", alias: "kc", name: "Kilo Code", icon: "code", color: "#FF6B35", textIcon: "KC" },
cline: { id: "cline", alias: "cl", name: "Cline", icon: "smart_toy", color: "#5B9BD5", textIcon: "CL" },
};
export const APIKEY_PROVIDERS = {

View File

@@ -181,8 +181,8 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null, re
connectionId: credentials.connectionId,
userAgent,
apiKey,
// Detect source format by endpoint — /chat/completions is always openai, /responses is always openai-responses
sourceFormatOverride: request?.url ? detectFormatByEndpoint(new URL(request.url).pathname) : null,
// Detect source format by endpoint + body
sourceFormatOverride: request?.url ? detectFormatByEndpoint(new URL(request.url).pathname, body) : null,
onCredentialsRefreshed: async (newCreds) => {
await updateProviderCredentials(credentials.connectionId, {
accessToken: newCreds.accessToken,