feat: add GLM Coding (China) provider and Usage by API Keys statistics

Co-authored-by: Cursor <cursoragent@cursor.com>
Blade096 authored 2026-02-11 15:44:08 +07:00, committed by decolua
parent bd23ab41ee
commit 1ae4e311b7
6 changed files with 835 additions and 199 deletions


@@ -344,8 +344,9 @@ function parseSSEToOpenAIResponse(rawSSE, fallbackModel) {
* @param {function} options.onRequestSuccess - Callback when request succeeds (to clear error status)
* @param {function} options.onDisconnect - Callback when client disconnects
* @param {string} options.connectionId - Connection ID for usage tracking
+ * @param {string} options.apiKey - API key for usage tracking
*/
-export async function handleChatCore({ body, modelInfo, credentials, log, onCredentialsRefreshed, onRequestSuccess, onDisconnect, clientRawRequest, connectionId, userAgent }) {
+export async function handleChatCore({ body, modelInfo, credentials, log, onCredentialsRefreshed, onRequestSuccess, onDisconnect, clientRawRequest, connectionId, userAgent, apiKey }) {
const { provider, model } = modelInfo;
const requestStartTime = Date.now();
@@ -587,7 +588,8 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
model: model || "unknown",
tokens: usage,
timestamp: new Date().toISOString(),
-connectionId: connectionId || undefined
+connectionId: connectionId || undefined,
+apiKey: apiKey || undefined
}).catch(err => {
console.error("Failed to save usage stats:", err.message);
});
@@ -704,7 +706,8 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
model: model || "unknown",
tokens: usage,
timestamp: new Date().toISOString(),
-connectionId: connectionId || undefined
+connectionId: connectionId || undefined,
+apiKey: apiKey || undefined
}).catch(err => {
console.error("Failed to save streaming usage stats:", err.message);
});
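Note: the two usage hunks above persist the same entry shape from both the non-streaming and streaming completion paths (the name of the helper being called is cut off above each hunk). A minimal sketch of that entry, with field names taken from the diff and all values hypothetical placeholders:

    // Sketch of the persisted usage entry - values are made up, not from this repo.
    const entry = {
      model: "glm-4.6",                                       // hypothetical model id
      tokens: { prompt_tokens: 120, completion_tokens: 48 },  // usage object from the provider
      timestamp: new Date().toISOString(),
      connectionId: "conn_123",                               // hypothetical connection id
      apiKey: undefined                                       // key-less requests store no apiKey field at all
    };

Because apiKey falls back to undefined rather than null, key-less local requests serialize without the field, which getUsageStats later groups under a "Local (No API Key)" bucket.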
@@ -719,13 +722,13 @@ export async function handleChatCore({ body, modelInfo, credentials, log, onCred
if (needsCodexTranslation) {
log?.debug?.("STREAM", `Codex translation mode: openai-responses → openai`);
-transformStream = createSSETransformStreamWithLogger('openai-responses', 'openai', provider, reqLogger, toolNameMap, model, connectionId, body, onStreamComplete);
+transformStream = createSSETransformStreamWithLogger('openai-responses', 'openai', provider, reqLogger, toolNameMap, model, connectionId, body, onStreamComplete, apiKey);
} else if (needsTranslation(targetFormat, sourceFormat)) {
log?.debug?.("STREAM", `Translation mode: ${targetFormat} → ${sourceFormat}`);
-transformStream = createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider, reqLogger, toolNameMap, model, connectionId, body, onStreamComplete);
+transformStream = createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider, reqLogger, toolNameMap, model, connectionId, body, onStreamComplete, apiKey);
} else {
log?.debug?.("STREAM", `Standard passthrough mode`);
-transformStream = createPassthroughStreamWithLogger(provider, reqLogger, model, connectionId, body, onStreamComplete);
+transformStream = createPassthroughStreamWithLogger(provider, reqLogger, model, connectionId, body, onStreamComplete, apiKey);
}
const transformedBody = pipeWithDisconnect(providerResponse, transformStream, streamController);


@@ -29,6 +29,7 @@ const STREAM_MODE = {
* @param {string} options.connectionId - Connection ID for usage tracking
* @param {object} options.body - Request body (for input token estimation)
* @param {function} options.onStreamComplete - Callback when stream completes (content, usage)
+ * @param {string} options.apiKey - API key for usage tracking
*/
export function createSSEStream(options = {}) {
const {
@@ -41,7 +42,8 @@ export function createSSEStream(options = {}) {
model = null,
connectionId = null,
body = null,
-onStreamComplete = null
+onStreamComplete = null,
+apiKey = null
} = options;
let buffer = "";
@@ -246,7 +248,7 @@ export function createSSEStream(options = {}) {
}
if (hasValidUsage(usage)) {
-logUsage(provider, usage, model, connectionId);
+logUsage(provider, usage, model, connectionId, apiKey);
} else {
appendRequestLog({ model, provider, connectionId, tokens: null, status: "200 OK" }).catch(() => { });
}
@@ -308,7 +310,7 @@ export function createSSEStream(options = {}) {
}
if (hasValidUsage(state?.usage)) {
-logUsage(state.provider || targetFormat, state.usage, model, connectionId);
+logUsage(state.provider || targetFormat, state.usage, model, connectionId, apiKey);
} else {
appendRequestLog({ model, provider, connectionId, tokens: null, status: "200 OK" }).catch(() => { });
}
@@ -326,7 +328,7 @@ export function createSSEStream(options = {}) {
});
}
-export function createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider = null, reqLogger = null, toolNameMap = null, model = null, connectionId = null, body = null, onStreamComplete = null) {
+export function createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider = null, reqLogger = null, toolNameMap = null, model = null, connectionId = null, body = null, onStreamComplete = null, apiKey = null) {
return createSSEStream({
mode: STREAM_MODE.TRANSLATE,
targetFormat,
@@ -337,11 +339,12 @@ export function createSSETransformStreamWithLogger(targetFormat, sourceFormat, p
model,
connectionId,
body,
-onStreamComplete
+onStreamComplete,
+apiKey
});
}
-export function createPassthroughStreamWithLogger(provider = null, reqLogger = null, model = null, connectionId = null, body = null, onStreamComplete = null) {
+export function createPassthroughStreamWithLogger(provider = null, reqLogger = null, model = null, connectionId = null, body = null, onStreamComplete = null, apiKey = null) {
return createSSEStream({
mode: STREAM_MODE.PASSTHROUGH,
provider,
@@ -349,6 +352,7 @@ export function createPassthroughStreamWithLogger(provider = null, reqLogger = n
model,
connectionId,
body,
-onStreamComplete
+onStreamComplete,
+apiKey
});
}
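Both factory wrappers append apiKey as a trailing positional parameter defaulting to null, so existing call sites that omit it keep working. A minimal sketch of a passthrough call with the new argument (every value below is a placeholder, not from this repo):

    // Sketch - argument values are hypothetical.
    const stream = createPassthroughStreamWithLogger(
      "openai",           // provider
      null,               // reqLogger (placeholder)
      "gpt-4o",           // model (hypothetical)
      "conn_123",         // connectionId for usage tracking
      null,               // body, used for input-token estimation
      null,               // onStreamComplete callback
      "sk-local-abc123"   // apiKey, the new trailing parameter
    );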


@@ -279,7 +279,7 @@ export function estimateUsage(body, contentLength, targetFormat = FORMATS.OPENAI
/**
* Log usage with cache info (green color)
*/
-export function logUsage(provider, usage, model = null, connectionId = null) {
+export function logUsage(provider, usage, model = null, connectionId = null, apiKey = null) {
if (!usage || typeof usage !== "object") return;
const p = provider?.toUpperCase() || "UNKNOWN";
@@ -318,6 +318,6 @@ export function logUsage(provider, usage, model = null, connectionId = null) {
cache_creation_input_tokens: cacheCreation || 0,
reasoning_tokens: reasoning || 0
};
-saveRequestUsage({ model, provider, connectionId, tokens }).catch(() => { });
+saveRequestUsage({ model, provider, connectionId, tokens, apiKey: apiKey || undefined }).catch(() => { });
appendRequestLog({ model, provider, connectionId, tokens, status: "200 OK" }).catch(() => { });
}
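logUsage is the single funnel for streaming usage persistence, so threading apiKey through here covers every stream mode at once. A minimal invocation sketch (token counts and identifiers are made up):

    logUsage(
      "openai",                                       // provider
      { prompt_tokens: 120, completion_tokens: 48 },  // usage
      "gpt-4o",                                       // model (hypothetical)
      "conn_123",                                     // connectionId (hypothetical)
      "sk-local-abc123"                               // apiKey, forwarded to saveRequestUsage
    );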


@@ -139,7 +139,7 @@ export async function getUsageDb() {
/**
* Save request usage
- * @param {object} entry - Usage entry { provider, model, tokens: { prompt_tokens, completion_tokens, ... }, connectionId? }
+ * @param {object} entry - Usage entry { provider, model, tokens: { prompt_tokens, completion_tokens, ... }, connectionId?, apiKey? }
*/
export async function saveRequestUsage(entry) {
if (isCloud) return; // Skip saving in Workers
@@ -349,8 +349,8 @@ export async function getUsageStats() {
const db = await getUsageDb();
const history = db.data.history || [];
-// Import localDb to get provider connection names
-const { getProviderConnections } = await import("@/lib/localDb.js");
+// Import localDb to get provider connection names and API keys
+const { getProviderConnections, getApiKeys } = await import("@/lib/localDb.js");
// Fetch all provider connections to get account names
let allConnections = [];
@@ -367,14 +367,33 @@ export async function getUsageStats() {
connectionMap[conn.id] = conn.name || conn.email || conn.id;
}
+// Fetch all API keys to get key names
+let allApiKeys = [];
+try {
+allApiKeys = await getApiKeys();
+} catch (error) {
+console.warn("Could not fetch API keys for usage stats:", error.message);
+}
+// Create a map from API key to key info
+const apiKeyMap = {};
+for (const key of allApiKeys) {
+apiKeyMap[key.key] = {
+name: key.name,
+id: key.id,
+createdAt: key.createdAt
+};
+}
const stats = {
totalRequests: history.length,
totalPromptTokens: 0,
totalCompletionTokens: 0,
-totalCost: 0, // NEW
+totalCost: 0,
byProvider: {},
byModel: {},
byAccount: {},
+byApiKey: {},
last10Minutes: [],
pending: pendingRequests,
activeRequests: []
@@ -507,6 +526,65 @@ export async function getUsageStats() {
stats.byAccount[accountKey].lastUsed = entry.timestamp;
}
}
+// Handle requests with API key
+if (entry.apiKey && typeof entry.apiKey === "string") {
+const keyInfo = apiKeyMap[entry.apiKey];
+const keyName = keyInfo?.name || entry.apiKey.slice(0, 8) + "...";
+// Use full API key to avoid collisions (keys with same prefix)
+const apiKeyKey = entry.apiKey;
+// Group by API Key + Model + Provider combination to track different models used with the same key
+const apiKeyModelKey = `${apiKeyKey}|${entry.model}|${entry.provider || 'unknown'}`;
+if (!stats.byApiKey[apiKeyModelKey]) {
+stats.byApiKey[apiKeyModelKey] = {
+requests: 0,
+promptTokens: 0,
+completionTokens: 0,
+cost: 0,
+rawModel: entry.model,
+provider: entry.provider,
+apiKey: entry.apiKey,
+keyName: keyName,
+apiKeyKey: apiKeyKey,
+lastUsed: entry.timestamp
+};
+}
+const apiKeyEntry = stats.byApiKey[apiKeyModelKey];
+apiKeyEntry.requests++;
+apiKeyEntry.promptTokens += promptTokens;
+apiKeyEntry.completionTokens += completionTokens;
+apiKeyEntry.cost += entryCost;
+if (new Date(entry.timestamp) > new Date(apiKeyEntry.lastUsed)) {
+apiKeyEntry.lastUsed = entry.timestamp;
+}
+} else {
+const apiKeyKey = "local-no-key";
+const keyName = "Local (No API Key)";
+if (!stats.byApiKey[apiKeyKey]) {
+stats.byApiKey[apiKeyKey] = {
+requests: 0,
+promptTokens: 0,
+completionTokens: 0,
+cost: 0,
+rawModel: entry.model,
+provider: entry.provider,
+apiKey: null,
+keyName: keyName,
+apiKeyKey: apiKeyKey,
+lastUsed: entry.timestamp
+};
+}
+const apiKeyEntry = stats.byApiKey[apiKeyKey];
+apiKeyEntry.requests++;
+apiKeyEntry.promptTokens += promptTokens;
+apiKeyEntry.completionTokens += completionTokens;
+apiKeyEntry.cost += entryCost;
+if (new Date(entry.timestamp) > new Date(apiKeyEntry.lastUsed)) {
+apiKeyEntry.lastUsed = entry.timestamp;
+}
+}
}
return stats;
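Consumers of the new statistics should treat byApiKey values as self-describing rather than parse the composite "${apiKey}|${model}|${provider}" bucket key (the key-less bucket uses the literal key "local-no-key"). A minimal rendering sketch using getUsageStats, which this module exports:

    const stats = await getUsageStats();
    for (const s of Object.values(stats.byApiKey)) {
      // keyName is the stored key's name, or a truncated "abcdef12..." fallback
      console.log(
        `${s.keyName} | ${s.provider}/${s.rawModel}: ` +
        `${s.requests} req, ${s.promptTokens} in / ${s.completionTokens} out, ` +
        `$${s.cost.toFixed(4)}, last used ${s.lastUsed}`
      );
    }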

File diff suppressed because it is too large


@@ -83,19 +83,19 @@ export async function handleChat(request, clientRawRequest = null) {
return handleComboChat({
body,
models: comboModels,
-handleSingleModel: (b, m) => handleSingleModelChat(b, m, clientRawRequest, request),
+handleSingleModel: (b, m) => handleSingleModelChat(b, m, clientRawRequest, request, apiKey),
log
});
}
// Single model request
-return handleSingleModelChat(body, modelStr, clientRawRequest, request);
+return handleSingleModelChat(body, modelStr, clientRawRequest, request, apiKey);
}
/**
* Handle single model chat request
*/
-async function handleSingleModelChat(body, modelStr, clientRawRequest = null, request = null) {
+async function handleSingleModelChat(body, modelStr, clientRawRequest = null, request = null, apiKey = null) {
const modelInfo = await getModelInfo(modelStr);
if (!modelInfo.provider) {
log.warn("CHAT", "Invalid model format", { model: modelStr });
@@ -153,6 +153,7 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null, re
clientRawRequest,
connectionId: credentials.connectionId,
userAgent,
+apiKey,
onCredentialsRefreshed: async (newCreds) => {
await updateProviderCredentials(credentials.connectionId, {
accessToken: newCreds.accessToken,
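The hunk above ends where the page truncates the diff, and the point where handleChat obtains apiKey is not shown. Purely as a hypothetical sketch, if the proxy reads its keys from a bearer token, the extraction could look like:

    // Hypothetical - the real extraction in handleChat is outside this diff.
    const auth = request?.headers?.get?.("authorization") || "";
    const apiKey = auth.startsWith("Bearer ") ? auth.slice(7) : null;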