feat: implement real project ID fetching for Antigravity (#170)

* feat: implement Project ID service to fetch and cache real Project IDs from Google Cloud Code API

* fix: implement caching and cleanup for Project ID retrieval

* feat: add project ID invalidation and refresh logic after token updates

* refactor: remove unnecessary format changes

* feat: add on-demand project ID retrieval for antigravity requests
This commit is contained in:
zx07
2026-02-22 00:15:18 +08:00
committed by GitHub
parent 94c4320632
commit ea67742f2a
5 changed files with 546 additions and 115 deletions

View File

@@ -74,6 +74,12 @@ export const ANTIGRAVITY_HEADERS = {
"x-goog-api-client": "gl-node/18.18.2 fire/0.8.6 grpc/1.10.x"
};
// Cloud Code Assist API endpoints (for Project ID discovery)
export const CLOUD_CODE_API = {
// Returns the user's Code Assist state, including any already-bound project.
loadCodeAssist: "https://cloudcode-pa.googleapis.com/v1internal:loadCodeAssist",
// Long-running onboarding operation; polled until it reports `done`.
onboardUser: "https://cloudcode-pa.googleapis.com/v1internal:onboardUser",
};
// Provider configurations
export const PROVIDERS = {
claude: {

View File

@@ -0,0 +1,315 @@
/**
* Project ID Service - Fetch and cache real Project IDs from Google Cloud Code API
*
* Reference: CLIProxyAPI internal/auth/antigravity/auth.go (FetchProjectID + OnboardUser)
*
* Instead of generating random project IDs (e.g. "useful-spark-a1b2c"),
* this service fetches the real Project ID bound to the authenticated user's account.
* This significantly reduces the risk of being flagged by Google's anti-abuse systems.
*/
import {ANTIGRAVITY_HEADERS, CLIENT_METADATA, CLOUD_CODE_API, getPlatformUserAgent} from "../config/constants.js";
// ─── Cache ────────────────────────────────────────────────────────────────────
// connectionId -> { projectId: string, fetchedAt: number }
const projectIdCache = new Map();
/** How long a cached project ID is considered fresh (1 hour). */
const CACHE_TTL_MS = 60 * 60 * 1000;
// ─── Pending-fetch deduplication ─────────────────────────────────────────────
// connectionId -> { promise: Promise<string|null>, controller: AbortController, startedAt: number }
const pendingFetches = new Map();
/** Abort and evict a pending fetch that has been running longer than this (2 min). */
const PENDING_TTL_MS = 2 * 60 * 1000;
// ─── Periodic cleanup ────────────────────────────────────────────────────────
/** How often the background sweep runs (10 min). */
const CLEANUP_INTERVAL_MS = 10 * 60 * 1000;
// Interval handle for the background sweep; null while the sweep is stopped.
let _cleanupTimer = null;
/**
 * Run one sweep immediately: evict stale cache entries and abort orphaned
 * pending fetches.
 *
 * Fix: the cache sweep now also evicts entries whose `fetchedAt` is not a
 * number. Previously `now - fetchedAt` was NaN for such entries, the
 * `>= CACHE_TTL_MS` comparison was always false, and the malformed entry
 * lived forever — the pending sweep already guarded `startedAt` this way.
 */
export function cleanupNow() {
  const now = Date.now();
  for (const [id, entry] of projectIdCache) {
    // `>= CACHE_TTL_MS` mirrors the freshness check in
    // getProjectIdForConnection, which treats `< CACHE_TTL_MS` as fresh.
    if (!entry || typeof entry.fetchedAt !== "number" || now - entry.fetchedAt >= CACHE_TTL_MS) {
      projectIdCache.delete(id);
    }
  }
  for (const [id, item] of pendingFetches) {
    if (!item || typeof item.startedAt !== "number") {
      pendingFetches.delete(id);
      continue;
    }
    if (now - item.startedAt > PENDING_TTL_MS) {
      try { item.controller.abort(); } catch (_) { /* ignore */ }
      pendingFetches.delete(id);
    }
  }
}
/**
 * Begin the periodic background cleanup sweep (idempotent).
 * Invoked automatically when this module is first imported.
 */
export function startCacheCleanup() {
  if (_cleanupTimer !== null) {
    return;
  }
  const sweep = () => {
    try {
      cleanupNow();
    } catch (err) {
      console.warn("[ProjectId] cleanup sweep error:", err?.message ?? err);
    }
  };
  _cleanupTimer = setInterval(sweep, CLEANUP_INTERVAL_MS);
  // unref (when available) keeps this timer from holding an idle process open.
  _cleanupTimer?.unref?.();
}
/**
 * Cancel the periodic background cleanup sweep (e.g. during graceful
 * shutdown). Safe to call when the sweep is not running.
 */
export function stopCacheCleanup() {
  if (_cleanupTimer === null) {
    return;
  }
  clearInterval(_cleanupTimer);
  _cleanupTimer = null;
}
// Start automatically when the module is first imported
startCacheCleanup();
// ─── Public API ───────────────────────────────────────────────────────────────
/**
 * Resolve the Project ID for a connection, consulting the cache first and
 * deduplicating concurrent fetches for the same connection.
 * Returns null on failure (callers should fall back to random generation).
 *
 * @param {string} connectionId - The connection identifier for cache keying
 * @param {string} accessToken - Valid OAuth access token
 * @returns {Promise<string|null>} Real project ID or null
 */
export async function getProjectIdForConnection(connectionId, accessToken) {
  if (!connectionId || !accessToken) return null;
  // Fresh cache hit: answer immediately.
  const hit = projectIdCache.get(connectionId);
  if (hit && Date.now() - hit.fetchedAt < CACHE_TTL_MS) {
    return hit.projectId;
  }
  // A fetch for this connection is already in flight: share its promise.
  const inFlight = pendingFetches.get(connectionId);
  if (inFlight) {
    return inFlight.promise;
  }
  // Dedicated AbortController so removeConnection() can cancel this fetch.
  const controller = new AbortController();
  const runFetch = async () => {
    try {
      const projectId = await fetchProjectId(accessToken, controller.signal);
      if (!projectId) {
        console.warn("[ProjectId] could not fetch projectId for connection", connectionId.slice(0, 8));
        return null;
      }
      projectIdCache.set(connectionId, {projectId, fetchedAt: Date.now()});
      return projectId;
    } catch (error) {
      console.warn(`[ProjectId] Error fetching project ID: ${error.message}`);
      return null;
    } finally {
      pendingFetches.delete(connectionId);
    }
  };
  const promise = runFetch();
  pendingFetches.set(connectionId, {promise, controller, startedAt: Date.now()});
  return promise;
}
/**
* Invalidate the cached project ID for a connection so the next
* getProjectIdForConnection() call performs a fresh fetch.
* Call this when a connection's credentials are fully revoked or refreshed.
*
* @param {string} connectionId - cache key; unknown ids are a harmless no-op
*/
export function invalidateProjectId(connectionId) {
projectIdCache.delete(connectionId);
}
/**
 * Tear down all projectId state for a connection: cancel any in-flight
 * fetch and drop the cached value. Wire this into connection close /
 * disconnect lifecycle events so the module-level Maps cannot leak.
 *
 * @param {string} connectionId
 */
export function removeConnection(connectionId) {
  if (!connectionId) return;
  projectIdCache.delete(connectionId);
  const inFlight = pendingFetches.get(connectionId);
  if (!inFlight) return;
  try {
    inFlight.controller.abort();
  } catch (_) {
    /* ignore */
  }
  pendingFetches.delete(connectionId);
}
// ─── Internal helpers ─────────────────────────────────────────────────────────
/**
 * Fetch project ID via the loadCodeAssist endpoint.
 * Falls back to onboardUser when loadCodeAssist returns no project, using
 * the default tier advertised in the response (or "legacy-tier").
 *
 * @param {string} accessToken
 * @param {AbortSignal} signal
 * @returns {Promise<string|null>}
 */
async function fetchProjectId(accessToken, signal) {
  const response = await fetch(CLOUD_CODE_API.loadCodeAssist, {
    method: "POST",
    headers: {
      "Authorization": `Bearer ${accessToken}`,
      "Content-Type": "application/json",
      "User-Agent": getPlatformUserAgent(),
      ...ANTIGRAVITY_HEADERS
    },
    body: JSON.stringify({metadata: CLIENT_METADATA, mode: 1}),
    signal
  });
  if (!response.ok) {
    const errorText = await response.text().catch(() => "");
    throw new Error(`loadCodeAssist failed: HTTP ${response.status} ${errorText.slice(0, 200)}`);
  }
  const data = await response.json();
  const projectId = extractProjectId(data);
  if (projectId) return projectId;
  // No project bound yet: pick the first default tier with a usable id,
  // falling back to "legacy-tier" when none is advertised.
  const tiers = Array.isArray(data.allowedTiers) ? data.allowedTiers : [];
  const defaultTier = tiers.find(
    (tier) =>
      tier &&
      typeof tier === "object" &&
      tier.isDefault === true &&
      typeof tier.id === "string" &&
      tier.id.trim() !== ""
  );
  const tierID = defaultTier ? defaultTier.id.trim() : "legacy-tier";
  return onboardUser(accessToken, tierID, signal);
}
/**
 * Fetch project ID via the onboardUser endpoint, polling until the
 * long-running operation reports done.
 *
 * Contract (unchanged from the original):
 *  - "not done" responses are retried up to MAX_ATTEMPTS with a 2 s pause;
 *  - HTTP/parse errors on non-final attempts propagate to the caller
 *    (fail fast); on the final attempt they are logged and become null;
 *  - a per-attempt 30 s timeout aborts a hung request and triggers a retry;
 *  - an external abort (connection removed) stops everything, yielding null.
 *
 * Fixes: clearTimeout was previously called in three places (only the
 * `finally` is needed); the timeout is now kept armed through the body read
 * so a stalled `response.json()` cannot hang forever; the pointless 2 s
 * sleep after the final "not done" attempt is skipped.
 *
 * @param {string} accessToken
 * @param {string} tierID
 * @param {AbortSignal} externalSignal propagated from the connection's AbortController
 * @returns {Promise<string|null>}
 */
async function onboardUser(accessToken, tierID, externalSignal) {
  console.log(`[ProjectId] Onboarding user with tier: ${tierID}`);
  const MAX_ATTEMPTS = 5;
  for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
    // Bail out immediately if the connection was removed
    if (externalSignal?.aborted) return null;
    const reqBody = {tierId: tierID, metadata: CLIENT_METADATA, mode: 1};
    // Per-attempt timeout controller; forwards external abort as well
    const localCtrl = new AbortController();
    const timeoutId = setTimeout(() => localCtrl.abort(), 30_000);
    const forwardAbort = () => localCtrl.abort();
    externalSignal?.addEventListener("abort", forwardAbort);
    try {
      const response = await fetch(CLOUD_CODE_API.onboardUser, {
        method: "POST",
        headers: {
          "Authorization": `Bearer ${accessToken}`,
          "Content-Type": "application/json",
          "User-Agent": getPlatformUserAgent(),
          ...ANTIGRAVITY_HEADERS
        },
        body: JSON.stringify(reqBody),
        signal: localCtrl.signal
      });
      if (!response.ok) {
        const errorText = await response.text().catch(() => "");
        throw new Error(`onboardUser HTTP ${response.status}: ${errorText.slice(0, 200)}`);
      }
      // Timeout stays armed here so a stalled body read is also aborted.
      const data = await response.json();
      if (data.done === true) {
        const projectId = extractProjectIdFromOnboard(data);
        if (projectId) {
          console.log(`[ProjectId] Successfully onboarded, project ID: ${projectId}`);
          return projectId;
        }
        throw new Error("onboardUser done but no project_id in response");
      }
      // Server not done yet — wait and retry (no pause after the last attempt)
      console.log(`[ProjectId] Onboard attempt ${attempt}/${MAX_ATTEMPTS}: not done yet, waiting...`);
      if (attempt < MAX_ATTEMPTS) {
        await new Promise(resolve => setTimeout(resolve, 2000));
      }
    } catch (error) {
      if (error.name === "AbortError") {
        console.warn(`[ProjectId] onboardUser attempt ${attempt} aborted (timeout or connection removed)`);
        if (externalSignal?.aborted) return null; // connection gone — stop retrying
        continue;
      }
      if (attempt === MAX_ATTEMPTS) {
        console.warn(`[ProjectId] onboardUser failed after ${MAX_ATTEMPTS} attempts: ${error.message}`);
        return null;
      }
      throw error;
    } finally {
      // Single cleanup site: disarm the timeout and detach the abort forwarder.
      clearTimeout(timeoutId);
      externalSignal?.removeEventListener("abort", forwardAbort);
    }
  }
  return null;
}
/**
 * Extract the project ID from a loadCodeAssist response.
 * Accepts either a bare string or an object carrying an `id` field;
 * values are trimmed, and blank/absent values yield null.
 *
 * @param {object|null|undefined} data
 * @returns {string|null}
 */
function extractProjectId(data) {
  const project = data?.cloudaicompanionProject;
  if (typeof project === "string" && project.trim() !== "") {
    return project.trim();
  }
  if (
    project !== null &&
    typeof project === "object" &&
    typeof project.id === "string" &&
    project.id.trim() !== ""
  ) {
    return project.id.trim();
  }
  return null;
}
/**
 * Extract the project ID from an onboardUser operation response.
 * The project lives under `response.cloudaicompanionProject` and may be a
 * bare string or an object with an `id` field; blank values yield null.
 *
 * @param {object|null|undefined} data
 * @returns {string|null}
 */
function extractProjectIdFromOnboard(data) {
  const project = data?.response?.cloudaicompanionProject;
  if (typeof project === "string" && project.trim() !== "") {
    return project.trim();
  }
  if (
    project !== null &&
    typeof project === "object" &&
    typeof project.id === "string" &&
    project.id.trim() !== ""
  ) {
    return project.id.trim();
  }
  return null;
}

View File

@@ -8,7 +8,7 @@ export const TOKEN_EXPIRY_BUFFER_MS = 5 * 60 * 1000;
*/
export async function refreshAccessToken(provider, refreshToken, credentials, log) {
const config = PROVIDERS[provider];
if (!config || !config.refreshUrl) {
log?.warn?.("TOKEN_REFRESH", `No refresh URL configured for provider: ${provider}`);
return null;
@@ -44,7 +44,7 @@ export async function refreshAccessToken(provider, refreshToken, credentials, lo
}
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", `Successfully refreshed token for ${provider}`, {
hasNewAccessToken: !!tokens.access_token,
hasNewRefreshToken: !!tokens.refresh_token,
@@ -91,7 +91,7 @@ export async function refreshClaudeOAuthToken(refreshToken, log) {
}
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed Claude OAuth token", {
hasNewAccessToken: !!tokens.access_token,
hasNewRefreshToken: !!tokens.refresh_token,
@@ -133,7 +133,7 @@ export async function refreshGoogleToken(refreshToken, clientId, clientSecret, l
}
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed Google token", {
hasNewAccessToken: !!tokens.access_token,
hasNewRefreshToken: !!tokens.refresh_token,
@@ -152,7 +152,7 @@ export async function refreshGoogleToken(refreshToken, clientId, clientSecret, l
*/
export async function refreshQwenToken(refreshToken, log) {
const endpoint = OAUTH_ENDPOINTS.qwen.token;
try {
const response = await fetch(endpoint, {
method: "POST",
@@ -169,7 +169,7 @@ export async function refreshQwenToken(refreshToken, log) {
if (response.status === 200) {
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed Qwen token", {
hasNewAccessToken: !!tokens.access_token,
hasNewRefreshToken: !!tokens.refresh_token,
@@ -226,7 +226,7 @@ export async function refreshCodexToken(refreshToken, log) {
}
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed Codex token", {
hasNewAccessToken: !!tokens.access_token,
hasNewRefreshToken: !!tokens.refresh_token,
@@ -249,7 +249,7 @@ export async function refreshKiroToken(refreshToken, providerSpecificData, log)
const clientId = providerSpecificData?.clientId;
const clientSecret = providerSpecificData?.clientSecret;
const region = providerSpecificData?.region;
// AWS SSO OIDC (Builder ID or IDC)
// If clientId and clientSecret exist, assume AWS SSO OIDC (default to builder-id if authMethod not specified)
if (clientId && clientSecret) {
@@ -257,7 +257,7 @@ export async function refreshKiroToken(refreshToken, providerSpecificData, log)
const endpoint = isIDC && region
? `https://oidc.${region}.amazonaws.com/token`
: "https://oidc.us-east-1.amazonaws.com/token";
const response = await fetch(endpoint, {
method: "POST",
headers: {
@@ -282,7 +282,7 @@ export async function refreshKiroToken(refreshToken, providerSpecificData, log)
}
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed Kiro AWS token", {
hasNewAccessToken: !!tokens.accessToken,
expiresIn: tokens.expiresIn,
@@ -294,7 +294,7 @@ export async function refreshKiroToken(refreshToken, providerSpecificData, log)
expiresIn: tokens.expiresIn,
};
}
// Social Auth (Google/GitHub) - use Kiro's refresh endpoint
const response = await fetch(PROVIDERS.kiro.tokenUrl, {
method: "POST",
@@ -317,7 +317,7 @@ export async function refreshKiroToken(refreshToken, providerSpecificData, log)
}
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed Kiro social token", {
hasNewAccessToken: !!tokens.accessToken,
expiresIn: tokens.expiresIn,
@@ -335,7 +335,7 @@ export async function refreshKiroToken(refreshToken, providerSpecificData, log)
*/
export async function refreshIflowToken(refreshToken, log) {
const basicAuth = btoa(`${PROVIDERS.iflow.clientId}:${PROVIDERS.iflow.clientSecret}`);
const response = await fetch(OAUTH_ENDPOINTS.iflow.token, {
method: "POST",
headers: {
@@ -361,7 +361,7 @@ export async function refreshIflowToken(refreshToken, log) {
}
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed iFlow token", {
hasNewAccessToken: !!tokens.access_token,
hasNewRefreshToken: !!tokens.refresh_token,
@@ -403,7 +403,7 @@ export async function refreshGitHubToken(refreshToken, log) {
}
const tokens = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed GitHub token", {
hasNewAccessToken: !!tokens.access_token,
hasNewRefreshToken: !!tokens.refresh_token,
@@ -442,7 +442,7 @@ export async function refreshCopilotToken(githubAccessToken, log) {
}
const data = await response.json();
log?.info?.("TOKEN_REFRESH", "Successfully refreshed Copilot token", {
hasToken: !!data.token,
expiresAt: data.expires_at
@@ -479,29 +479,29 @@ export async function getAccessToken(provider, credentials, log) {
PROVIDERS[provider].clientSecret,
log
);
case "claude":
return await refreshClaudeOAuthToken(credentials.refreshToken, log);
case "codex":
return await refreshCodexToken(credentials.refreshToken, log);
case "qwen":
return await refreshQwenToken(credentials.refreshToken, log);
case "iflow":
return await refreshIflowToken(credentials.refreshToken, log);
case "github":
return await refreshGitHubToken(credentials.refreshToken, log);
case "kiro":
return await refreshKiroToken(
credentials.refreshToken,
credentials.providerSpecificData,
log
);
default:
log?.warn?.("TOKEN_REFRESH", `Unsupported provider for token refresh: ${provider}`);
return null;
@@ -561,13 +561,13 @@ export function formatProviderCredentials(provider, credentials, log) {
accessToken: credentials.accessToken,
projectId: credentials.projectId
};
case "claude":
return {
apiKey: credentials.apiKey,
accessToken: credentials.accessToken
};
case "codex":
case "qwen":
case "iflow":
@@ -577,14 +577,15 @@ export function formatProviderCredentials(provider, credentials, log) {
apiKey: credentials.apiKey,
accessToken: credentials.accessToken
};
case "antigravity":
case "gemini-cli":
return {
accessToken: credentials.accessToken,
refreshToken: credentials.refreshToken
refreshToken: credentials.refreshToken,
projectId: credentials.projectId
};
default:
return {
apiKey: credentials.apiKey,
@@ -599,21 +600,21 @@ export function formatProviderCredentials(provider, credentials, log) {
*/
export async function getAllAccessTokens(userInfo, log) {
const results = {};
if (userInfo.connections && Array.isArray(userInfo.connections)) {
for (const connection of userInfo.connections) {
if (connection.isActive && connection.provider) {
const token = await getAccessToken(connection.provider, {
refreshToken: connection.refreshToken
}, log);
if (token) {
results[connection.provider] = token;
}
}
}
}
return results;
}

View File

@@ -13,6 +13,7 @@ import { handleComboChat } from "open-sse/services/combo.js";
import { HTTP_STATUS } from "open-sse/config/constants.js";
import * as log from "../utils/logger.js";
import { updateProviderCredentials, checkAndRefreshToken } from "../services/tokenRefresh.js";
import { getProjectIdForConnection } from "open-sse/services/projectId.js";
/**
* Handle chat completion request
@@ -144,7 +145,17 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null, re
log.info("AUTH", `Using ${provider} account: ${accountId}...`);
const refreshedCredentials = await checkAndRefreshToken(provider, credentials);
// Ensure real project ID is available for providers that need it (P0 fix: cold miss)
if ((provider === "antigravity" || provider === "gemini-cli") && !refreshedCredentials.projectId) {
const pid = await getProjectIdForConnection(credentials.connectionId, refreshedCredentials.accessToken);
if (pid) {
refreshedCredentials.projectId = pid;
// Persist to DB in background so subsequent requests have it immediately
updateProviderCredentials(credentials.connectionId, { projectId: pid }).catch(() => { });
}
}
// Use shared chatCore
const result = await handleChatCore({
body: { ...body, model: `${provider}/${model}` },

View File

@@ -1,6 +1,11 @@
// Re-export from open-sse with local logger
import * as log from "../utils/logger.js";
import { updateProviderConnection } from "../../lib/localDb.js";
import {
getProjectIdForConnection,
invalidateProjectId,
removeConnection,
} from "open-sse/services/projectId.js";
import {
TOKEN_EXPIRY_BUFFER_MS as BUFFER_MS,
refreshAccessToken as _refreshAccessToken,
@@ -19,66 +24,139 @@ import {
export const TOKEN_EXPIRY_BUFFER_MS = BUFFER_MS;
// Wrap functions with local logger
export const refreshAccessToken = (provider, refreshToken, credentials) =>
// ─── Re-exports wrapped with local logger ─────────────────────────────────────
export const refreshAccessToken = (provider, refreshToken, credentials) =>
_refreshAccessToken(provider, refreshToken, credentials, log);
export const refreshClaudeOAuthToken = (refreshToken) =>
export const refreshClaudeOAuthToken = (refreshToken) =>
_refreshClaudeOAuthToken(refreshToken, log);
export const refreshGoogleToken = (refreshToken, clientId, clientSecret) =>
export const refreshGoogleToken = (refreshToken, clientId, clientSecret) =>
_refreshGoogleToken(refreshToken, clientId, clientSecret, log);
export const refreshQwenToken = (refreshToken) =>
export const refreshQwenToken = (refreshToken) =>
_refreshQwenToken(refreshToken, log);
export const refreshCodexToken = (refreshToken) =>
export const refreshCodexToken = (refreshToken) =>
_refreshCodexToken(refreshToken, log);
export const refreshIflowToken = (refreshToken) =>
export const refreshIflowToken = (refreshToken) =>
_refreshIflowToken(refreshToken, log);
export const refreshGitHubToken = (refreshToken) =>
export const refreshGitHubToken = (refreshToken) =>
_refreshGitHubToken(refreshToken, log);
export const refreshCopilotToken = (githubAccessToken) =>
export const refreshCopilotToken = (githubAccessToken) =>
_refreshCopilotToken(githubAccessToken, log);
export const getAccessToken = (provider, credentials) =>
export const getAccessToken = (provider, credentials) =>
_getAccessToken(provider, credentials, log);
export const refreshTokenByProvider = (provider, credentials) =>
export const refreshTokenByProvider = (provider, credentials) =>
_refreshTokenByProvider(provider, credentials, log);
export const formatProviderCredentials = (provider, credentials) =>
export const formatProviderCredentials = (provider, credentials) =>
_formatProviderCredentials(provider, credentials, log);
export const getAllAccessTokens = (userInfo) =>
export const getAllAccessTokens = (userInfo) =>
_getAllAccessTokens(userInfo, log);
// Local-specific: Update credentials in localDb
// ─── Lifecycle hook ───────────────────────────────────────────────────────────
/**
 * Release projectId resources when a connection is fully closed / removed.
 * Aborts any in-flight projectId fetch and evicts its cache entry so the
 * module-level Maps in projectId.js do not accumulate stale entries.
 *
 * @param {string} connectionId
 */
export function releaseConnection(connectionId) {
  if (connectionId) {
    removeConnection(connectionId);
    log.debug("TOKEN_REFRESH", "Released connection resources", { connectionId });
  }
}
// ─── Internal helpers ─────────────────────────────────────────────────────────
/**
 * Convert a relative lifetime in seconds into an absolute ISO-8601
 * expiry timestamp measured from now.
 *
 * @param {number} expiresIn - lifetime in seconds
 * @returns {string} ISO-8601 timestamp
 */
function toExpiresAt(expiresIn) {
  const expiryMs = Date.now() + expiresIn * 1000;
  return new Date(expiryMs).toISOString();
}
/**
 * Whether this provider carries a real Google project ID that must be
 * fetched and cached.
 *
 * @param {string} provider
 * @returns {boolean}
 */
function needsProjectId(provider) {
  return ["antigravity", "gemini-cli"].includes(provider);
}
/**
 * Non-blocking: after a token refresh, fetch the connection's project ID
 * with the new access token and persist it to localDb. The stale cached
 * value is invalidated first so the lookup performs a real fetch.
 *
 * @param {string} provider
 * @param {string} connectionId
 * @param {string} accessToken
 */
function _refreshProjectId(provider, connectionId, accessToken) {
  if (!needsProjectId(provider) || !connectionId || !accessToken) return;
  // Evict the stale entry so getProjectIdForConnection hits the network.
  invalidateProjectId(connectionId);
  const persist = async (projectId) => {
    if (!projectId) return;
    try {
      await updateProviderCredentials(connectionId, { projectId });
    } catch (err) {
      log.debug("TOKEN_REFRESH", "Failed to persist refreshed projectId", {
        connectionId,
        error: err?.message ?? err,
      });
    }
  };
  getProjectIdForConnection(connectionId, accessToken)
    .then(persist)
    .catch((err) => {
      log.debug("TOKEN_REFRESH", "Failed to fetch projectId after token refresh", {
        connectionId,
        error: err?.message ?? err,
      });
    });
}
// ─── Local-specific: persist credentials to localDb ──────────────────────────
/**
* Persist updated credentials for a connection to localDb.
* Only fields that are present in `newCredentials` are written.
*
* @param {string} connectionId
* @param {object} newCredentials
* @returns {Promise<boolean>}
*/
export async function updateProviderCredentials(connectionId, newCredentials) {
try {
const updates = {};
if (newCredentials.accessToken) {
updates.accessToken = newCredentials.accessToken;
}
if (newCredentials.refreshToken) {
updates.refreshToken = newCredentials.refreshToken;
}
if (newCredentials.accessToken) updates.accessToken = newCredentials.accessToken;
if (newCredentials.refreshToken) updates.refreshToken = newCredentials.refreshToken;
if (newCredentials.expiresIn) {
updates.expiresAt = new Date(Date.now() + newCredentials.expiresIn * 1000).toISOString();
updates.expiresAt = toExpiresAt(newCredentials.expiresIn);
updates.expiresIn = newCredentials.expiresIn;
}
if (newCredentials.providerSpecificData) {
updates.providerSpecificData = newCredentials.providerSpecificData;
}
if (newCredentials.providerSpecificData) updates.providerSpecificData = newCredentials.providerSpecificData;
if (newCredentials.projectId) updates.projectId = newCredentials.projectId;
const result = await updateProviderConnection(connectionId, updates);
log.info("TOKEN_REFRESH", "Credentials updated in localDb", {
connectionId,
success: !!result
log.info("TOKEN_REFRESH", "Credentials updated in localDb", {
connectionId,
success: !!result
});
return !!result;
} catch (error) {
@@ -90,84 +168,104 @@ export async function updateProviderCredentials(connectionId, newCredentials) {
}
}
// Local-specific: Check and refresh token proactively
// ─── Local-specific: proactive token refresh ─────────────────────────────────
/**
* Check whether the provider token (and, for GitHub, the Copilot token) is
* about to expire and refresh it proactively.
*
* @param {string} provider
* @param {object} credentials
* @returns {Promise<object>} updated credentials object
*/
export async function checkAndRefreshToken(provider, credentials) {
let updatedCredentials = { ...credentials };
let creds = { ...credentials };
// Check regular token expiry
if (updatedCredentials.expiresAt) {
const expiresAt = new Date(updatedCredentials.expiresAt).getTime();
const now = Date.now();
// ── 1. Regular access-token expiry ────────────────────────────────────────
if (creds.expiresAt) {
const expiresAt = new Date(creds.expiresAt).getTime();
const now = Date.now();
const remaining = expiresAt - now;
if (expiresAt - now < TOKEN_EXPIRY_BUFFER_MS) {
if (remaining < TOKEN_EXPIRY_BUFFER_MS) {
log.info("TOKEN_REFRESH", "Token expiring soon, refreshing proactively", {
provider,
expiresIn: Math.round((expiresAt - now) / 1000)
expiresIn: Math.round(remaining / 1000),
});
const newCredentials = await getAccessToken(provider, updatedCredentials);
if (newCredentials && newCredentials.accessToken) {
await updateProviderCredentials(updatedCredentials.connectionId, newCredentials);
updatedCredentials = {
...updatedCredentials,
accessToken: newCredentials.accessToken,
refreshToken: newCredentials.refreshToken || updatedCredentials.refreshToken,
expiresAt: newCredentials.expiresIn
? new Date(Date.now() + newCredentials.expiresIn * 1000).toISOString()
: updatedCredentials.expiresAt
const newCreds = await getAccessToken(provider, creds);
if (newCreds?.accessToken) {
// Persist to DB (non-blocking path continues below)
await updateProviderCredentials(creds.connectionId, newCreds);
creds = {
...creds,
accessToken: newCreds.accessToken,
refreshToken: newCreds.refreshToken ?? creds.refreshToken,
expiresAt: newCreds.expiresIn
? toExpiresAt(newCreds.expiresIn)
: creds.expiresAt,
};
// Non-blocking: refresh projectId with the new access token
_refreshProjectId(provider, creds.connectionId, creds.accessToken);
}
}
}
// Check GitHub copilot token expiry
if (provider === "github" && updatedCredentials.providerSpecificData?.copilotTokenExpiresAt) {
const copilotExpiresAt = updatedCredentials.providerSpecificData.copilotTokenExpiresAt * 1000;
const now = Date.now();
// ── 2. GitHub Copilot token expiry ────────────────────────────────────────
if (provider === "github" && creds.providerSpecificData?.copilotTokenExpiresAt) {
const copilotExpiresAt = creds.providerSpecificData.copilotTokenExpiresAt * 1000;
const now = Date.now();
const remaining = copilotExpiresAt - now;
if (copilotExpiresAt - now < TOKEN_EXPIRY_BUFFER_MS) {
if (remaining < TOKEN_EXPIRY_BUFFER_MS) {
log.info("TOKEN_REFRESH", "Copilot token expiring soon, refreshing proactively", {
provider,
expiresIn: Math.round((copilotExpiresAt - now) / 1000)
expiresIn: Math.round(remaining / 1000),
});
const copilotToken = await refreshCopilotToken(updatedCredentials.accessToken);
const copilotToken = await refreshCopilotToken(creds.accessToken);
if (copilotToken) {
await updateProviderCredentials(updatedCredentials.connectionId, {
providerSpecificData: {
...updatedCredentials.providerSpecificData,
copilotToken: copilotToken.token,
copilotTokenExpiresAt: copilotToken.expiresAt
}
});
updatedCredentials.providerSpecificData = {
...updatedCredentials.providerSpecificData,
copilotToken: copilotToken.token,
copilotTokenExpiresAt: copilotToken.expiresAt
const updatedSpecific = {
...creds.providerSpecificData,
copilotToken: copilotToken.token,
copilotTokenExpiresAt: copilotToken.expiresAt,
};
await updateProviderCredentials(creds.connectionId, {
providerSpecificData: updatedSpecific,
});
creds.providerSpecificData = updatedSpecific;
}
}
}
return updatedCredentials;
return creds;
}
// Local-specific: Refresh GitHub and Copilot tokens together
// ─── Local-specific: combined GitHub + Copilot refresh ───────────────────────
/**
* Refresh the GitHub OAuth token and immediately exchange it for a fresh
* Copilot token.
*
* @param {object} credentials must contain `refreshToken`
* @returns {Promise<object|null>} merged credentials or the raw GitHub credentials on Copilot failure
*/
export async function refreshGitHubAndCopilotTokens(credentials) {
const newGitHubCredentials = await refreshGitHubToken(credentials.refreshToken);
if (newGitHubCredentials?.accessToken) {
const copilotToken = await refreshCopilotToken(newGitHubCredentials.accessToken);
if (copilotToken) {
return {
...newGitHubCredentials,
providerSpecificData: {
copilotToken: copilotToken.token,
copilotTokenExpiresAt: copilotToken.expiresAt
}
};
}
}
return newGitHubCredentials;
const newGitHubCreds = await refreshGitHubToken(credentials.refreshToken);
if (!newGitHubCreds?.accessToken) return newGitHubCreds;
const copilotToken = await refreshCopilotToken(newGitHubCreds.accessToken);
if (!copilotToken) return newGitHubCreds;
return {
...newGitHubCreds,
providerSpecificData: {
copilotToken: copilotToken.token,
copilotTokenExpiresAt: copilotToken.expiresAt,
},
};
}