mirror of
https://github.com/decolua/9router.git
synced 2026-05-08 12:01:28 +00:00
Feat : Setup cloudflare worker for cloud endpoint
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -19,7 +19,6 @@
|
||||
|
||||
# production
|
||||
/build
|
||||
cloud/*
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
|
||||
3
cloud/.gitignore
vendored
Normal file
3
cloud/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
.wrangler/*
|
||||
node_modules/*
|
||||
**/node_modules/*
|
||||
25
cloud/README.md
Normal file
25
cloud/README.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# 9Router Cloud Worker
|
||||
|
||||
Deploy your own Cloudflare Worker to access 9Router from anywhere.
|
||||
|
||||
## Setup
|
||||
|
||||
```bash
|
||||
# 1. Login to Cloudflare
|
||||
npm install -g wrangler
|
||||
wrangler login
|
||||
|
||||
# 2. Install dependencies
|
||||
cd cloud
|
||||
npm install
|
||||
|
||||
# 3. Create KV & D1, then paste IDs into wrangler.toml
|
||||
wrangler kv namespace create KV
|
||||
wrangler d1 create proxy-db
|
||||
|
||||
# 4. Init database & deploy
|
||||
wrangler d1 execute proxy-db --remote --file=./migrations/0001_init.sql
|
||||
npm run deploy
|
||||
```
|
||||
|
||||
Copy your Worker URL → 9Router Dashboard → **Endpoint** → **Setup Cloud** → paste → **Save** → **Enable Cloud**.
|
||||
12
cloud/jsconfig.json
Normal file
12
cloud/jsconfig.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"open-sse": ["../open-sse"],
|
||||
"open-sse/*": ["../open-sse/*"]
|
||||
},
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"target": "ESNext"
|
||||
}
|
||||
}
|
||||
9
cloud/migrations/0001_init.sql
Normal file
9
cloud/migrations/0001_init.sql
Normal file
@@ -0,0 +1,9 @@
|
||||
-- Migration: Create machines table
|
||||
CREATE TABLE IF NOT EXISTS machines (
|
||||
machineId TEXT PRIMARY KEY,
|
||||
data TEXT NOT NULL,
|
||||
updatedAt TEXT NOT NULL
|
||||
);
|
||||
|
||||
-- Index for faster lookups
|
||||
CREATE INDEX IF NOT EXISTS idx_machines_updatedAt ON machines(updatedAt);
|
||||
17
cloud/package.json
Normal file
17
cloud/package.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"name": "9router-cloud",
|
||||
"version": "0.2.13",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"description": "9Router Cloud Worker - Self-hosted Cloudflare Worker proxy",
|
||||
"scripts": {
|
||||
"dev": "wrangler dev",
|
||||
"deploy": "wrangler deploy"
|
||||
},
|
||||
"dependencies": {
|
||||
"open-sse": "file:../open-sse"
|
||||
},
|
||||
"devDependencies": {
|
||||
"wrangler": "^3.0.0"
|
||||
}
|
||||
}
|
||||
37
cloud/src/handlers/cache.js
Normal file
37
cloud/src/handlers/cache.js
Normal file
@@ -0,0 +1,37 @@
|
||||
import { errorResponse } from "open-sse/utils/error.js";
|
||||
import { extractBearerToken, parseApiKey } from "../utils/apiKey.js";
|
||||
import * as log from "../utils/logger.js";
|
||||
|
||||
/**
 * POST handler for cache-clear requests.
 * Resolves a machineId (from the JSON body, or embedded in the bearer API key)
 * and acknowledges the request; there is no cache layer left, so this is a no-op.
 */
export async function handleCacheClear(request, env) {
  const apiKey = extractBearerToken(request);
  if (!apiKey) return errorResponse(401, "Missing API key");

  try {
    const payload = await request.json().catch(() => ({}));

    // Prefer machineId from the body, fall back to the one inside the key.
    let machineId = payload.machineId;
    if (!machineId) {
      machineId = (await parseApiKey(apiKey))?.machineId;
    }
    if (!machineId) return errorResponse(400, "Missing machineId");

    // No cache layer to clear anymore
    log.info("CACHE", `Cache clear requested for machine: ${machineId} (no-op)`);

    const responseBody = JSON.stringify({ success: true, machineId, message: "No cache layer" });
    return new Response(responseBody, {
      headers: {
        "Content-Type": "application/json",
        "Access-Control-Allow-Origin": "*"
      }
    });
  } catch (error) {
    return errorResponse(500, error.message);
  }
}
|
||||
305
cloud/src/handlers/chat.js
Normal file
305
cloud/src/handlers/chat.js
Normal file
@@ -0,0 +1,305 @@
|
||||
import { getModelInfoCore } from "open-sse/services/model.js";
|
||||
import { handleChatCore } from "open-sse/handlers/chatCore.js";
|
||||
import { errorResponse } from "open-sse/utils/error.js";
|
||||
import { checkFallbackError, isAccountUnavailable, getUnavailableUntil, getEarliestRateLimitedUntil, formatRetryAfter } from "open-sse/services/accountFallback.js";
|
||||
import { getComboModelsFromData, handleComboChat } from "open-sse/services/combo.js";
|
||||
import { HTTP_STATUS } from "open-sse/config/constants.js";
|
||||
import * as log from "../utils/logger.js";
|
||||
import { refreshTokenByProvider } from "../services/tokenRefresh.js";
|
||||
import { parseApiKey, extractBearerToken } from "../utils/apiKey.js";
|
||||
import { getMachineData, saveMachineData } from "../services/storage.js";
|
||||
|
||||
const TOKEN_EXPIRY_BUFFER_MS = 5 * 60 * 1000;
|
||||
|
||||
// Resolve a model string against this machine's saved model aliases (if any).
async function getModelInfo(modelStr, machineId, env) {
  const machineData = await getMachineData(machineId, env);
  const aliases = machineData?.modelAliases || {};
  return getModelInfoCore(modelStr, aliases);
}
|
||||
|
||||
/**
 * Handle chat request.
 *
 * Resolves a machineId either from the URL (old key format) or from the
 * bearer API key itself (new format), validates the key against the keys
 * stored for that machine, then dispatches to combo or single-model handling.
 *
 * @param {Request} request
 * @param {Object} env
 * @param {Object} ctx
 * @param {string|null} machineIdOverride - machineId from URL (old format) or null (new format - extract from key)
 */
export async function handleChat(request, env, ctx, machineIdOverride = null) {
  // CORS preflight: answered immediately, no auth required.
  if (request.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
        "Access-Control-Allow-Headers": "*"
      }
    });
  }

  // Determine machineId: from URL (old) or from API key (new)
  let machineId = machineIdOverride;

  if (!machineId) {
    // New format: extract machineId from API key
    const apiKey = extractBearerToken(request);
    if (!apiKey) return errorResponse(HTTP_STATUS.UNAUTHORIZED, "Missing API key");

    const parsed = await parseApiKey(apiKey);
    if (!parsed) return errorResponse(HTTP_STATUS.UNAUTHORIZED, "Invalid API key format");

    if (!parsed.isNewFormat || !parsed.machineId) {
      return errorResponse(HTTP_STATUS.BAD_REQUEST, "API key does not contain machineId. Use /{machineId}/v1/... endpoint for old format keys.");
    }

    machineId = parsed.machineId;
  }

  // Regardless of where machineId came from, the key must match one stored
  // for that machine (see validateApiKey below).
  if (!await validateApiKey(request, machineId, env)) {
    return errorResponse(HTTP_STATUS.UNAUTHORIZED, "Invalid API key");
  }

  let body;
  try {
    body = await request.json();
  } catch {
    return errorResponse(HTTP_STATUS.BAD_REQUEST, "Invalid JSON body");
  }

  // Streaming is treated as on unless the body explicitly sets stream:false.
  log.info("CHAT", `${machineId} | ${body.model}`, { stream: body.stream !== false });

  const modelStr = body.model;
  if (!modelStr) return errorResponse(HTTP_STATUS.BAD_REQUEST, "Missing model");

  // Check if model is a combo (user-defined list of models tried in turn)
  const data = await getMachineData(machineId, env);
  const comboModels = getComboModelsFromData(modelStr, data?.combos || []);

  if (comboModels) {
    log.info("COMBO", `"${modelStr}" with ${comboModels.length} models`);
    return handleComboChat({
      body,
      models: comboModels,
      handleSingleModel: (reqBody, model) => handleSingleModelChat(reqBody, model, machineId, env),
      log
    });
  }

  // Single model request
  return handleSingleModelChat(body, modelStr, machineId, env);
}
|
||||
|
||||
/**
 * Handle single model chat request.
 *
 * Resolves the provider/model for `modelStr`, then loops over this machine's
 * accounts for that provider (highest priority first): if an attempt fails
 * with a fallback-worthy error the account is put into cooldown and the next
 * one is tried; any other outcome is returned to the caller directly.
 */
async function handleSingleModelChat(body, modelStr, machineId, env) {
  const modelInfo = await getModelInfo(modelStr, machineId, env);
  if (!modelInfo.provider) return errorResponse(HTTP_STATUS.BAD_REQUEST, "Invalid model format");

  const { provider, model } = modelInfo;
  log.info("MODEL", `${provider.toUpperCase()} | ${model}`);

  let excludeConnectionId = null; // most recent failed account, skipped on retry
  let lastError = null;
  let lastStatus = null;

  // Each failed account is marked rate-limited and excluded, so the selection
  // below eventually returns null / allRateLimited and the loop terminates.
  while (true) {
    const credentials = await getProviderCredentials(machineId, provider, env, excludeConnectionId);
    if (!credentials || credentials.allRateLimited) {
      if (credentials?.allRateLimited) {
        // Every account is cooling down: report the earliest retry time.
        const retryAfterSec = Math.ceil((new Date(credentials.retryAfter).getTime() - Date.now()) / 1000);
        const errorMsg = lastError || credentials.lastError || "Unavailable";
        const msg = `[${provider}/${model}] ${errorMsg} (${credentials.retryAfterHuman})`;
        const status = lastStatus || Number(credentials.lastErrorCode) || HTTP_STATUS.SERVICE_UNAVAILABLE;
        log.warn("CHAT", `${provider.toUpperCase()} | ${msg}`);
        return new Response(
          JSON.stringify({ error: { message: msg } }),
          { status, headers: { "Content-Type": "application/json", "Retry-After": String(Math.max(retryAfterSec, 1)) } }
        );
      }
      if (!excludeConnectionId) {
        // First attempt and nothing is configured for this provider at all.
        return errorResponse(HTTP_STATUS.BAD_REQUEST, `No credentials for provider: ${provider}`);
      }
      log.warn("CHAT", `${provider.toUpperCase()} | no more accounts`);
      return new Response(
        JSON.stringify({ error: lastError || "All accounts unavailable" }),
        { status: lastStatus || HTTP_STATUS.SERVICE_UNAVAILABLE, headers: { "Content-Type": "application/json" } }
      );
    }

    log.debug("CHAT", `account=${credentials.id}`, { provider });

    // Proactively refresh tokens that are close to expiry before the call.
    const refreshedCredentials = await checkAndRefreshToken(machineId, provider, credentials, env);

    // Use shared chatCore
    const result = await handleChatCore({
      body,
      modelInfo: { provider, model },
      credentials: refreshedCredentials,
      log,
      onCredentialsRefreshed: async (newCreds) => {
        // Persist tokens refreshed mid-request by the core handler.
        await updateCredentials(machineId, credentials.id, newCreds, env);
      },
      onRequestSuccess: async () => {
        // Clear error status only if currently has error (optimization)
        await clearAccountError(machineId, credentials.id, credentials, env);
      }
    });

    if (result.success) return result.response;

    const { shouldFallback } = checkFallbackError(result.status, result.error);

    if (shouldFallback) {
      // Put this account into cooldown and retry with the next one.
      log.warn("FALLBACK", `${provider.toUpperCase()} | ${credentials.id} | ${result.status}`);
      await markAccountUnavailable(machineId, credentials.id, result.status, result.error, env);
      excludeConnectionId = credentials.id;
      lastError = result.error;
      lastStatus = result.status;
      continue;
    }

    // Non-fallback failure (e.g. a caller error): return upstream response as-is.
    return result.response;
  }
}
|
||||
|
||||
/**
 * Refresh the account's access token when it is within the expiry buffer.
 * Persists the refreshed token and returns updated credentials; when no
 * refresh is needed (or it fails), the original credentials come back as-is.
 */
async function checkAndRefreshToken(machineId, provider, credentials, env) {
  const { expiresAt } = credentials;
  if (!expiresAt) return credentials;

  const msUntilExpiry = new Date(expiresAt).getTime() - Date.now();
  if (msUntilExpiry >= TOKEN_EXPIRY_BUFFER_MS) return credentials;

  log.debug("TOKEN", `${provider.toUpperCase()} | expiring, refreshing`);

  const refreshed = await refreshTokenByProvider(provider, credentials);
  if (!refreshed?.accessToken) return credentials;

  await updateCredentials(machineId, credentials.id, refreshed, env);

  const nextExpiresAt = refreshed.expiresIn
    ? new Date(Date.now() + refreshed.expiresIn * 1000).toISOString()
    : credentials.expiresAt;

  return {
    ...credentials,
    accessToken: refreshed.accessToken,
    refreshToken: refreshed.refreshToken || credentials.refreshToken,
    expiresAt: nextExpiresAt
  };
}
|
||||
|
||||
/**
 * Check the request's bearer token against the API keys stored for a machine.
 * @returns {Promise<boolean>} true only when a stored key matches exactly.
 */
async function validateApiKey(request, machineId, env) {
  const auth = request.headers.get("Authorization");
  if (!auth?.startsWith("Bearer ")) return false;

  const presentedKey = auth.slice("Bearer ".length);
  const machineData = await getMachineData(machineId, env);
  return Boolean(machineData?.apiKeys?.some((entry) => entry.key === presentedKey));
}
|
||||
|
||||
/**
 * Pick an account to use for `provider` on this machine.
 *
 * Eligible connections: active, matching provider, not the excluded
 * connection, and not inside a rate-limit cooldown. Among those the lowest
 * `priority` value wins (missing priority sorts last as 999).
 *
 * @returns one of:
 *   - a flat credentials object for the chosen connection;
 *   - { allRateLimited: true, retryAfter, retryAfterHuman, lastError,
 *     lastErrorCode } when accounts exist but are all cooling down;
 *   - null when nothing usable is configured for this provider.
 */
async function getProviderCredentials(machineId, provider, env, excludeConnectionId = null) {
  const data = await getMachineData(machineId, env);
  if (!data?.providers) return null;

  const providerConnections = Object.entries(data.providers)
    .filter(([connId, conn]) => {
      if (conn.provider !== provider || !conn.isActive) return false;
      if (excludeConnectionId && connId === excludeConnectionId) return false;
      if (isAccountUnavailable(conn.rateLimitedUntil)) return false;
      return true;
    })
    .sort((a, b) => (a[1].priority || 999) - (b[1].priority || 999));

  if (providerConnections.length === 0) {
    // Check if accounts exist but all rate limited
    const allConnections = Object.entries(data.providers)
      .filter(([, conn]) => conn.provider === provider && conn.isActive)
      .map(([, conn]) => conn);
    const earliest = getEarliestRateLimitedUntil(allConnections);
    if (earliest) {
      // Surface the error details of the account that frees up first.
      const rateLimitedConns = allConnections.filter(c => c.rateLimitedUntil && new Date(c.rateLimitedUntil).getTime() > Date.now());
      const earliestConn = rateLimitedConns.sort((a, b) => new Date(a.rateLimitedUntil) - new Date(b.rateLimitedUntil))[0];
      return {
        allRateLimited: true,
        retryAfter: earliest,
        retryAfterHuman: formatRetryAfter(earliest),
        lastError: earliestConn?.lastError || null,
        lastErrorCode: earliestConn?.errorCode || null
      };
    }
    return null;
  }

  // Best candidate after the priority sort.
  const [connectionId, connection] = providerConnections[0];

  return {
    id: connectionId,
    apiKey: connection.apiKey,
    accessToken: connection.accessToken,
    refreshToken: connection.refreshToken,
    expiresAt: connection.expiresAt,
    projectId: connection.projectId,
    copilotToken: connection.providerSpecificData?.copilotToken,
    providerSpecificData: connection.providerSpecificData,
    // Include current status for optimization check
    status: connection.status,
    lastError: connection.lastError,
    rateLimitedUntil: connection.rateLimitedUntil
  };
}
|
||||
|
||||
/**
 * Put a connection into cooldown after a provider error.
 * The cooldown length and next backoff level come from checkFallbackError's
 * schedule; the backoff level is persisted so repeated failures wait longer.
 */
async function markAccountUnavailable(machineId, connectionId, status, errorText, env) {
  const data = await getMachineData(machineId, env);
  if (!data?.providers?.[connectionId]) return;

  const conn = data.providers[connectionId];
  const backoffLevel = conn.backoffLevel || 0;
  const { cooldownMs, newBackoffLevel } = checkFallbackError(status, errorText, backoffLevel);
  const rateLimitedUntil = getUnavailableUntil(cooldownMs);
  // Keep the stored reason short; non-string errors get a generic label.
  const reason = typeof errorText === "string" ? errorText.slice(0, 100) : "Provider error";

  data.providers[connectionId].rateLimitedUntil = rateLimitedUntil;
  data.providers[connectionId].status = "unavailable";
  data.providers[connectionId].lastError = reason;
  data.providers[connectionId].errorCode = status || null;
  data.providers[connectionId].lastErrorAt = new Date().toISOString();
  data.providers[connectionId].backoffLevel = newBackoffLevel ?? backoffLevel;
  data.providers[connectionId].updatedAt = new Date().toISOString();

  await saveMachineData(machineId, data, env);
  log.warn("ACCOUNT", `${connectionId} | unavailable until ${rateLimitedUntil} (backoff=${newBackoffLevel ?? backoffLevel})`);
}
|
||||
|
||||
/**
 * Reset a connection's error/cooldown state after a successful request.
 * Skips the read-modify-write cycle entirely when the caller's snapshot of
 * the credentials already looks clean (optimization).
 */
async function clearAccountError(machineId, connectionId, currentCredentials, env) {
  const { status, lastError, rateLimitedUntil } = currentCredentials;
  const alreadyClean = status !== "unavailable" && !lastError && !rateLimitedUntil;
  if (alreadyClean) return;

  const data = await getMachineData(machineId, env);
  const conn = data?.providers?.[connectionId];
  if (!conn) return;

  Object.assign(conn, {
    status: "active",
    lastError: null,
    lastErrorAt: null,
    rateLimitedUntil: null,
    backoffLevel: 0,
    updatedAt: new Date().toISOString()
  });

  await saveMachineData(machineId, data, env);
  log.info("ACCOUNT", `${connectionId} | error cleared`);
}
|
||||
|
||||
/**
 * Persist refreshed token fields onto a stored connection.
 * refreshToken/expiry are only written when the refresh result provides them.
 */
async function updateCredentials(machineId, connectionId, newCredentials, env) {
  const data = await getMachineData(machineId, env);
  const conn = data?.providers?.[connectionId];
  if (!conn) return;

  conn.accessToken = newCredentials.accessToken;
  if (newCredentials.refreshToken) conn.refreshToken = newCredentials.refreshToken;
  if (newCredentials.expiresIn) {
    conn.expiresAt = new Date(Date.now() + newCredentials.expiresIn * 1000).toISOString();
    conn.expiresIn = newCredentials.expiresIn;
  }
  conn.updatedAt = new Date().toISOString();

  await saveMachineData(machineId, data, env);
  log.debug("TOKEN", `credentials updated | ${connectionId}`);
}
|
||||
34
cloud/src/handlers/cleanup.js
Normal file
34
cloud/src/handlers/cleanup.js
Normal file
@@ -0,0 +1,34 @@
|
||||
import * as log from "../utils/logger.js";
|
||||
|
||||
const RETENTION_DAYS = 7;
|
||||
|
||||
/**
 * Cleanup old machine data from D1.
 * Runs daily via cron trigger; deletes rows whose updatedAt is older than
 * RETENTION_DAYS.
 * @param {Object} env - Worker bindings (expects a D1 database at env.DB)
 * @returns {Promise<Object>} { success, deleted, cutoffDate } or { success: false, error }
 */
export async function handleCleanup(env) {
  const retentionMs = RETENTION_DAYS * 24 * 60 * 60 * 1000;
  const cutoffDate = new Date(Date.now() - retentionMs).toISOString();

  log.info("CLEANUP", `Deleting records older than ${cutoffDate}`);

  try {
    const statement = env.DB.prepare("DELETE FROM machines WHERE updatedAt < ?").bind(cutoffDate);
    const result = await statement.run();
    const deleted = result.meta?.changes || 0;

    log.info("CLEANUP", `Deleted ${deleted} old records`);

    return { success: true, deleted, cutoffDate };
  } catch (error) {
    log.error("CLEANUP", error.message);
    return { success: false, error: error.message };
  }
}
|
||||
|
||||
46
cloud/src/handlers/countTokens.js
Normal file
46
cloud/src/handlers/countTokens.js
Normal file
@@ -0,0 +1,46 @@
|
||||
import { errorResponse } from "open-sse/utils/error.js";
|
||||
|
||||
const CORS_HEADERS = {
  "Access-Control-Allow-Origin": "*",
  "Access-Control-Allow-Methods": "POST, OPTIONS",
  "Access-Control-Allow-Headers": "*"
};

/**
 * Handle POST /{machineId}/v1/messages/count_tokens
 * Mock token count response based on content length.
 *
 * Counts characters in `messages` (string content and `text` parts) and also
 * in the `system` prompt (string or array of text blocks), which Anthropic's
 * count_tokens request body may carry — previously `system` was ignored and
 * the estimate undercounted.
 *
 * @param {Request} request - incoming request with an Anthropic-style JSON body
 * @param {Object} env - Worker bindings (unused)
 * @returns {Promise<Response>} JSON `{ input_tokens }` (rough: ~4 chars/token)
 */
export async function handleCountTokens(request, env) {
  let body;
  try {
    body = await request.json();
  } catch {
    return errorResponse(400, "Invalid JSON body");
  }

  // Sum countable characters from one message-like content value.
  const charsOf = (content) => {
    if (typeof content === "string") return content.length;
    if (Array.isArray(content)) {
      let n = 0;
      for (const part of content) {
        if (part.type === "text" && part.text) n += part.text.length;
      }
      return n;
    }
    return 0;
  };

  let totalChars = 0;
  for (const msg of body.messages || []) {
    totalChars += charsOf(msg.content);
  }
  // Fix: include the system prompt in the estimate.
  totalChars += charsOf(body.system);

  // Rough estimate: ~4 chars per token
  const inputTokens = Math.ceil(totalChars / 4);

  return new Response(JSON.stringify({
    input_tokens: inputTokens
  }), {
    headers: { "Content-Type": "application/json", ...CORS_HEADERS }
  });
}
|
||||
|
||||
75
cloud/src/handlers/forward.js
Normal file
75
cloud/src/handlers/forward.js
Normal file
@@ -0,0 +1,75 @@
|
||||
// CF headers to remove
|
||||
const CF_HEADERS = [
|
||||
"cf-connecting-ip", "cf-connecting-ip6", "cf-ray", "cf-visitor",
|
||||
"cf-ipcountry", "cf-tracking-id", "cf-connecting-ip6-policy",
|
||||
"x-real-ip", "x-forwarded-for", "x-forwarded-proto", "x-forwarded-host"
|
||||
];
|
||||
|
||||
/**
 * Forward a JSON request to an arbitrary endpoint, stripping Cloudflare's
 * identifying headers from the caller-supplied set and attaching standard
 * X-Forwarded-* metadata instead. Streams the upstream body back.
 */
export async function handleForward(request) {
  try {
    const requestUrl = new URL(request.url);
    const clientIp = request.headers.get("CF-Connecting-IP") || "";
    const { targetUrl, headers = {}, body } = await request.json();

    if (!targetUrl) {
      return new Response(JSON.stringify({ error: "targetUrl is required" }), {
        status: 400,
        headers: { "Content-Type": "application/json" }
      });
    }

    // Drop CF-injected headers the caller may have echoed back to us.
    const cleanHeaders = Object.fromEntries(
      Object.entries(headers).filter(([name]) => !CF_HEADERS.includes(name.toLowerCase()))
    );

    // Standard forwarding metadata.
    cleanHeaders["X-Client-IP"] = clientIp;
    cleanHeaders["X-Forwarded-Proto"] = requestUrl.protocol.replace(":", "");
    cleanHeaders["X-Forwarded-Host"] = requestUrl.host;
    cleanHeaders["X-From-Worker"] = "1";

    console.log("[FORWARD] Target:", targetUrl);
    console.log("[FORWARD] Headers:", JSON.stringify(cleanHeaders));

    // Create Request object to have more control over headers
    const outgoingRequest = new Request(targetUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...cleanHeaders
      },
      body: JSON.stringify(body)
    });

    // Use fetch with cf options to minimize auto-added headers
    const upstream = await fetch(outgoingRequest, {
      cf: {
        scrapeShield: false,
        minify: false,
        mirage: false,
        polish: "off"
      }
    });

    // Stream the upstream body straight back to the client.
    return new Response(upstream.body, {
      status: upstream.status,
      headers: {
        "Content-Type": upstream.headers.get("Content-Type") || "application/json",
        "Access-Control-Allow-Origin": "*"
      }
    });
  } catch (error) {
    console.error("[FORWARD] Error:", error.message);
    return new Response(JSON.stringify({ error: error.message }), {
      status: 500,
      headers: { "Content-Type": "application/json" }
    });
  }
}
|
||||
173
cloud/src/handlers/forwardRaw.js
Normal file
173
cloud/src/handlers/forwardRaw.js
Normal file
@@ -0,0 +1,173 @@
|
||||
import { connect } from "cloudflare:sockets";
|
||||
|
||||
// Forward request via raw TCP socket (bypasses CF auto headers).
// Hand-builds an HTTP/1.1 POST, sends it over cloudflare:sockets' connect(),
// then accumulates and parses the raw response.
// NOTE(review): only Content-Length responses are detected as "complete";
// chunked transfer-encoding is not handled and relies on the read loop's
// `done`/attempt bound — confirm upstream always sends Content-Length.
export async function handleForwardRaw(request) {
  try {
    const { targetUrl, headers = {}, body } = await request.json();

    if (!targetUrl) {
      return new Response(JSON.stringify({ error: "targetUrl is required" }), {
        status: 400,
        headers: { "Content-Type": "application/json" }
      });
    }

    // Decompose the target URL into socket parameters.
    const url = new URL(targetUrl);
    const host = url.hostname;
    const port = url.port || (url.protocol === "https:" ? 443 : 80);
    const path = url.pathname + url.search;
    const isHttps = url.protocol === "https:";

    console.log("[FORWARD_RAW] Connecting to:", host, port, isHttps ? "(TLS)" : "");

    // Connect to target server
    let secureSocket;
    if (isHttps) {
      // For HTTPS, connect directly with TLS enabled
      console.log("[FORWARD_RAW] Creating TLS socket...");
      secureSocket = connect({
        hostname: host,
        port: parseInt(port),
        secureTransport: "on"
      });
      console.log("[FORWARD_RAW] TLS socket created");
    } else {
      secureSocket = connect({ hostname: host, port: parseInt(port) });
    }

    console.log("[FORWARD_RAW] Socket object:", secureSocket);
    console.log("[FORWARD_RAW] Socket opened:", secureSocket.opened);

    // Wait for socket to be ready
    try {
      console.log("[FORWARD_RAW] Waiting for socket to open...");
      await secureSocket.opened;
      console.log("[FORWARD_RAW] Socket opened successfully");
    } catch (openError) {
      // Logged here for diagnostics, then rethrown to the outer catch.
      console.error("[FORWARD_RAW] Socket open error:", openError.message);
      throw openError;
    }

    console.log("[FORWARD_RAW] Getting writer and reader...");
    const writer = secureSocket.writable.getWriter();
    const reader = secureSocket.readable.getReader();
    console.log("[FORWARD_RAW] Writer and reader obtained");

    // Build raw HTTP request. Content-Length is computed from the UTF-8
    // byte length of the serialized body (not the character count).
    const bodyStr = JSON.stringify(body);
    const requestHeaders = {
      "Host": host,
      "Content-Type": "application/json",
      "Content-Length": new TextEncoder().encode(bodyStr).length.toString(),
      "Connection": "close",
      ...headers
    };

    // Build HTTP request string
    let httpRequest = `POST ${path} HTTP/1.1\r\n`;
    for (const [key, value] of Object.entries(requestHeaders)) {
      httpRequest += `${key}: ${value}\r\n`;
    }
    httpRequest += `\r\n${bodyStr}`;

    console.log("[FORWARD_RAW] Sending request:", httpRequest.substring(0, 300));
    console.log("[FORWARD_RAW] Full request length:", httpRequest.length);

    // Send request
    try {
      console.log("[FORWARD_RAW] Writing to socket...");
      await writer.write(new TextEncoder().encode(httpRequest));
      console.log("[FORWARD_RAW] Write complete, closing writer...");
      await writer.close();
      console.log("[FORWARD_RAW] Writer closed");
    } catch (writeError) {
      console.error("[FORWARD_RAW] Write error:", writeError.message);
      throw writeError;
    }

    // Read the response, accumulating chunks into one buffer.
    console.log("[FORWARD_RAW] Starting to read response...");
    let responseData = new Uint8Array(0);
    let attempts = 0;
    // Bound on reader.read() calls. NOTE(review): this is NOT a time-based
    // timeout — a single stalled read() can still block indefinitely.
    const maxAttempts = 100;

    while (attempts < maxAttempts) {
      console.log("[FORWARD_RAW] Reading attempt:", attempts);
      const { done, value } = await reader.read();
      console.log("[FORWARD_RAW] Read result - done:", done, "value length:", value?.length);
      if (done) break;
      if (value) {
        // Append the new chunk to the accumulated buffer.
        const newData = new Uint8Array(responseData.length + value.length);
        newData.set(responseData);
        newData.set(value, responseData.length);
        responseData = newData;

        // Check if we have complete response (has headers end marker)
        const text = new TextDecoder().decode(responseData);
        if (text.includes("\r\n\r\n")) {
          // Check if we have Content-Length and received all body.
          // NOTE(review): lengths here are decoded-character counts; for
          // multi-byte UTF-8 bodies this can undercount received bytes and
          // delay the break until `done` — confirm acceptable.
          const headerEnd = text.indexOf("\r\n\r\n");
          const headers = text.substring(0, headerEnd).toLowerCase();
          const contentLengthMatch = headers.match(/content-length:\s*(\d+)/);
          if (contentLengthMatch) {
            const expectedLength = parseInt(contentLengthMatch[1]);
            const bodyReceived = text.length - headerEnd - 4;
            if (bodyReceived >= expectedLength) {
              console.log("[FORWARD_RAW] Complete response received");
              break;
            }
          }
        }
      }
      attempts++;
    }

    console.log("[FORWARD_RAW] Read loop finished, total bytes:", responseData.length);

    const responseText = new TextDecoder().decode(responseData);
    console.log("[FORWARD_RAW] Response received:", responseText.substring(0, 500));

    // Parse HTTP response
    const headerEndIndex = responseText.indexOf("\r\n\r\n");
    if (headerEndIndex === -1) {
      console.log("[FORWARD_RAW] Full response data:", responseText);
      throw new Error("Invalid HTTP response - no header end found");
    }

    const headerPart = responseText.substring(0, headerEndIndex);
    const bodyPart = responseText.substring(headerEndIndex + 4);

    // Parse status line (e.g. "HTTP/1.1 200 OK"); default 200 if unparsable.
    const statusLine = headerPart.split("\r\n")[0];
    const statusMatch = statusLine.match(/HTTP\/[\d.]+ (\d+)/);
    const status = statusMatch ? parseInt(statusMatch[1]) : 200;

    // Parse headers (lowercased keys; later duplicates overwrite earlier ones)
    const responseHeaders = {};
    const headerLines = headerPart.split("\r\n").slice(1);
    for (const line of headerLines) {
      const colonIndex = line.indexOf(":");
      if (colonIndex > 0) {
        const key = line.substring(0, colonIndex).trim();
        const value = line.substring(colonIndex + 1).trim();
        responseHeaders[key.toLowerCase()] = value;
      }
    }

    return new Response(bodyPart, {
      status,
      headers: {
        "Content-Type": responseHeaders["content-type"] || "application/json",
        "Access-Control-Allow-Origin": "*"
      }
    });

  } catch (error) {
    // Any failure (parse, connect, write, read) is returned as a 500 payload.
    console.error("[FORWARD_RAW] Error:", error.message, error.stack);
    return new Response(JSON.stringify({ error: error.message }), {
      status: 500,
      headers: { "Content-Type": "application/json" }
    });
  }
}
|
||||
|
||||
227
cloud/src/handlers/sync.js
Normal file
227
cloud/src/handlers/sync.js
Normal file
@@ -0,0 +1,227 @@
|
||||
import * as log from "../utils/logger.js";
|
||||
import { getMachineData, saveMachineData, deleteMachineData } from "../services/storage.js";
|
||||
|
||||
const CORS_HEADERS = {
|
||||
"Content-Type": "application/json",
|
||||
"Access-Control-Allow-Origin": "*"
|
||||
};
|
||||
|
||||
// Removed: WORKER_FIELDS and WORKER_SPECIFIC_FIELDS
|
||||
// Now syncing entire provider based on updatedAt (simpler logic)
|
||||
|
||||
/**
 * Router for /sync/:machineId.
 * OPTIONS answers the CORS preflight; GET/POST/DELETE dispatch to the
 * per-method handlers; anything else is 405.
 */
export async function handleSync(request, env, ctx) {
  const url = new URL(request.url);
  const machineId = url.pathname.split("/")[2]; // /sync/:machineId

  // Handle CORS preflight
  if (request.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET, POST, DELETE, OPTIONS",
        "Access-Control-Allow-Headers": "*"
      }
    });
  }

  if (!machineId) {
    log.warn("SYNC", "Missing machineId in path");
    return jsonResponse({ error: "Missing machineId" }, 400);
  }

  // Route by method
  if (request.method === "GET") return handleGet(machineId, env);
  if (request.method === "POST") return handlePost(request, machineId, env);
  if (request.method === "DELETE") return handleDelete(machineId, env);
  return jsonResponse({ error: "Method not allowed" }, 405);
}
|
||||
|
||||
/**
 * GET /sync/:machineId - Return merged data for Web to update
 */
async function handleGet(machineId, env) {
  const stored = await getMachineData(machineId, env);

  if (!stored) {
    log.warn("SYNC", "No data found", { machineId });
    return jsonResponse({ error: "No data found" }, 404);
  }

  log.info("SYNC", "Data retrieved", { machineId });
  return jsonResponse({ success: true, data: stored });
}
|
||||
|
||||
/**
 * POST /sync/:machineId - Merge Web data with Worker data
 * providers stored by ID (supports multiple connections per provider)
 *
 * Merge rules:
 *  - Each provider in body.providers is reconciled with the stored copy by
 *    id (newer updatedAt wins — see mergeProvider); ids not yet stored are
 *    added as-is.
 *  - Providers stored on the Worker but absent from body.providers are
 *    dropped: mergedProviders is built only from the Web payload.
 *  - modelAliases, combos and apiKeys come wholesale from the Web payload,
 *    falling back to the stored values.
 * @param {Request} request - JSON body { providers: [], modelAliases?, combos?, apiKeys? }
 * @param {string} machineId
 * @param {Object} env - Worker bindings (DB)
 * @returns {Promise<Response>} { success, data, changes } or an error status
 */
async function handlePost(request, machineId, env) {
  let body;
  try {
    body = await request.json();
  } catch {
    log.warn("SYNC", "Invalid JSON body", { machineId });
    return jsonResponse({ error: "Invalid JSON body" }, 400);
  }

  // Validate required fields
  if (!body.providers || !Array.isArray(body.providers)) {
    log.warn("SYNC", "Missing or invalid providers array", { machineId });
    return jsonResponse({ error: "Missing providers array" }, 400);
  }

  const existingData = await getMachineData(machineId, env) || { providers: {}, modelAliases: {}, apiKeys: [] };

  // Merge providers by ID
  const mergedProviders = {};
  // Tracks which side "won" each provider id, returned to the caller.
  const changes = { updated: [], fromWorker: [] };

  for (const webProvider of body.providers) {
    const providerId = webProvider.id;
    if (!providerId) {
      // Entries without an id cannot be reconciled; skip rather than fail.
      log.warn("SYNC", "Provider missing id", { provider: webProvider.provider });
      continue;
    }

    const workerProvider = existingData.providers[providerId];

    if (workerProvider) {
      // Merge: token fields from Worker, config fields from Web
      mergedProviders[providerId] = mergeProvider(webProvider, workerProvider, changes, providerId);
    } else {
      // New provider from Web
      mergedProviders[providerId] = formatProviderData(webProvider);
      changes.updated.push(providerId);
    }
  }

  // Prepare final data - modelAliases, apiKeys, combos always from Web
  const finalData = {
    providers: mergedProviders,
    modelAliases: body.modelAliases || existingData.modelAliases || {},
    combos: body.combos || existingData.combos || [],
    apiKeys: body.apiKeys || existingData.apiKeys || [],
    updatedAt: new Date().toISOString()
  };

  // Store in D1 + invalidate cache
  await saveMachineData(machineId, finalData, env);

  log.info("SYNC", "Data synced successfully", {
    machineId,
    providerCount: Object.keys(mergedProviders).length,
    changes
  });

  return jsonResponse({
    success: true,
    data: finalData,
    changes
  });
}
|
||||
|
||||
/**
 * DELETE /sync/:machineId - Clear cache when Worker is disabled
 */
async function handleDelete(machineId, env) {
  await deleteMachineData(machineId, env);
  log.info("SYNC", "Data deleted", { machineId });

  const payload = { success: true, message: "Data deleted successfully" };
  return jsonResponse(payload);
}
|
||||
|
||||
/**
 * Merge provider data: compare updatedAt to decide which source to use
 * Simple logic: newer wins (sync entire provider)
 * Ties resolve in favor of the Web payload; because NaN comparisons are
 * false, the Web copy also wins whenever either timestamp is unparseable.
 * Side effect: pushes providerId into changes.fromWorker or changes.updated.
 * @param {Object} webProvider - provider sent by the Web dashboard
 * @param {Object} workerProvider - provider currently stored on the Worker
 * @param {{updated: string[], fromWorker: string[]}} changes - mutated in place
 * @param {string} providerId
 * @returns {Object} winning provider, reformatted, with a fresh updatedAt
 */
function mergeProvider(webProvider, workerProvider, changes, providerId) {
  // Missing timestamps coerce to epoch 0 and therefore lose the comparison.
  const webTime = new Date(webProvider.updatedAt || 0).getTime();
  const workerTime = new Date(workerProvider.updatedAt || 0).getTime();

  let merged;

  if (workerTime > webTime) {
    // Cloud has newer data - use entire Cloud provider
    merged = formatProviderData(workerProvider);
    changes.fromWorker.push(providerId);
  } else {
    // Server has newer data - use entire Server provider
    merged = formatProviderData(webProvider);
    changes.updated.push(providerId);
  }

  // Always update timestamp
  merged.updatedAt = new Date().toISOString();
  return merged;
}
|
||||
|
||||
/**
 * Format provider data for storage.
 * Copies the known provider fields in a fixed order; status/error fields get
 * fallback defaults, and updatedAt defaults to "now" when absent.
 * @param {Object} provider - raw provider record
 * @returns {Object} normalized provider record
 */
function formatProviderData(provider) {
  // Identity, config and credential fields are copied through unchanged
  // (missing fields stay undefined). Order matters for stored JSON stability.
  const passthrough = [
    "id", "provider", "authType", "name", "displayName", "email",
    "priority", "globalPriority", "defaultModel", "accessToken",
    "refreshToken", "expiresAt", "expiresIn", "tokenType", "scope",
    "idToken", "projectId", "apiKey"
  ];

  const formatted = {};
  for (const field of passthrough) {
    formatted[field] = provider[field];
  }

  // Fields with defaults ("||" intentionally treats any falsy value as absent).
  formatted.providerSpecificData = provider.providerSpecificData || {};
  formatted.isActive = provider.isActive;
  formatted.status = provider.status || "active";
  formatted.lastError = provider.lastError || null;
  formatted.lastErrorAt = provider.lastErrorAt || null;
  formatted.errorCode = provider.errorCode || null;
  formatted.rateLimitedUntil = provider.rateLimitedUntil || null;
  formatted.createdAt = provider.createdAt;
  formatted.updatedAt = provider.updatedAt || new Date().toISOString();

  return formatted;
}
|
||||
|
||||
/**
 * Update provider status (called when token refresh fails or API errors)
 * Mutates the matching entry in place and returns the same providers map;
 * unknown providerIds are a no-op.
 * @param {Object} providers - provider entries keyed by connection id
 * @param {string} providerId
 * @param {string} status
 * @param {string|null} [error]
 * @param {string|null} [errorCode]
 * @returns {Object} the (possibly mutated) providers map
 */
export function updateProviderStatus(providers, providerId, status, error = null, errorCode = null) {
  const entry = providers[providerId];
  if (entry) {
    Object.assign(entry, {
      status,
      lastError: error,
      lastErrorAt: error ? new Date().toISOString() : null,
      errorCode,
      updatedAt: new Date().toISOString()
    });
  }
  return providers;
}
|
||||
|
||||
/**
 * Helper to create JSON response
 * Serializes the payload and attaches the module-wide CORS_HEADERS.
 * @param {*} data - payload to serialize
 * @param {number} [status=200] - HTTP status code
 * @returns {Response}
 */
function jsonResponse(data, status = 200) {
  const body = JSON.stringify(data);
  return new Response(body, { status, headers: CORS_HEADERS });
}
|
||||
60
cloud/src/handlers/verify.js
Normal file
60
cloud/src/handlers/verify.js
Normal file
@@ -0,0 +1,60 @@
|
||||
import { parseApiKey, extractBearerToken } from "../utils/apiKey.js";
|
||||
import { getMachineData } from "../services/storage.js";
|
||||
|
||||
/**
 * Verify API key endpoint
 * @param {Request} request
 * @param {Object} env
 * @param {string|null} machineIdOverride - machineId from URL (old format) or null (new format)
 * @returns {Promise<Response>} { valid, machineId, providersCount } on success,
 *   or a 4xx error Response
 */
export async function handleVerify(request, env, machineIdOverride = null) {
  const apiKey = extractBearerToken(request);
  if (!apiKey) {
    return jsonResponse({ error: "Missing or invalid Authorization header" }, 401);
  }

  // Determine machineId: from URL (old) or from API key (new)
  let machineId = machineIdOverride;

  if (!machineId) {
    const parsed = await parseApiKey(apiKey);
    if (!parsed) {
      return jsonResponse({ error: "Invalid API key format" }, 401);
    }

    // Old-format keys carry no embedded machineId, so without a URL override
    // there is nothing to look up.
    if (!parsed.isNewFormat || !parsed.machineId) {
      return jsonResponse({ error: "API key does not contain machineId" }, 400);
    }

    machineId = parsed.machineId;
  }

  const data = await getMachineData(machineId, env);

  if (!data) {
    return jsonResponse({ error: "Machine not found" }, 404);
  }

  // The key must exactly match one of the machine's registered API keys.
  const isValid = data.apiKeys?.some(k => k.key === apiKey) || false;

  if (!isValid) {
    return jsonResponse({ error: "Invalid API key" }, 401);
  }

  return jsonResponse({
    valid: true,
    machineId,
    providersCount: Object.keys(data.providers || {}).length
  });
}
|
||||
|
||||
/**
 * Build a JSON Response with a permissive CORS origin header.
 * @param {*} data - payload to serialize
 * @param {number} [status=200] - HTTP status code
 * @returns {Response}
 */
function jsonResponse(data, status = 200) {
  const headers = new Headers();
  headers.set("Content-Type", "application/json");
  headers.set("Access-Control-Allow-Origin", "*");
  return new Response(JSON.stringify(data), { status, headers });
}
|
||||
|
||||
217
cloud/src/index.js
Normal file
217
cloud/src/index.js
Normal file
@@ -0,0 +1,217 @@
|
||||
import { initTranslators } from "open-sse/translator/index.js";
|
||||
import { ollamaModels } from "open-sse/config/ollamaModels.js";
|
||||
import { transformToOllama } from "open-sse/utils/ollamaTransform.js";
|
||||
import * as log from "./utils/logger.js";
|
||||
|
||||
// Static imports for handlers (avoid dynamic import CPU cost)
|
||||
import { handleCleanup } from "./handlers/cleanup.js";
|
||||
import { handleCacheClear } from "./handlers/cache.js";
|
||||
import { handleSync } from "./handlers/sync.js";
|
||||
import { handleChat } from "./handlers/chat.js";
|
||||
import { handleVerify } from "./handlers/verify.js";
|
||||
import { handleTestClaude } from "./handlers/testClaude.js";
|
||||
import { handleForward } from "./handlers/forward.js";
|
||||
import { handleForwardRaw } from "./handlers/forwardRaw.js";
|
||||
import { createLandingPageResponse } from "./services/landingPage.js";
|
||||
|
||||
// Initialize translators at module load (static imports)
|
||||
initTranslators();
|
||||
|
||||
// Helper to add permissive CORS headers to a response. The response is
// rebuilt because headers on a received Response may be immutable.
function addCorsHeaders(response) {
  const headers = new Headers(response.headers);
  const cors = {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST, DELETE, OPTIONS",
    "Access-Control-Allow-Headers": "*"
  };
  for (const [name, value] of Object.entries(cors)) {
    headers.set(name, value);
  }
  return new Response(response.body, {
    status: response.status,
    statusText: response.statusText,
    headers
  });
}
|
||||
|
||||
const worker = {
|
||||
  /**
   * Cron trigger entry point: runs the periodic data cleanup and logs its result.
   * @param {Object} event - scheduled event (unused)
   * @param {Object} env - Worker bindings
   * @param {Object} ctx - execution context (unused)
   */
  async scheduled(event, env, ctx) {
    const result = await handleCleanup(env);
    log.info("SCHEDULED", "Cleanup completed", result);
  },
|
||||
|
||||
  /**
   * HTTP entry point. Routes by pathname, first match wins:
   *  - "/" landing page, "/health", "/api/tags" (Ollama model list)
   *  - "/cache/clear", "/sync/:machineId"
   *  - New format "/v1/..." (machineId embedded in the API key)
   *  - Old format "/:machineId/v1/..."
   *  - Debug helpers "/testClaude", "/forward", "/forward-raw"
   * Every response is logged with its status and elapsed time; uncaught
   * errors become a JSON 500.
   * @param {Request} request
   * @param {Object} env - Worker bindings (DB, KV)
   * @param {Object} ctx - execution context
   * @returns {Promise<Response>}
   */
  async fetch(request, env, ctx) {
    const startTime = Date.now();
    const url = new URL(request.url);
    let path = url.pathname;

    // Normalize /v1/v1/* → /v1/* (clients sometimes append /v1 to a base
    // URL that already ends in /v1)
    if (path.startsWith("/v1/v1/")) {
      path = path.replace("/v1/v1/", "/v1/");
    } else if (path === "/v1/v1") {
      path = "/v1";
    }

    log.request(request.method, path);

    // CORS preflight
    if (request.method === "OPTIONS") {
      return new Response(null, {
        headers: {
          "Access-Control-Allow-Origin": "*",
          "Access-Control-Allow-Methods": "GET, POST, DELETE, OPTIONS",
          "Access-Control-Allow-Headers": "*"
        }
      });
    }

    try {
      // Routes

      // Landing page
      if (path === "/" && request.method === "GET") {
        const response = createLandingPageResponse();
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // Plain health probe
      if (path === "/health" && request.method === "GET") {
        log.response(200, Date.now() - startTime);
        return new Response(JSON.stringify({ status: "ok" }), {
          headers: { "Content-Type": "application/json" }
        });
      }

      // Ollama compatible - list models
      if (path === "/api/tags" && request.method === "GET") {
        log.response(200, Date.now() - startTime);
        return new Response(JSON.stringify(ollamaModels), {
          headers: { "Content-Type": "application/json" }
        });
      }

      if (path === "/cache/clear" && request.method === "POST") {
        const response = await handleCacheClear(request, env);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // Sync provider data by machineId (GET, POST, DELETE)
      if (path.startsWith("/sync/") && ["GET", "POST", "DELETE"].includes(request.method)) {
        const response = await handleSync(request, env, ctx);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // ========== NEW FORMAT: /v1/... (machineId in API key) ==========

      // New format: /v1/chat/completions
      if (path === "/v1/chat/completions" && request.method === "POST") {
        const response = await handleChat(request, env, ctx, null);
        log.response(response.status, Date.now() - startTime);
        return addCorsHeaders(response);
      }

      // New format: /v1/messages (Claude format)
      if (path === "/v1/messages" && request.method === "POST") {
        const response = await handleChat(request, env, ctx, null);
        log.response(response.status, Date.now() - startTime);
        return addCorsHeaders(response);
      }

      // New format: /v1/responses (OpenAI Responses API - Codex CLI)
      // NOTE(review): unlike the two routes above, this one is returned
      // without addCorsHeaders — confirm whether that is intentional.
      if (path === "/v1/responses" && request.method === "POST") {
        const response = await handleChat(request, env, ctx, null);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // New format: /v1/verify
      if (path === "/v1/verify" && request.method === "GET") {
        const response = await handleVerify(request, env, null);
        log.response(response.status, Date.now() - startTime);
        return addCorsHeaders(response);
      }

      // New format: /v1/api/chat (Ollama format)
      // The request is cloned so the body can be read here for the model
      // name while handleChat still consumes the original stream.
      // NOTE(review): logs 200 and transforms even if handleChat returned an
      // error status — confirm intended.
      if (path === "/v1/api/chat" && request.method === "POST") {
        const clonedReq = request.clone();
        const body = await clonedReq.json();
        const response = await handleChat(request, env, ctx, null);
        const ollamaResponse = transformToOllama(response, body.model || "llama3.2");
        log.response(200, Date.now() - startTime);
        return ollamaResponse;
      }

      // ========== OLD FORMAT: /{machineId}/v1/... ==========

      // Machine ID based chat endpoint
      if (path.match(/^\/[^\/]+\/v1\/chat\/completions$/) && request.method === "POST") {
        const machineId = path.split("/")[1];
        const response = await handleChat(request, env, ctx, machineId);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // Machine ID based messages endpoint (Claude format)
      if (path.match(/^\/[^\/]+\/v1\/messages$/) && request.method === "POST") {
        const machineId = path.split("/")[1];
        const response = await handleChat(request, env, ctx, machineId);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // Machine ID based api/chat endpoint (Ollama format)
      if (path.match(/^\/[^\/]+\/v1\/api\/chat$/) && request.method === "POST") {
        const machineId = path.split("/")[1];
        const clonedReq = request.clone();
        const body = await clonedReq.json();
        const response = await handleChat(request, env, ctx, machineId);
        const ollamaResponse = transformToOllama(response, body.model || "llama3.2");
        log.response(200, Date.now() - startTime);
        return ollamaResponse;
      }

      // Machine ID based verify endpoint
      if (path.match(/^\/[^\/]+\/v1\/verify$/) && request.method === "GET") {
        const machineId = path.split("/")[1];
        const response = await handleVerify(request, env, machineId);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // Test Claude - forward to Anthropic API
      if (path === "/testClaude" && request.method === "POST") {
        const response = await handleTestClaude(request);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // Forward request to any endpoint
      if (path === "/forward" && request.method === "POST") {
        const response = await handleForward(request);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // Forward request via raw TCP socket (bypasses CF auto headers)
      if (path === "/forward-raw" && request.method === "POST") {
        const response = await handleForwardRaw(request);
        log.response(response.status, Date.now() - startTime);
        return response;
      }

      // Fallthrough: no route matched.
      log.warn("ROUTER", "Not found", { path });
      return new Response(JSON.stringify({ error: "Not Found" }), {
        status: 404,
        headers: { "Content-Type": "application/json" }
      });

    } catch (error) {
      // Any uncaught handler error becomes a JSON 500 with its message.
      log.error("ROUTER", error.message, { stack: error.stack });
      return new Response(JSON.stringify({ error: error.message }), {
        status: 500,
        headers: { "Content-Type": "application/json" }
      });
    }
  }
|
||||
};
|
||||
|
||||
export default worker;
|
||||
|
||||
27
cloud/src/services/landingPage.js
Normal file
27
cloud/src/services/landingPage.js
Normal file
@@ -0,0 +1,27 @@
|
||||
/**
|
||||
* Landing Page Service
|
||||
* Simple health check page for self-hosted worker
|
||||
*/
|
||||
|
||||
/**
 * Create landing page response
 * A static, hour-cacheable HTML page confirming the worker is reachable.
 * @returns {Response} HTML response
 */
export function createLandingPageResponse() {
  const html = `<!DOCTYPE html>
<html><head><title>9Router Worker</title></head>
<body style="font-family:system-ui;display:flex;align-items:center;justify-content:center;height:100vh;margin:0;background:#0a0a0a;color:#fff">
<div style="text-align:center">
<h1>9Router Worker</h1>
<p style="color:#888">Worker is running. Configure this URL in your 9Router dashboard.</p>
</div>
</body></html>`;

  const headers = new Headers({
    "Content-Type": "text/html; charset=utf-8",
    "Cache-Control": "public, max-age=3600"
  });
  return new Response(html, { status: 200, headers });
}
|
||||
88
cloud/src/services/storage.js
Normal file
88
cloud/src/services/storage.js
Normal file
@@ -0,0 +1,88 @@
|
||||
import * as log from "../utils/logger.js";
|
||||
|
||||
// Request-scoped cache for getMachineData (avoids multiple D1 queries per request)
|
||||
const requestCache = new Map();
|
||||
const CACHE_TTL_MS = 5000;
|
||||
|
||||
/**
 * Get machine data from D1 (with request-scope caching)
 * NOTE(review): requestCache is a module-level Map, so it actually lives for
 * the whole isolate, not a single request — entries are reused across
 * requests for up to CACHE_TTL_MS and the map is never pruned. Confirm this
 * staleness/growth is acceptable.
 * @param {string} machineId
 * @param {Object} env - Worker bindings (DB)
 * @returns {Promise<Object|null>} parsed machine record, or null when absent
 */
export async function getMachineData(machineId, env) {
  // Serve a fresh-enough cached copy without touching D1.
  const cached = requestCache.get(machineId);
  if (cached && Date.now() - cached.timestamp < CACHE_TTL_MS) {
    return cached.data;
  }

  const row = await env.DB.prepare("SELECT data FROM machines WHERE machineId = ?")
    .bind(machineId)
    .first();

  if (!row) {
    log.debug("STORAGE", `Not found: ${machineId}`);
    return null;
  }

  // The whole machine record is stored as a JSON blob in the `data` column.
  const data = JSON.parse(row.data);
  requestCache.set(machineId, { data, timestamp: Date.now() });
  log.debug("STORAGE", `Retrieved: ${machineId}`);
  return data;
}
|
||||
|
||||
/**
 * Save machine data to D1 (upsert) and refresh the in-memory cache.
 * Side effect: stamps data.updatedAt on the caller's object before storing.
 * @param {string} machineId
 * @param {Object} data
 * @param {Object} env - Worker bindings (DB)
 */
export async function saveMachineData(machineId, data, env) {
  const now = new Date().toISOString();
  data.updatedAt = now;

  // Serialize once; the same blob is used for both insert and update arms.
  const serialized = JSON.stringify(data);

  // Upsert to D1
  await env.DB.prepare(`
    INSERT INTO machines (machineId, data, updatedAt)
    VALUES (?, ?, ?)
    ON CONFLICT(machineId) DO UPDATE SET data = ?, updatedAt = ?
  `)
    .bind(machineId, serialized, now, serialized, now)
    .run();

  // Update cache after save
  requestCache.set(machineId, { data, timestamp: Date.now() });
  log.debug("STORAGE", `Saved: ${machineId}`);
}
|
||||
|
||||
/**
 * Delete machine data from D1 and drop it from the in-memory cache.
 * @param {string} machineId
 * @param {Object} env - Worker bindings (DB)
 */
export async function deleteMachineData(machineId, env) {
  const stmt = env.DB.prepare("DELETE FROM machines WHERE machineId = ?");
  await stmt.bind(machineId).run();

  // Clear cache after delete
  requestCache.delete(machineId);
  log.debug("STORAGE", `Deleted: ${machineId}`);
}
|
||||
|
||||
/**
 * Update specific fields in machine data (for token refresh, rate limit, etc.)
 * Silently no-ops when the machine or connection does not exist.
 * @param {string} machineId
 * @param {string} connectionId - provider entry id inside data.providers
 * @param {Object} updates - fields shallow-merged into the provider entry
 * @param {Object} env - Worker bindings (DB)
 */
export async function updateMachineProvider(machineId, connectionId, updates, env) {
  const data = await getMachineData(machineId, env);
  if (!data?.providers?.[connectionId]) return;

  Object.assign(data.providers[connectionId], updates);
  data.providers[connectionId].updatedAt = new Date().toISOString();

  // Persist the whole record back (saveMachineData also refreshes the cache).
  await saveMachineData(machineId, data, env);
}
|
||||
11
cloud/src/services/tokenRefresh.js
Normal file
11
cloud/src/services/tokenRefresh.js
Normal file
@@ -0,0 +1,11 @@
|
||||
// Re-export from open-sse with worker logger
|
||||
import * as log from "../utils/logger.js";
|
||||
import {
|
||||
TOKEN_EXPIRY_BUFFER_MS as BUFFER_MS,
|
||||
refreshTokenByProvider as _refreshTokenByProvider
|
||||
} from "open-sse/services/tokenRefresh.js";
|
||||
|
||||
// Re-exported expiry buffer constant (see open-sse/services/tokenRefresh.js).
export const TOKEN_EXPIRY_BUFFER_MS = BUFFER_MS;

// Same as open-sse's refreshTokenByProvider, but with the worker logger
// pre-bound as the third argument.
export const refreshTokenByProvider = (provider, credentials) =>
  _refreshTokenByProvider(provider, credentials, log);
|
||||
8
cloud/src/stubs/usageDb.js
Normal file
8
cloud/src/stubs/usageDb.js
Normal file
@@ -0,0 +1,8 @@
|
||||
// Stub for cloud worker - no-op async functions
// The cloud worker does not persist usage metrics; these stubs satisfy the
// shared import surface (mapped here via the [alias] entry in wrangler.toml)
// with inert implementations so shared code runs unchanged.
export async function saveRequestUsage() {}
export function trackPendingRequest() {}
export async function appendRequestLog() {}
// Shape mirrors the real usage DB: callers expect a { data: { history } } wrapper.
export async function getUsageDb() { return { data: { history: [] } }; }
export async function getUsageHistory() { return []; }
export async function getUsageStats() { return {}; }
export async function getRecentLogs() { return []; }
|
||||
72
cloud/src/utils/apiKey.js
Normal file
72
cloud/src/utils/apiKey.js
Normal file
@@ -0,0 +1,72 @@
|
||||
/**
|
||||
* API Key utilities for Worker
|
||||
* Supports both formats:
|
||||
* - New: sk-{machineId}-{keyId}-{crc8}
|
||||
* - Old: sk-{random8}
|
||||
*/
|
||||
|
||||
// Shared HMAC secret for API-key checksums. This is an integrity check on
// the key format, not a confidentiality secret.
const API_KEY_SECRET = "endpoint-proxy-api-key-secret";

/**
 * Generate CRC (8-char HMAC prefix) using the Web Crypto API.
 * @param {string} machineId
 * @param {string} keyId
 * @returns {Promise<string>} first 8 hex chars of HMAC-SHA256(secret, machineId + keyId)
 */
async function generateCrc(machineId, keyId) {
  const encoder = new TextEncoder();

  const hmacKey = await crypto.subtle.importKey(
    "raw",
    encoder.encode(API_KEY_SECRET),
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["sign"]
  );

  const signature = await crypto.subtle.sign(
    "HMAC",
    hmacKey,
    encoder.encode(machineId + keyId)
  );

  // Hex-encode the digest and keep only the first 8 characters.
  const hex = [...new Uint8Array(signature)]
    .map((byte) => byte.toString(16).padStart(2, "0"))
    .join("");
  return hex.slice(0, 8);
}
|
||||
|
||||
/**
 * Parse API key and extract machineId + keyId
 * New-format keys are checksum-verified (HMAC prefix) before being accepted.
 * NOTE(review): split("-") assumes machineId and keyId themselves contain no
 * hyphens — a hyphenated id (e.g. a UUID) would produce more than 4 parts
 * and be rejected. Confirm against the id/key generation code.
 * @param {string} apiKey
 * @returns {Promise<{ machineId: string|null, keyId: string, isNewFormat: boolean } | null>}
 */
export async function parseApiKey(apiKey) {
  if (!apiKey || !apiKey.startsWith("sk-")) return null;

  const parts = apiKey.split("-");

  // New format: sk-{machineId}-{keyId}-{crc8} = 4 parts
  if (parts.length === 4) {
    const [, machineId, keyId, crc] = parts;

    // Verify CRC
    const expectedCrc = await generateCrc(machineId, keyId);
    if (crc !== expectedCrc) return null;

    return { machineId, keyId, isNewFormat: true };
  }

  // Old format: sk-{random8} = 2 parts
  if (parts.length === 2) {
    // Old keys carry no machineId; the caller must supply it from the URL.
    return { machineId: null, keyId: parts[1], isNewFormat: false };
  }

  return null;
}
|
||||
|
||||
/**
 * Extract Bearer token from Authorization header
 * @param {Request} request
 * @returns {string | null} the token, or null when absent / not Bearer
 */
export function extractBearerToken(request) {
  const header = request.headers.get("Authorization") ?? "";
  if (!header.startsWith("Bearer ")) return null;
  return header.slice("Bearer ".length);
}
|
||||
|
||||
84
cloud/src/utils/logger.js
Normal file
84
cloud/src/utils/logger.js
Normal file
@@ -0,0 +1,84 @@
|
||||
// Logger utility for worker
|
||||
|
||||
// Numeric severity ranks; messages below LEVEL are suppressed.
const LOG_LEVELS = {
  DEBUG: 0,
  INFO: 1,
  WARN: 2,
  ERROR: 3
};

// Minimum severity actually emitted (DEBUG output is silenced by default).
const LEVEL = LOG_LEVELS.INFO;

// ANSI color codes
const COLORS = {
  reset: "\x1b[0m",
  red: "\x1b[31m",
  green: "\x1b[32m",
  yellow: "\x1b[33m",
  blue: "\x1b[34m",
  cyan: "\x1b[36m"
};
|
||||
|
||||
// Current wall-clock time rendered as "HH:MM" (24-hour, en-US locale).
function formatTime() {
  const options = { hour12: false, hour: "2-digit", minute: "2-digit" };
  return new Date().toLocaleTimeString("en-US", options);
}
|
||||
|
||||
/**
 * Render structured log data as a compact "k=v | k2=v2" string.
 * Strings pass through unchanged. For objects, each entry is rendered on one
 * line; non-primitive values are JSON-encoded so nested data does not
 * collapse to "[object Object]".
 * @param {string|Object|undefined} data
 * @returns {string} inline representation ("" when data is falsy)
 */
function formatInline(data) {
  if (!data) return "";
  if (typeof data === "string") return data;
  try {
    return Object.entries(data)
      .map(([key, value]) => {
        // JSON-encode nested objects/arrays; primitives keep their default
        // string form (numbers, booleans, null, undefined).
        const rendered =
          value !== null && typeof value === "object" ? JSON.stringify(value) : value;
        return `${key}=${rendered}`;
      })
      .join(" | ");
  } catch {
    // Fall back for values that cannot be enumerated/serialized.
    return String(data);
  }
}
|
||||
|
||||
// DEBUG-level log line (suppressed unless LEVEL admits DEBUG).
export function debug(tag, message, data) {
  if (LEVEL > LOG_LEVELS.DEBUG) return;
  const suffix = data ? ` | ${formatInline(data)}` : "";
  console.log(`[${formatTime()}] 🔍 [${tag}] ${message}${suffix}`);
}
|
||||
|
||||
// INFO-level log line.
export function info(tag, message, data) {
  if (LEVEL > LOG_LEVELS.INFO) return;
  const suffix = data ? ` | ${formatInline(data)}` : "";
  console.log(`[${formatTime()}] ℹ️ [${tag}] ${message}${suffix}`);
}
|
||||
|
||||
// WARN-level log line, rendered in yellow via console.warn.
export function warn(tag, message, data) {
  if (LEVEL > LOG_LEVELS.WARN) return;
  const suffix = data ? ` | ${formatInline(data)}` : "";
  const line = `[${formatTime()}] ⚠️ [${tag}] ${message}${suffix}`;
  console.warn(`${COLORS.yellow}${line}${COLORS.reset}`);
}
|
||||
|
||||
// ERROR-level log line, rendered in red via console.error.
export function error(tag, message, data) {
  if (LEVEL > LOG_LEVELS.ERROR) return;
  const suffix = data ? ` | ${formatInline(data)}` : "";
  const line = `[${formatTime()}] ❌ [${tag}] ${message}${suffix}`;
  console.error(`${COLORS.red}${line}${COLORS.reset}`);
}
|
||||
|
||||
// Log an incoming request line, e.g. "📥 GET /v1/messages".
export function request(method, path, extra) {
  const suffix = extra ? ` | ${formatInline(extra)}` : "";
  console.log(`[${formatTime()}] 📥 ${method} ${path}${suffix}`);
}
|
||||
|
||||
// Log an outgoing response line with status and elapsed milliseconds;
// statuses >= 400 get the failure icon.
export function response(status, duration, extra) {
  const icon = status < 400 ? "📤" : "💥";
  const suffix = extra ? ` | ${formatInline(extra)}` : "";
  console.log(`[${formatTime()}] ${icon} ${status} (${duration}ms)${suffix}`);
}
|
||||
|
||||
// Log a streaming (SSE) lifecycle event.
export function stream(event, data) {
  const suffix = data ? ` | ${formatInline(data)}` : "";
  console.log(`[${formatTime()}] 🌊 [STREAM] ${event}${suffix}`);
}
|
||||
|
||||
/**
 * Mask sensitive data for log output: keep only the first and last 4
 * characters ("abcd...wxyz"). Keys of 8 characters or fewer are fully
 * masked — showing 4 + 4 characters of an 8-char key would reveal the
 * entire value (the previous `< 8` guard had exactly that leak).
 * @param {string} key
 * @returns {string}
 */
export function maskKey(key) {
  if (!key || key.length <= 8) return "***";
  return `${key.slice(0, 4)}...${key.slice(-4)}`;
}
|
||||
17
cloud/wrangler.toml
Normal file
17
cloud/wrangler.toml
Normal file
@@ -0,0 +1,17 @@
|
||||
name = "9router"
|
||||
main = "src/index.js"
|
||||
compatibility_date = "2024-09-23"
|
||||
compatibility_flags = ["nodejs_compat"]
|
||||
|
||||
[alias]
|
||||
"@/lib/usageDb.js" = "./src/stubs/usageDb.js"
|
||||
|
||||
# Step 3: Paste your KV & D1 IDs here
|
||||
[[kv_namespaces]]
|
||||
binding = "KV"
|
||||
id = "YOUR_KV_NAMESPACE_ID"
|
||||
|
||||
[[d1_databases]]
|
||||
binding = "DB"
|
||||
database_name = "proxy-db"
|
||||
database_id = "YOUR_D1_DATABASE_ID"
|
||||
@@ -5,7 +5,7 @@ import PropTypes from "prop-types";
|
||||
import { Card, Button, Input, Modal, CardSkeleton } from "@/shared/components";
|
||||
import { useCopyToClipboard } from "@/shared/hooks/useCopyToClipboard";
|
||||
|
||||
const CLOUD_URL = process.env.NEXT_PUBLIC_CLOUD_URL;
|
||||
const DEFAULT_CLOUD_URL = process.env.NEXT_PUBLIC_CLOUD_URL || "";
|
||||
const CLOUD_ACTION_TIMEOUT_MS = 15000;
|
||||
|
||||
export default function APIPageClient({ machineId }) {
|
||||
@@ -17,8 +17,13 @@ export default function APIPageClient({ machineId }) {
|
||||
|
||||
// Cloud sync state
|
||||
const [cloudEnabled, setCloudEnabled] = useState(false);
|
||||
const [cloudUrl, setCloudUrl] = useState(DEFAULT_CLOUD_URL);
|
||||
const [cloudUrlInput, setCloudUrlInput] = useState(DEFAULT_CLOUD_URL);
|
||||
const [cloudUrlSaving, setCloudUrlSaving] = useState(false);
|
||||
const [showCloudModal, setShowCloudModal] = useState(false);
|
||||
const [showDisableModal, setShowDisableModal] = useState(false);
|
||||
const [showSetupModal, setShowSetupModal] = useState(false);
|
||||
const [setupStatus, setSetupStatus] = useState(null);
|
||||
const [cloudSyncing, setCloudSyncing] = useState(false);
|
||||
const [cloudStatus, setCloudStatus] = useState(null);
|
||||
const [syncStep, setSyncStep] = useState(""); // "syncing" | "verifying" | "disabling" | ""
|
||||
@@ -58,6 +63,9 @@ export default function APIPageClient({ machineId }) {
|
||||
if (res.ok) {
|
||||
const data = await res.json();
|
||||
setCloudEnabled(data.cloudEnabled || false);
|
||||
const url = data.cloudUrl || DEFAULT_CLOUD_URL;
|
||||
setCloudUrl(url);
|
||||
setCloudUrlInput(url);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error loading cloud settings:", error);
|
||||
@@ -169,6 +177,51 @@ export default function APIPageClient({ machineId }) {
|
||||
}
|
||||
};
|
||||
|
||||
  // Persist the worker URL from the setup modal via PATCH /api/settings,
  // then mirror the normalized value into local state and show a banner.
  const handleSaveCloudUrl = async () => {
    // Strip trailing /v1 or /v1/ and trailing slashes
    const trimmed = cloudUrlInput.trim().replace(/\/v1\/?$/, "").replace(/\/+$/, "");
    if (!trimmed) return;

    setCloudUrlSaving(true);
    setSetupStatus(null);
    try {
      const res = await fetch("/api/settings", {
        method: "PATCH",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ cloudUrl: trimmed }),
      });
      if (res.ok) {
        setCloudUrl(trimmed);
        setCloudUrlInput(trimmed);
        setSetupStatus({ type: "success", message: "Worker URL saved" });
      } else {
        setSetupStatus({ type: "error", message: "Failed to save Worker URL" });
      }
    } catch (error) {
      setSetupStatus({ type: "error", message: error.message });
    } finally {
      setCloudUrlSaving(false);
    }
  };
|
||||
|
||||
const handleCheckCloud = async () => {
|
||||
if (!cloudUrl) return;
|
||||
setCloudSyncing(true);
|
||||
setSetupStatus(null);
|
||||
try {
|
||||
const { ok, data } = await postCloudAction("check", 8000);
|
||||
if (ok) {
|
||||
setSetupStatus({ type: "success", message: data.message || "Worker is running" });
|
||||
} else {
|
||||
setSetupStatus({ type: "error", message: data.error || "Check failed" });
|
||||
}
|
||||
} catch {
|
||||
setSetupStatus({ type: "error", message: "Cannot reach worker" });
|
||||
} finally {
|
||||
setCloudSyncing(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleCreateKey = async () => {
|
||||
if (!newKeyName.trim()) return;
|
||||
|
||||
@@ -205,7 +258,7 @@ export default function APIPageClient({ machineId }) {
|
||||
};
|
||||
|
||||
const [baseUrl, setBaseUrl] = useState("/v1");
|
||||
const cloudEndpointNew = `${CLOUD_URL}/v1`;
|
||||
const cloudEndpointNew = cloudUrl ? `${cloudUrl}/v1` : "";
|
||||
|
||||
// Hydration fix: Only access window on client side
|
||||
useEffect(() => {
|
||||
@@ -226,17 +279,10 @@ export default function APIPageClient({ machineId }) {
|
||||
// Use new format endpoint (machineId embedded in key)
|
||||
const currentEndpoint = cloudEnabled ? cloudEndpointNew : baseUrl;
|
||||
|
||||
const cloudBenefits = [
|
||||
{ icon: "public", title: "Access Anywhere", desc: "No port forwarding needed" },
|
||||
{ icon: "group", title: "Share Endpoint", desc: "Easy team collaboration" },
|
||||
{ icon: "schedule", title: "Always Online", desc: "24/7 availability" },
|
||||
{ icon: "speed", title: "Global Edge", desc: "Fast worldwide access" },
|
||||
];
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-8">
|
||||
{/* Endpoint Card */}
|
||||
<Card className={cloudEnabled ? "" : ""}>
|
||||
<Card>
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<div>
|
||||
<h2 className="text-lg font-semibold">API Endpoint</h2>
|
||||
@@ -245,6 +291,14 @@ export default function APIPageClient({ machineId }) {
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<Button
|
||||
size="sm"
|
||||
variant="secondary"
|
||||
icon="settings"
|
||||
onClick={() => setShowSetupModal(true)}
|
||||
>
|
||||
Setup Cloudflare
|
||||
</Button>
|
||||
{cloudEnabled ? (
|
||||
<Button
|
||||
size="sm"
|
||||
@@ -261,7 +315,7 @@ export default function APIPageClient({ machineId }) {
|
||||
variant="primary"
|
||||
icon="cloud_upload"
|
||||
onClick={() => handleCloudToggle(true)}
|
||||
disabled={cloudSyncing}
|
||||
disabled={cloudSyncing || !cloudUrl}
|
||||
className="bg-linear-to-r from-primary to-blue-500 hover:from-primary-hover hover:to-blue-600"
|
||||
>
|
||||
Enable Cloud
|
||||
@@ -271,7 +325,7 @@ export default function APIPageClient({ machineId }) {
|
||||
</div>
|
||||
|
||||
{/* Endpoint URL */}
|
||||
<div className="flex gap-2 mb-3">
|
||||
<div className="flex gap-2">
|
||||
<Input
|
||||
value={currentEndpoint}
|
||||
readOnly
|
||||
@@ -286,6 +340,16 @@ export default function APIPageClient({ machineId }) {
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Cloud Status */}
|
||||
{cloudStatus && (
|
||||
<div className={`mt-3 p-2 rounded text-sm ${
|
||||
cloudStatus.type === "success" ? "bg-green-500/10 text-green-600 dark:text-green-400" :
|
||||
cloudStatus.type === "warning" ? "bg-yellow-500/10 text-yellow-600 dark:text-yellow-400" :
|
||||
"bg-red-500/10 text-red-600 dark:text-red-400"
|
||||
}`}>
|
||||
{cloudStatus.message}
|
||||
</div>
|
||||
)}
|
||||
</Card>
|
||||
|
||||
{/* API Keys */}
|
||||
@@ -344,62 +408,66 @@ export default function APIPageClient({ machineId }) {
|
||||
)}
|
||||
</Card>
|
||||
|
||||
{/* Cloud Proxy Card - Hidden */}
|
||||
{false && (
|
||||
<Card className={cloudEnabled ? "bg-primary/5" : ""}>
|
||||
<div className="flex flex-col gap-4">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-2">
|
||||
<div className={`p-2 rounded-lg ${cloudEnabled ? "bg-primary text-white" : "bg-sidebar text-text-muted"}`}>
|
||||
<span className="material-symbols-outlined text-xl">cloud</span>
|
||||
</div>
|
||||
<div>
|
||||
<h2 className="text-lg font-semibold">Cloud Proxy</h2>
|
||||
<p className="text-xs text-text-muted">
|
||||
{cloudEnabled ? "Connected & Ready" : "Access your API from anywhere"}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{cloudEnabled ? (
|
||||
<Button
|
||||
size="sm"
|
||||
variant="secondary"
|
||||
icon="cloud_off"
|
||||
onClick={() => handleCloudToggle(false)}
|
||||
disabled={cloudSyncing}
|
||||
className="bg-red-500/10! text-red-500! hover:bg-red-500/20! border-red-500/30!"
|
||||
>
|
||||
Disable
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
variant="primary"
|
||||
icon="cloud_upload"
|
||||
onClick={() => handleCloudToggle(true)}
|
||||
disabled={cloudSyncing}
|
||||
className="bg-linear-to-r from-primary to-blue-500 hover:from-primary-hover hover:to-blue-600 px-6"
|
||||
>
|
||||
Enable Cloud
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Benefits Grid */}
|
||||
<div className="grid grid-cols-2 md:grid-cols-4 gap-3">
|
||||
{cloudBenefits.map((benefit) => (
|
||||
<div key={benefit.title} className="flex flex-col items-center text-center p-3 rounded-lg bg-sidebar/50">
|
||||
<span className="material-symbols-outlined text-xl text-primary mb-1">{benefit.icon}</span>
|
||||
<p className="text-xs font-semibold">{benefit.title}</p>
|
||||
<p className="text-xs text-text-muted">{benefit.desc}</p>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
{/* Setup Cloud Modal */}
|
||||
<Modal
|
||||
isOpen={showSetupModal}
|
||||
title="Setup Cloudflare Worker"
|
||||
onClose={() => { setShowSetupModal(false); setSetupStatus(null); }}
|
||||
>
|
||||
<div className="flex flex-col gap-4">
|
||||
<div className="bg-blue-50 dark:bg-blue-900/20 border border-blue-200 dark:border-blue-800 rounded-lg p-3">
|
||||
<p className="text-xs text-blue-700 dark:text-blue-300">
|
||||
<code className="font-semibold">https://9router.com</code> is a pre-configured worker ready to use. You can also deploy your own.
|
||||
</p>
|
||||
</div>
|
||||
</Card>
|
||||
)}
|
||||
<div>
|
||||
<p className="text-sm font-medium mb-2">Worker URL</p>
|
||||
<div className="flex gap-2">
|
||||
<Input
|
||||
value={cloudUrlInput}
|
||||
onChange={(e) => setCloudUrlInput(e.target.value)}
|
||||
placeholder="https://9router.your-subdomain.workers.dev"
|
||||
className="flex-1 font-mono text-sm"
|
||||
/>
|
||||
</div>
|
||||
<p className="text-xs text-text-muted mt-2">
|
||||
Deploy your own worker from <code className="text-xs bg-sidebar px-1 py-0.5 rounded">app/cloud/</code> directory.{" "}
|
||||
<a href="https://github.com/decolua/9router/tree/main/app/cloud" target="_blank" rel="noopener noreferrer" className="text-primary hover:underline">
|
||||
Setup guide →
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Status in modal */}
|
||||
{setupStatus && (
|
||||
<div className={`p-2 rounded text-sm ${
|
||||
setupStatus.type === "success" ? "bg-green-500/10 text-green-600 dark:text-green-400" :
|
||||
"bg-red-500/10 text-red-600 dark:text-red-400"
|
||||
}`}>
|
||||
{setupStatus.message}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
onClick={handleSaveCloudUrl}
|
||||
fullWidth
|
||||
disabled={cloudUrlSaving || !cloudUrlInput.trim() || cloudUrlInput.trim().replace(/\/v1\/?$/, "").replace(/\/+$/, "") === cloudUrl}
|
||||
>
|
||||
{cloudUrlSaving ? "Saving..." : "Save"}
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleCheckCloud}
|
||||
variant="secondary"
|
||||
fullWidth
|
||||
disabled={cloudSyncing || !cloudUrl}
|
||||
icon="check_circle"
|
||||
>
|
||||
{cloudSyncing ? "Checking..." : "Check"}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
|
||||
{/* Cloud Enable Modal */}
|
||||
<Modal
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { getProviderConnections, getModelAliases, getCombos, getApiKeys, createApiKey, updateProviderConnection, updateSettings } from "@/lib/localDb";
|
||||
import { getProviderConnections, getModelAliases, getCombos, getApiKeys, createApiKey, updateProviderConnection, updateSettings, getCloudUrl } from "@/lib/localDb";
|
||||
import { getConsistentMachineId } from "@/shared/utils/machineId";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import os from "os";
|
||||
|
||||
const CLOUD_URL = process.env.CLOUD_URL || process.env.NEXT_PUBLIC_CLOUD_URL;
|
||||
const CLOUD_SYNC_TIMEOUT_MS = Number(process.env.CLOUD_SYNC_TIMEOUT_MS || 12000);
|
||||
|
||||
async function getResolvedCloudUrl() {
|
||||
return await getCloudUrl();
|
||||
}
|
||||
|
||||
async function fetchWithTimeout(url, options = {}, timeoutMs = CLOUD_SYNC_TIMEOUT_MS) {
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
|
||||
@@ -50,6 +53,8 @@ export async function POST(request) {
|
||||
case "disable":
|
||||
await updateSettings({ cloudEnabled: false });
|
||||
return handleDisable(machineId, request);
|
||||
case "check":
|
||||
return handleCheck();
|
||||
default:
|
||||
return NextResponse.json({ error: "Invalid action" }, { status: 400 });
|
||||
}
|
||||
@@ -65,8 +70,9 @@ export async function POST(request) {
|
||||
* @param {string|null} createdKey - Key created during enable
|
||||
*/
|
||||
export async function syncToCloud(machineId, createdKey = null) {
|
||||
if (!CLOUD_URL) {
|
||||
return { error: "NEXT_PUBLIC_CLOUD_URL is not configured" };
|
||||
const cloudUrl = await getResolvedCloudUrl();
|
||||
if (!cloudUrl) {
|
||||
return { error: "Cloud URL is not configured" };
|
||||
}
|
||||
|
||||
// Get current data from db
|
||||
@@ -78,7 +84,7 @@ export async function syncToCloud(machineId, createdKey = null) {
|
||||
let response;
|
||||
try {
|
||||
// Send to Cloud
|
||||
response = await fetchWithTimeout(`${CLOUD_URL}/sync/${machineId}`, {
|
||||
response = await fetchWithTimeout(`${cloudUrl}/sync/${machineId}`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
@@ -140,7 +146,8 @@ async function syncAndVerify(machineId, createdKey, existingKeys) {
|
||||
}
|
||||
|
||||
try {
|
||||
const pingResponse = await fetchWithTimeout(`${CLOUD_URL}/${machineId}/v1/verify`, {
|
||||
const cloudUrl = await getResolvedCloudUrl();
|
||||
const pingResponse = await fetchWithTimeout(`${cloudUrl}/${machineId}/v1/verify`, {
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${apiKey}`,
|
||||
@@ -173,13 +180,14 @@ async function syncAndVerify(machineId, createdKey, existingKeys) {
|
||||
* Disable Cloud - delete cache and update Claude CLI settings
|
||||
*/
|
||||
async function handleDisable(machineId, request) {
|
||||
if (!CLOUD_URL) {
|
||||
return NextResponse.json({ error: "NEXT_PUBLIC_CLOUD_URL is not configured" }, { status: 500 });
|
||||
const cloudUrl = await getResolvedCloudUrl();
|
||||
if (!cloudUrl) {
|
||||
return NextResponse.json({ error: "Cloud URL is not configured" }, { status: 500 });
|
||||
}
|
||||
|
||||
let response;
|
||||
try {
|
||||
response = await fetchWithTimeout(`${CLOUD_URL}/sync/${machineId}`, {
|
||||
response = await fetchWithTimeout(`${cloudUrl}/sync/${machineId}`, {
|
||||
method: "DELETE"
|
||||
});
|
||||
} catch (error) {
|
||||
@@ -198,7 +206,7 @@ async function handleDisable(machineId, request) {
|
||||
|
||||
// Update Claude CLI settings to use local endpoint
|
||||
const host = request.headers.get("host") || "localhost:20128";
|
||||
await updateClaudeSettingsToLocal(machineId, host);
|
||||
await updateClaudeSettingsToLocal(machineId, host, cloudUrl);
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
@@ -209,10 +217,10 @@ async function handleDisable(machineId, request) {
|
||||
/**
|
||||
* Update Claude CLI settings to use local endpoint (only if currently using cloud)
|
||||
*/
|
||||
async function updateClaudeSettingsToLocal(machineId, host) {
|
||||
async function updateClaudeSettingsToLocal(machineId, host, cloudUrl) {
|
||||
try {
|
||||
const settingsPath = path.join(os.homedir(), ".claude", "settings.json");
|
||||
const cloudUrl = `${CLOUD_URL}/${machineId}`;
|
||||
const cloudEndpoint = `${cloudUrl}/${machineId}`;
|
||||
const localUrl = `http://${host}`;
|
||||
|
||||
// Read current settings
|
||||
@@ -229,19 +237,43 @@ async function updateClaudeSettingsToLocal(machineId, host) {
|
||||
|
||||
// Check if ANTHROPIC_BASE_URL matches cloud URL
|
||||
const currentUrl = settings.env?.ANTHROPIC_BASE_URL;
|
||||
if (!currentUrl || currentUrl !== cloudUrl) {
|
||||
if (!currentUrl || currentUrl !== cloudEndpoint) {
|
||||
return; // Not using cloud URL, don't modify
|
||||
}
|
||||
|
||||
// Update to local URL
|
||||
settings.env.ANTHROPIC_BASE_URL = localUrl;
|
||||
await fs.writeFile(settingsPath, JSON.stringify(settings, null, 2));
|
||||
console.log(`Updated Claude CLI settings: ${cloudUrl} → ${localUrl}`);
|
||||
console.log(`Updated Claude CLI settings: ${cloudEndpoint} → ${localUrl}`);
|
||||
} catch (error) {
|
||||
console.log("Failed to update Claude CLI settings:", error.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if cloud worker is reachable
|
||||
*/
|
||||
async function handleCheck() {
|
||||
const cloudUrl = await getResolvedCloudUrl();
|
||||
if (!cloudUrl) {
|
||||
return NextResponse.json({ error: "Cloud URL is not configured" }, { status: 400 });
|
||||
}
|
||||
|
||||
try {
|
||||
const res = await fetchWithTimeout(`${cloudUrl}/health`, { method: "GET" }, 5000);
|
||||
if (res.ok) {
|
||||
return NextResponse.json({ success: true, message: "Worker is running" });
|
||||
}
|
||||
return NextResponse.json({ error: `Worker responded with ${res.status}` }, { status: 502 });
|
||||
} catch (error) {
|
||||
const isTimeout = error?.name === "AbortError";
|
||||
return NextResponse.json(
|
||||
{ error: isTimeout ? "Worker request timeout" : "Cannot reach worker" },
|
||||
{ status: 502 }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update local db with data from Cloud
|
||||
* Simple logic: if Cloud is newer, sync entire provider
|
||||
|
||||
@@ -746,6 +746,17 @@ export async function isCloudEnabled() {
|
||||
return settings.cloudEnabled === true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cloud URL (UI config > env > default)
|
||||
*/
|
||||
export async function getCloudUrl() {
|
||||
const settings = await getSettings();
|
||||
return settings.cloudUrl
|
||||
|| process.env.CLOUD_URL
|
||||
|| process.env.NEXT_PUBLIC_CLOUD_URL
|
||||
|| "";
|
||||
}
|
||||
|
||||
// ============ Pricing ============
|
||||
|
||||
/**
|
||||
|
||||
Reference in New Issue
Block a user