Feature: RTK compress

This commit is contained in:
decolua
2026-04-22 15:36:51 +07:00
parent e1a219dba6
commit 8de9aae90c
26 changed files with 1612 additions and 0 deletions

View File

@@ -0,0 +1,15 @@
// Port of apply_filter (rtk/src/cmds/system/pipe_cmd.rs) — catch_unwind equivalent
// On panic/error: passthrough raw output + warn to stderr
// Run a filter defensively (catch_unwind analogue): any throw or non-string
// result falls back to the untouched input, with a warning on stderr.
export function safeApply(fn, text) {
  if (typeof fn !== "function") return text;
  try {
    const result = fn(text);
    return typeof result === "string" ? result : text;
  } catch (err) {
    // Rust: eprintln!("[rtk] warning: filter panicked — passing through raw output")
    const label = fn.filterName || fn.name || "anonymous";
    const reason = err?.message || err;
    console.warn(`[rtk] warning: filter '${label}' panicked — passing through raw output: ${reason}`);
    return text;
  }
}

104
open-sse/rtk/autodetect.js Normal file
View File

@@ -0,0 +1,104 @@
// Port of auto_detect_filter (rtk/src/cmds/system/pipe_cmd.rs:132-188) + JS extras
// Order: git-diff → git-status → grep → find → tree → ls → search-list
// → read-numbered → dedup-log → smart-truncate → null
import { DETECT_WINDOW, READ_NUMBERED_MIN_HIT_RATIO, SMART_TRUNCATE_MIN_LINES } from "./constants.js";
import { gitDiff } from "./filters/gitDiff.js";
import { gitStatus } from "./filters/gitStatus.js";
import { grep } from "./filters/grep.js";
import { find } from "./filters/find.js";
import { dedupLog } from "./filters/dedupLog.js";
import { ls } from "./filters/ls.js";
import { tree } from "./filters/tree.js";
import { smartTruncate } from "./filters/smartTruncate.js";
import { readNumbered, READ_NUMBERED_LINE_RE } from "./filters/readNumbered.js";
import { searchList, SEARCH_LIST_HEADER_RE } from "./filters/searchList.js";
// Unified diff file header ("diff --git a/x b/x").
const RE_GIT_DIFF = /^diff --git /m;
// Hunk header ("@@ -1,4 +1,6 @@") — catches diffs whose file header was trimmed.
const RE_GIT_DIFF_HUNK = /^@@ /m;
// Long-form `git status` phrases.
const RE_GIT_STATUS = /^On branch |^nothing to commit|^Changes (not |to be )|^Untracked files:/m;
// Porcelain status row: two status columns, a space, then a non-space path char.
const RE_PORCELAIN = /^[ MADRCU?!][ MADRCU?!] \S/m;
// Box-drawing glyphs emitted by `tree`.
const RE_TREE_GLYPH = /[├└]──|│ /;
// `ls -l` permission string at the start of a row (e.g. "-rw-r--r--").
const RE_LS_ROW = /^[-dlbcps][rwx-]{9}/m;
// `ls -l` "total N" header line.
const RE_LS_TOTAL = /^total \d+$/m;
// Pick a filter by sniffing the first DETECT_WINDOW chars of the payload.
// Priority order mirrors the Rust port and matters: earlier formats are more
// specific, later ones are generic fallbacks. Returns null when nothing fits.
export function autoDetectFilter(text) {
  // Rust: floor_char_boundary to avoid UTF-8 split — JS .slice() by char is safe
  const head = text.length > DETECT_WINDOW ? text.slice(0, DETECT_WINDOW) : text;

  if (RE_GIT_DIFF.test(head) || RE_GIT_DIFF_HUNK.test(head)) return gitDiff;
  if (RE_GIT_STATUS.test(head) || isMostlyPorcelain(head)) return gitStatus;

  const headLines = head.split("\n");
  const nonBlank = headLines.filter((l) => l.trim().length > 0);

  // Rust grep rule: ANY of the first 5 non-blank lines matches "file:number:content"
  if (nonBlank.slice(0, 5).some(isGrepLine)) return grep;
  // Rust find rule: at least 3 lines and ALL non-blank lines path-like (no ':')
  if (nonBlank.length >= 3 && nonBlank.every(isPathLike)) return find;
  // Tree: contains box-drawing glyphs typical of `tree` command
  if (RE_TREE_GLYPH.test(head)) return tree;
  // ls -la: has "total N" header or >=3 rows starting with a perms string
  if (RE_LS_TOTAL.test(head) || countMatches(head, RE_LS_ROW) >= 3) return ls;
  // Cursor Glob search list header
  if (SEARCH_LIST_HEADER_RE.test(head)) return searchList;
  // Line-numbered file dump (" N|content") — fire only if many lines match
  if (headLines.length >= SMART_TRUNCATE_MIN_LINES && isLineNumbered(headLines)) {
    return readNumbered;
  }
  // Fallback: dedupLog for generic multi-line noise with duplicates
  if (nonBlank.length >= 5) return dedupLog;
  // Last resort: big unstructured blob — note the line count here is taken
  // on the FULL text, not the detection window
  if (text.split("\n").length >= SMART_TRUNCATE_MIN_LINES) return smartTruncate;
  return null;
}
// Mirrors Rust splitn(3, ':'): "file:lineno:rest" where the middle segment
// between the first two colons must be all digits.
function isGrepLine(line) {
  const firstColon = line.indexOf(":");
  if (firstColon < 0) return false;
  const secondColon = line.indexOf(":", firstColon + 1);
  if (secondColon < 0) return false;
  const middle = line.slice(firstColon + 1, secondColon);
  return /^\d+$/.test(middle);
}
// A "path-like" line: non-blank, colon-free, and either dot-relative,
// absolute, or containing a path separator.
function isPathLike(line) {
  const trimmed = line.trim();
  if (trimmed.length === 0 || trimmed.includes(":")) return false;
  return trimmed.startsWith(".") || trimmed.startsWith("/") || trimmed.includes("/");
}
// True when at least 60% of the non-blank lines look like porcelain status
// rows ("XY path"), given at least 3 lines to judge from.
function isMostlyPorcelain(head) {
  const rows = head.split("\n").filter((l) => l.trim());
  if (rows.length < 3) return false;
  let porcelainRows = 0;
  for (const row of rows) {
    if (RE_PORCELAIN.test(row)) porcelainRows += 1;
  }
  return porcelainRows / rows.length >= 0.6;
}
// Sample up to the first 100 lines and require the hit ratio of
// " N|content"-shaped lines (among non-empty ones) to clear the threshold.
function isLineNumbered(lines) {
  const sample = lines.slice(0, 100);
  let matched = 0;
  let counted = 0;
  for (const line of sample) {
    if (line.length === 0) continue;
    counted += 1;
    if (READ_NUMBERED_LINE_RE.test(line)) matched += 1;
  }
  // Fewer than 5 usable lines is too little evidence to decide.
  return counted >= 5 && matched / counted >= READ_NUMBERED_MIN_HIT_RATIO;
}
// Count occurrences of `re` in `text`, forcing the global flag so
// String.prototype.match returns every match rather than just the first.
function countMatches(text, re) {
  const flags = re.flags.includes("g") ? re.flags : `${re.flags}g`;
  const matches = text.match(new RegExp(re.source, flags));
  return matches ? matches.length : 0;
}

54
open-sse/rtk/constants.js Normal file
View File

@@ -0,0 +1,54 @@
// RTK port constants (mirror Rust defaults)
export const RAW_CAP = 10 * 1024 * 1024; // 10 MiB
export const MIN_COMPRESS_SIZE = 500; // bytes; skip tiny blobs
// Autodetect only peeks at a prefix of the payload, not the whole blob.
export const DETECT_WINDOW = 1024; // autodetect peeks first N chars
export const GIT_DIFF_HUNK_MAX_LINES = 100; // per-hunk line cap
export const GIT_DIFF_CONTEXT_KEEP = 3; // context lines around changes
export const DEDUP_LINE_MAX = 2000; // dedupLog truncation cap
// Rust pipe_cmd.rs parity caps
export const GREP_PER_FILE_MAX = 10; // match rust: matches.iter().take(10)
export const FIND_PER_DIR_MAX = 10; // match rust: files.iter().take(10)
export const FIND_TOTAL_DIR_MAX = 20; // match rust: dirs.iter().take(20)
// git status caps (rust config::limits())
export const STATUS_MAX_FILES = 10; // config::limits().status_max_files
export const STATUS_MAX_UNTRACKED = 10; // config::limits().status_max_untracked
// ls compact_ls (rtk/src/cmds/system/ls.rs)
export const LS_EXT_SUMMARY_TOP = 5; // top-N extensions in summary
// Directories always hidden from compacted listings (build artifacts, VCS
// metadata, dependency caches) to keep LLM context lean.
export const LS_NOISE_DIRS = [
  "node_modules", ".git", "target", "__pycache__",
  ".next", "dist", "build", ".venv", "venv",
  ".cache", ".idea", ".vscode", ".DS_Store"
];
// tree filter_tree_output cap (no rust cap, we add one to be safe)
export const TREE_MAX_LINES = 200;
// Cursor Glob "Result of search in '...' (total N files):" list
export const SEARCH_LIST_PER_DIR_MAX = 10;
export const SEARCH_LIST_TOTAL_DIR_MAX = 20;
// Smart truncate (port of filter.rs smart_truncate fallback)
export const SMART_TRUNCATE_HEAD = 120; // lines kept from top
export const SMART_TRUNCATE_TAIL = 60; // lines kept from bottom
export const SMART_TRUNCATE_MIN_LINES = 250; // only kick in above this
// readNumbered (files with " N|content" lines, e.g. Cursor read_file)
// Fraction of sampled non-empty lines that must look numbered.
export const READ_NUMBERED_MIN_HIT_RATIO = 0.7;
// Filter name strings (Rust parity + JS extras)
export const FILTERS = {
  GIT_DIFF: "git-diff",
  GIT_STATUS: "git-status",
  GIT_LOG: "git-log",
  GREP: "grep",
  FIND: "find",
  LS: "ls",
  TREE: "tree",
  DEDUP_LOG: "dedup-log",
  SMART_TRUNCATE: "smart-truncate",
  READ_NUMBERED: "read-numbered",
  SEARCH_LIST: "search-list"
};

View File

@@ -0,0 +1,44 @@
// Generic fallback: collapse consecutive duplicate lines + blank-line dedupe + hard line cap
import { DEDUP_LINE_MAX } from "../constants.js";
/**
 * Collapse runs of identical consecutive lines into a single occurrence plus
 * a "... (N duplicate lines)" marker, squeeze blank-line runs down to one
 * blank, and hard-cap total output length.
 * @param {string} input     raw multi-line text
 * @param {number} [lineMax] output line cap (default DEDUP_LINE_MAX); exposed
 *                           as a parameter for testability and reuse
 * @returns {string} compacted text
 */
export function dedupLog(input, lineMax = DEDUP_LINE_MAX) {
  const lines = input.split("\n");
  const out = [];
  let prev = null;     // last emitted non-blank line
  let runCount = 0;    // occurrences of `prev` seen so far (incl. emitted one)
  let blankStreak = 0; // consecutive blank lines seen
  // Emit the duplicate marker for the run that just ended (if any).
  const flushRun = () => {
    if (prev !== null && runCount > 1) {
      out.push(` ... (${runCount - 1} duplicate lines)`);
    }
  };
  for (const line of lines) {
    if (line.trim() === "") {
      // BUGFIX: flush the pending duplicate run BEFORE emitting the blank
      // line, so the marker stays adjacent to the lines it summarizes
      // (previously it was pushed after the blank, out of order).
      flushRun();
      prev = null;
      runCount = 0;
      if (blankStreak < 1) out.push(line); // keep at most one blank in a row
      blankStreak += 1;
      continue;
    }
    blankStreak = 0;
    if (line === prev) {
      runCount += 1; // extend current duplicate run; emit nothing yet
      continue;
    }
    flushRun();
    out.push(line);
    prev = line;
    runCount = 1;
    if (out.length >= lineMax) {
      out.push(`... (truncated at ${lineMax} lines)`);
      return out.join("\n");
    }
  }
  flushRun();
  return out.join("\n");
}
dedupLog.filterName = "dedup-log";

View File

@@ -0,0 +1,49 @@
// Port of find_wrapper (rtk/src/cmds/system/pipe_cmd.rs:89-128)
// Group by parent dir, show basenames, cap 10/dir and 20 dirs total
import { FIND_PER_DIR_MAX, FIND_TOTAL_DIR_MAX } from "../constants.js";
/**
 * Group paths by parent directory and print basenames, capping output.
 * @param {string} input         newline-separated paths (e.g. `find` output)
 * @param {number} [perDirMax]   files listed per directory (default FIND_PER_DIR_MAX)
 * @param {number} [totalDirMax] directories listed in total (default FIND_TOTAL_DIR_MAX)
 * @returns {string} compacted listing, or the input unchanged when empty
 */
export function find(input, perDirMax = FIND_PER_DIR_MAX, totalDirMax = FIND_TOTAL_DIR_MAX) {
  const lines = input.split("\n").filter(l => l.trim());
  if (lines.length === 0) return input;
  // Bucket basenames under their parent directory.
  const byDir = new Map();
  for (const path of lines) {
    const lastSlash = path.lastIndexOf("/");
    let dir;
    let basename;
    if (lastSlash === -1) {
      dir = "."; // bare filename → current directory
      basename = path;
    } else {
      // Rust: PathBuf::from(path).parent().display() + file_name().display();
      // a root-level path like "/etc" has parent "" → normalize to "/"
      dir = path.slice(0, lastSlash) || "/";
      basename = path.slice(lastSlash + 1);
    }
    if (!byDir.has(dir)) byDir.set(dir, []);
    byDir.get(dir).push(basename);
  }
  // Rust: dirs.sort_by_key(|(d, _)| d.clone())
  const dirs = Array.from(byDir.keys()).sort();
  let out = `${lines.length} files in ${dirs.length} dirs:\n\n`;
  for (const dir of dirs.slice(0, totalDirMax)) {
    const files = byDir.get(dir);
    out += `${dir}/ (${files.length}):\n`;
    for (const f of files.slice(0, perDirMax)) out += ` ${f}\n`;
    if (files.length > perDirMax) {
      out += ` +${files.length - perDirMax}\n`;
    }
    out += "\n";
  }
  if (dirs.length > totalDirMax) {
    out += `+${dirs.length - totalDirMax} more dirs\n`;
  }
  return out;
}
find.filterName = "find";

View File

@@ -0,0 +1,92 @@
// Port of Rust git::compact_diff (src/cmds/git/git.rs L325-413)
// Compacts unified diff: file headers, hunk-level truncation at 100 lines, +/-/context counting
import { GIT_DIFF_HUNK_MAX_LINES } from "../constants.js";
/**
 * Compact a unified diff for LLM context (port of git::compact_diff).
 * Keeps file headers and hunk headers, caps each hunk at
 * GIT_DIFF_HUNK_MAX_LINES emitted lines, and appends "+N -M" totals per file.
 * @param {string} diff     raw `git diff` output
 * @param {number} maxLines global cap on emitted result lines (default 500)
 * @returns {string} compacted diff
 */
export function gitDiff(diff, maxLines = 500) {
  const result = [];
  let currentFile = "";      // path from the most recent "diff --git" header
  let added = 0;             // '+' lines counted for the current file
  let removed = 0;           // '-' lines counted for the current file
  let inHunk = false;        // inside a hunk body (after an "@@" header)
  let hunkShown = 0;         // lines emitted for the current hunk so far
  let hunkSkipped = 0;       // lines suppressed once the per-hunk cap was hit
  let wasTruncated = false;  // whether any truncation marker was emitted
  const maxHunkLines = GIT_DIFF_HUNK_MAX_LINES;
  const lines = diff.split("\n");
  outer: for (const line of lines) {
    if (line.startsWith("diff --git")) {
      // New file section: flush the previous hunk's truncation marker and
      // the previous file's +/- totals before printing the new header.
      if (hunkSkipped > 0) {
        result.push(` ... (${hunkSkipped} lines truncated)`);
        wasTruncated = true;
        hunkSkipped = 0;
      }
      if (currentFile && (added > 0 || removed > 0)) {
        result.push(` +${added} -${removed}`);
      }
      // "diff --git a/path b/path" → take everything after the first " b/".
      const parts = line.split(" b/");
      currentFile = parts.length > 1 ? parts.slice(1).join(" b/") : "unknown";
      result.push(`\n${currentFile}`);
      added = 0;
      removed = 0;
      inHunk = false;
      hunkShown = 0;
    } else if (line.startsWith("@@")) {
      // New hunk: flush the prior hunk's truncation marker, reset the cap.
      if (hunkSkipped > 0) {
        result.push(` ... (${hunkSkipped} lines truncated)`);
        wasTruncated = true;
        hunkSkipped = 0;
      }
      inHunk = true;
      hunkShown = 0;
      result.push(` ${line}`);
    } else if (inHunk) {
      if (line.startsWith("+") && !line.startsWith("+++")) {
        // Added line ("+++" file header excluded).
        added += 1;
        if (hunkShown < maxHunkLines) {
          result.push(` ${line}`);
          hunkShown += 1;
        } else {
          hunkSkipped += 1;
        }
      } else if (line.startsWith("-") && !line.startsWith("---")) {
        // Removed line ("---" file header excluded).
        removed += 1;
        if (hunkShown < maxHunkLines) {
          result.push(` ${line}`);
          hunkShown += 1;
        } else {
          hunkSkipped += 1;
        }
      } else if (hunkShown < maxHunkLines && !line.startsWith("\\")) {
        // Context line; "\ No newline at end of file" markers are dropped.
        // NOTE(review): context before the first shown line of a hunk is
        // skipped (the hunkShown > 0 guard), and over-cap context does NOT
        // increment hunkSkipped — presumably Rust parity; confirm against
        // git.rs compact_diff.
        if (hunkShown > 0) {
          result.push(` ${line}`);
          hunkShown += 1;
        }
      }
    }
    // Global cap on compacted output size.
    if (result.length >= maxLines) {
      result.push("\n... (more changes truncated)");
      wasTruncated = true;
      break outer;
    }
  }
  // Flush the trailing truncation marker and the last file's +/- totals.
  if (hunkSkipped > 0) {
    result.push(` ... (${hunkSkipped} lines truncated)`);
    wasTruncated = true;
  }
  if (currentFile && (added > 0 || removed > 0)) {
    result.push(` +${added} -${removed}`);
  }
  if (wasTruncated) {
    result.push("[full diff: rtk git diff --no-compact]");
  }
  return result.join("\n");
}
gitDiff.filterName = "git-diff";

View File

@@ -0,0 +1,117 @@
// Port of git::format_status_output (rtk/src/cmds/git/git.rs:619-730)
// Output format:
// * <branch>
// + Staged: N files
// path1
// ... +K more
// ~ Modified: N files
// ? Untracked: N files
// conflicts: N files
// clean — nothing to commit
import { STATUS_MAX_FILES, STATUS_MAX_UNTRACKED } from "../constants.js";
/**
 * Compact `git status` output (port of git::format_status_output).
 * Accepts both porcelain rows ("XY path") and long-form phrasing.
 * @param {string} input raw `git status` text
 * @returns {string} summary: branch line, staged/modified/untracked sections,
 *                   conflict count, or a "clean" line
 */
export function gitStatus(input) {
  const lines = input.split("\n");
  // Empty or whitespace-only input → treat as a clean tree.
  if (lines.length === 0 || (lines.length === 1 && !lines[0].trim())) {
    return "Clean working tree";
  }
  let branch = "";
  const stagedFiles = [];
  const modifiedFiles = [];
  const untrackedFiles = [];
  let staged = 0;
  let modified = 0;
  let untracked = 0;
  let conflicts = 0;
  for (const raw of lines) {
    if (!raw.trim()) continue;
    // Long-form branch detection (LLM usually sends this, not porcelain)
    const longBranch = raw.match(/^On branch (\S+)/);
    if (longBranch) { branch = longBranch[1]; continue; }
    // Porcelain branch header: "## main...origin/main"
    if (raw.startsWith("##")) { branch = raw.replace(/^##\s*/, ""); continue; }
    // Porcelain status row: index column, worktree column, space, path.
    if (raw.length >= 3 && /^[ MADRCU?!][ MADRCU?!] /.test(raw)) {
      const x = raw[0]; // index (staged) column
      const y = raw[1]; // worktree column
      const file = raw.slice(3);
      if (raw.slice(0, 2) === "??") {
        untracked++;
        untrackedFiles.push(file);
        continue;
      }
      if ("MADRC".includes(x)) {
        staged++;
        stagedFiles.push(file);
      } else if (x === "U") {
        // NOTE(review): only x === "U" counts as a conflict here; other
        // conflict pairs ("AA", "DD", " U") are not detected — confirm
        // against the Rust implementation.
        conflicts++;
      }
      // A file may be both staged and modified (e.g. "MM").
      if (y === "M" || y === "D") {
        modified++;
        modifiedFiles.push(file);
      }
      continue;
    }
    // Long form fallback ("modified: path", "new file: path", ...)
    const longMatch = raw.match(/^\s*(modified|new file|deleted|renamed|both modified):\s+(.+)$/);
    if (longMatch) {
      const kind = longMatch[1];
      const path = longMatch[2].trim();
      // NOTE(review): long-form lines carry no staged/unstaged section
      // context here, so "modified"/"deleted" always count as modified and
      // "new file"/"renamed" as staged — an approximation of porcelain.
      if (kind === "both modified") { conflicts++; }
      else if (kind === "modified" || kind === "deleted") { modified++; modifiedFiles.push(path); }
      else if (kind === "new file" || kind === "renamed") { staged++; stagedFiles.push(path); }
      continue;
    }
    // "Untracked files:" section — gather bare paths after this marker
    // Handled implicitly: plain paths without markers are skipped (safer).
  }
  let out = "";
  if (branch) out += `* ${branch}\n`;
  if (staged > 0) {
    out += `+ Staged: ${staged} files\n`;
    for (const f of stagedFiles.slice(0, STATUS_MAX_FILES)) out += ` ${f}\n`;
    if (stagedFiles.length > STATUS_MAX_FILES) {
      out += ` ... +${stagedFiles.length - STATUS_MAX_FILES} more\n`;
    }
  }
  if (modified > 0) {
    out += `~ Modified: ${modified} files\n`;
    for (const f of modifiedFiles.slice(0, STATUS_MAX_FILES)) out += ` ${f}\n`;
    if (modifiedFiles.length > STATUS_MAX_FILES) {
      out += ` ... +${modifiedFiles.length - STATUS_MAX_FILES} more\n`;
    }
  }
  if (untracked > 0) {
    out += `? Untracked: ${untracked} files\n`;
    for (const f of untrackedFiles.slice(0, STATUS_MAX_UNTRACKED)) out += ` ${f}\n`;
    if (untrackedFiles.length > STATUS_MAX_UNTRACKED) {
      out += ` ... +${untrackedFiles.length - STATUS_MAX_UNTRACKED} more\n`;
    }
  }
  if (conflicts > 0) {
    out += `conflicts: ${conflicts} files\n`;
  }
  if (staged === 0 && modified === 0 && untracked === 0 && conflicts === 0) {
    out += "clean — nothing to commit\n";
  }
  // Strip trailing newlines.
  return out.replace(/\n+$/, "");
}
gitStatus.filterName = "git-status";

View File

@@ -0,0 +1,48 @@
// Port of grep_wrapper (rtk/src/cmds/system/pipe_cmd.rs:50-86)
// Input format: "file:lineno:content" — splitn(3, ':') in Rust
import { GREP_PER_FILE_MAX } from "../constants.js";
/**
 * Compact "file:lineno:content" grep output, grouped per file.
 * @param {string} input        raw grep/rg output
 * @param {number} [perFileMax] matches listed per file (default GREP_PER_FILE_MAX)
 * @returns {string} compacted listing, or the input unchanged when no line parses
 */
export function grep(input, perFileMax = GREP_PER_FILE_MAX) {
  const byFile = new Map();
  let total = 0;
  for (const line of input.split("\n")) {
    // splitn(3, ':') — only split on the first two colons
    const first = line.indexOf(":");
    if (first === -1) continue;
    const second = line.indexOf(":", first + 1);
    if (second === -1) continue;
    const file = line.slice(0, first);
    const lineNumStr = line.slice(first + 1, second);
    const content = line.slice(second + 1);
    // Rust: parts[1].parse::<usize>().is_ok()
    if (!/^\d+$/.test(lineNumStr)) continue;
    total++;
    if (!byFile.has(file)) byFile.set(file, []);
    byFile.get(file).push([lineNumStr, content]);
  }
  if (total === 0) return input;
  // Rust: files.sort_by_key(|(f, _)| *f)
  const files = Array.from(byFile.keys()).sort();
  let out = `${total} matches in ${files.length}F:\n\n`;
  for (const file of files) {
    const matches = byFile.get(file);
    out += `[file] ${file} (${matches.length}):\n`;
    for (const [lineNum, content] of matches.slice(0, perFileMax)) {
      // Rust: format!(" {:>4}: {}", line_num, content.trim())
      out += ` ${lineNum.padStart(4)}: ${content.trim()}\n`;
    }
    if (matches.length > perFileMax) {
      out += ` +${matches.length - perFileMax}\n`;
    }
    out += "\n";
  }
  return out;
}
grep.filterName = "grep";

View File

@@ -0,0 +1,79 @@
// Port of compact_ls (rtk/src/cmds/system/ls.rs:154-232)
// Input: `ls -la` style output. Output: compact "name/ (dirs)\nname size"
import { LS_EXT_SUMMARY_TOP, LS_NOISE_DIRS } from "../constants.js";
// Rust LS_DATE_RE: month + day + (year|HH:MM). Anchors where the date column
// sits in an `ls -l` row; everything after the match is the file name, and
// everything before it holds perms/links/owner/group/size.
const LS_DATE_RE = /\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+\d{1,2}\s+(\d{4}|\d{2}:\d{2})\s+/;
// Human-readable size: >=1 MiB → "x.yM", >=1 KiB → "x.yK", else raw bytes.
function humanSize(bytes) {
  const MIB = 1_048_576;
  const KIB = 1024;
  if (bytes >= MIB) return `${(bytes / MIB).toFixed(1)}M`;
  if (bytes >= KIB) return `${(bytes / KIB).toFixed(1)}K`;
  return `${bytes}B`;
}
// Parse one `ls -l` row into { fileType, size, name }, or null when the row
// does not contain the date column the regex anchors on.
function parseLsLine(line) {
  const dateMatch = LS_DATE_RE.exec(line);
  if (!dateMatch) return null;
  // The name is everything after the date column.
  const name = line.slice(dateMatch.index + dateMatch[0].length);
  const fields = line.slice(0, dateMatch.index).split(/\s+/).filter(Boolean);
  if (fields.length < 4) return null;
  const fileType = fields[0].charAt(0); // first char of the perms string
  // Size = rightmost field that is a plain integer (skips owner/group names).
  let size = 0;
  for (let i = fields.length - 1; i >= 0; i--) {
    const candidate = Number(fields[i]);
    if (Number.isInteger(candidate) && String(candidate) === fields[i]) {
      size = candidate;
      break;
    }
  }
  return { fileType, size, name };
}
// Compact an `ls -la` listing: directories first as "name/", then files with
// human-readable sizes, then a summary line with top extensions by count.
export function ls(input) {
  const dirNames = [];
  const fileRows = []; // [name, human-readable size]
  const extCounts = new Map();
  for (const row of input.split("\n")) {
    if (row.length === 0 || row.startsWith("total ")) continue;
    const entry = parseLsLine(row);
    if (!entry) continue;
    const { fileType, size, name } = entry;
    if (name === "." || name === "..") continue;
    // Always hide well-known noise dirs from LLM context (Rust honors show_all).
    if (LS_NOISE_DIRS.includes(name)) continue;
    if (fileType === "d") {
      dirNames.push(name);
      continue;
    }
    if (fileType !== "-" && fileType !== "l") continue;
    const dot = name.lastIndexOf(".");
    const ext = dot > 0 ? name.slice(dot) : "no ext";
    extCounts.set(ext, (extCounts.get(ext) || 0) + 1);
    fileRows.push([name, humanSize(size)]);
  }
  if (dirNames.length === 0 && fileRows.length === 0) return input;
  let out = "";
  for (const d of dirNames) out += `${d}/\n`;
  for (const [name, size] of fileRows) out += `${name} ${size}\n`;
  // Summary line (Rust port)
  let summary = `\nSummary: ${fileRows.length} files, ${dirNames.length} dirs`;
  if (extCounts.size > 0) {
    const ranked = Array.from(extCounts.entries()).sort((a, b) => b[1] - a[1]);
    const parts = ranked.slice(0, LS_EXT_SUMMARY_TOP).map(([e, c]) => `${c} ${e}`);
    summary += ` (${parts.join(", ")}`;
    if (ranked.length > LS_EXT_SUMMARY_TOP) {
      summary += `, +${ranked.length - LS_EXT_SUMMARY_TOP} more`;
    }
    summary += ")";
  }
  return out + summary;
}
ls.filterName = "ls";

View File

@@ -0,0 +1,27 @@
// Handles Cursor/Codex read_file output: " 1|content\n 2|content".
// Strategy mirrors Rust filter::smart_truncate (filter.rs): keep head+tail, drop middle.
import { SMART_TRUNCATE_HEAD, SMART_TRUNCATE_TAIL, SMART_TRUNCATE_MIN_LINES } from "../constants.js";
// Shape of a numbered line: optional indent, digits, then '|'.
const LINE_RE = /^\s*\d+\|/;

/**
 * Truncate a line-numbered file dump: keep the first SMART_TRUNCATE_HEAD and
 * last SMART_TRUNCATE_TAIL lines, replacing the middle with a marker.
 * (Hit-ratio validation already happened in autodetect.)
 */
export function readNumbered(input) {
  const allLines = input.split("\n");
  if (allLines.length < SMART_TRUNCATE_MIN_LINES) return input;
  const dropped = allLines.length - SMART_TRUNCATE_HEAD - SMART_TRUNCATE_TAIL;
  return [
    ...allLines.slice(0, SMART_TRUNCATE_HEAD),
    `... +${dropped} lines truncated (file continues)`,
    ...allLines.slice(allLines.length - SMART_TRUNCATE_TAIL),
  ].join("\n");
}
readNumbered.filterName = "read-numbered";
// Exposed for autodetect
export const READ_NUMBERED_LINE_RE = LINE_RE;

View File

@@ -0,0 +1,52 @@
// Compact "Result of search in '...' (total N files):\n- path\n- path" output
// (Cursor Glob tool). Groups by parent dir like find, shows basenames.
import { SEARCH_LIST_PER_DIR_MAX, SEARCH_LIST_TOTAL_DIR_MAX } from "../constants.js";
// Matches the Cursor Glob header line and captures the reported file count.
const HEADER_RE = /^Result of search in '[^']*' \(total (\d+) files?\):/;

// Compact a Cursor Glob result list: keep the header, group the "- path"
// bullets by parent directory (same shape as the find filter), cap output.
export function searchList(input) {
  const lines = input.split("\n");
  if (lines.length === 0) return input;
  // First line must be the header (validated by autodetect too)
  const header = lines[0] || "";
  // Collect the "- path" bullet entries after the header line.
  const paths = [];
  for (const raw of lines.slice(1)) {
    const trimmed = raw.trim();
    if (trimmed.startsWith("- ")) paths.push(trimmed.slice(2));
  }
  if (paths.length === 0) return input;
  // Group basenames by parent directory.
  const byDir = new Map();
  for (const p of paths) {
    const slash = p.lastIndexOf("/");
    const dir = slash === -1 ? "." : (p.slice(0, slash) || "/");
    const name = slash === -1 ? p : p.slice(slash + 1);
    const bucket = byDir.get(dir);
    if (bucket) bucket.push(name);
    else byDir.set(dir, [name]);
  }
  const sortedDirs = [...byDir.keys()].sort();
  let out = `${header}\n${paths.length} files in ${sortedDirs.length} dirs:\n\n`;
  for (const dir of sortedDirs.slice(0, SEARCH_LIST_TOTAL_DIR_MAX)) {
    const names = byDir.get(dir);
    out += `${dir}/ (${names.length}):\n`;
    for (const n of names.slice(0, SEARCH_LIST_PER_DIR_MAX)) out += ` ${n}\n`;
    if (names.length > SEARCH_LIST_PER_DIR_MAX) {
      out += ` +${names.length - SEARCH_LIST_PER_DIR_MAX}\n`;
    }
    out += "\n";
  }
  if (sortedDirs.length > SEARCH_LIST_TOTAL_DIR_MAX) {
    out += `+${sortedDirs.length - SEARCH_LIST_TOTAL_DIR_MAX} more dirs\n`;
  }
  return out.replace(/\n+$/, "");
}
searchList.filterName = "search-list";
export const SEARCH_LIST_HEADER_RE = HEADER_RE;

View File

@@ -0,0 +1,15 @@
// Port concept of filter::smart_truncate (rtk/src/core/filter.rs).
// Keep HEAD + TAIL lines, replace middle with "... +N lines truncated".
import { SMART_TRUNCATE_HEAD, SMART_TRUNCATE_TAIL, SMART_TRUNCATE_MIN_LINES } from "../constants.js";
// Keep the first SMART_TRUNCATE_HEAD and last SMART_TRUNCATE_TAIL lines,
// replacing the middle with a "... +N lines truncated" marker; inputs below
// SMART_TRUNCATE_MIN_LINES pass through untouched.
export function smartTruncate(input) {
  const rows = input.split("\n");
  if (rows.length < SMART_TRUNCATE_MIN_LINES) return input;
  const dropped = rows.length - SMART_TRUNCATE_HEAD - SMART_TRUNCATE_TAIL;
  return [
    ...rows.slice(0, SMART_TRUNCATE_HEAD),
    `... +${dropped} lines truncated`,
    ...rows.slice(rows.length - SMART_TRUNCATE_TAIL),
  ].join("\n");
}
smartTruncate.filterName = "smart-truncate";

View File

@@ -0,0 +1,32 @@
// Port of filter_tree_output (rtk/src/cmds/system/tree.rs:65-94)
// Removes summary line (e.g. "5 directories, 23 files") and trailing blanks.
import { TREE_MAX_LINES } from "../constants.js";
// Anchored match for the `tree` summary line ("5 directories, 23 files",
// "1 directory", ...). Replaces the previous substring check
// `includes("director") && includes("file")`, which also dropped legitimate
// entries such as "directory_of_files.md".
const TREE_SUMMARY_RE = /^\s*\d+\s+director(y|ies)(,\s+\d+\s+files?)?\s*$/;

/**
 * Clean up `tree` output: drop the summary line and leading/trailing blanks,
 * and cap very long listings.
 * @param {string} input      raw `tree` output
 * @param {number} [maxLines] output cap (default TREE_MAX_LINES; JS-only safeguard)
 * @returns {string}
 */
export function tree(input, maxLines = TREE_MAX_LINES) {
  const lines = input.split("\n");
  if (lines.length === 0) return input;
  const filtered = [];
  for (const line of lines) {
    // Drop the "X directories, Y files" summary line only.
    if (TREE_SUMMARY_RE.test(line)) continue;
    // Drop leading blanks
    if (line.trim() === "" && filtered.length === 0) continue;
    filtered.push(line);
  }
  // Drop trailing blanks
  while (filtered.length > 0 && filtered[filtered.length - 1].trim() === "") {
    filtered.pop();
  }
  // Cap overly long trees (JS-only safeguard; Rust has no cap)
  if (filtered.length > maxLines) {
    const cut = filtered.length - maxLines;
    return filtered.slice(0, maxLines).join("\n") + `\n... +${cut} more lines`;
  }
  return filtered.join("\n");
}
tree.filterName = "tree";

11
open-sse/rtk/flag.js Normal file
View File

@@ -0,0 +1,11 @@
// Synchronous RTK toggle cache. Updated by /api/settings PATCH handler
// and initialized from DB on server boot.
// In-memory RTK on/off switch. Kept synchronous so the request hot path can
// read it without touching the DB; the settings PATCH handler and boot-time
// init keep it in sync.
let enabled = false;

/** Set the toggle; any value is coerced to a strict boolean. */
export function setRtkEnabled(value) {
  enabled = !!value;
}

/** Current toggle value. */
export function isRtkEnabled() {
  return enabled;
}

102
open-sse/rtk/index.js Normal file
View File

@@ -0,0 +1,102 @@
// RTK port: compress tool_result content in LLM request bodies
// Injected at the top of translateRequest (before any format translation)
import { RAW_CAP, MIN_COMPRESS_SIZE } from "./constants.js";
import { autoDetectFilter } from "./autodetect.js";
import { safeApply } from "./applyFilter.js";
import { isRtkEnabled } from "./flag.js";
export { isRtkEnabled, setRtkEnabled } from "./flag.js";
// Compress tool_result content in-place. Returns stats or null if disabled/failed.
// Compress tool_result content in-place across the known message shapes.
// Returns { bytesBefore, bytesAfter, hits } or null when disabled / malformed
// input / an unexpected error (the body is then used as-is upstream).
export function compressMessages(body) {
  if (!isRtkEnabled()) return null;
  if (!body || !Array.isArray(body.messages)) return null;
  const stats = { bytesBefore: 0, bytesAfter: 0, hits: [] };
  try {
    for (const msg of body.messages) {
      if (!msg) continue;
      // Shape 1: OpenAI tool message with a plain string payload.
      if (msg.role === "tool" && typeof msg.content === "string") {
        msg.content = compressText(msg.content, stats, "openai-tool");
        continue;
      }
      if (!Array.isArray(msg.content)) continue;
      // Shape 1b: OpenAI tool message whose payload is an array of text parts.
      if (msg.role === "tool") {
        for (const part of msg.content) {
          if (part && part.type === "text" && typeof part.text === "string") {
            part.text = compressText(part.text, stats, "openai-tool-array");
          }
        }
        continue;
      }
      // Shapes 2/3: Claude-style blocks with tool_result entries.
      for (const block of msg.content) {
        if (!block || block.type !== "tool_result") continue;
        if (block.is_error === true) continue; // preserve error traces
        if (typeof block.content === "string") {
          // Shape 2: string form
          block.content = compressText(block.content, stats, "claude-string");
        } else if (Array.isArray(block.content)) {
          // Shape 3: array form — compress each text part
          for (const part of block.content) {
            if (part && part.type === "text" && typeof part.text === "string") {
              part.text = compressText(part.text, stats, "claude-array");
            }
          }
        }
      }
    }
  } catch (e) {
    console.warn("[RTK] compressMessages error:", e.message);
    return null;
  }
  return stats;
}
// Compress one text payload via autodetected filter, updating stats.
// Passthrough cases (size bounds, no filter, empty/bigger output) still
// account for the original byte count so stats stay balanced.
function compressText(text, stats, shape) {
  const bytesIn = text.length;
  stats.bytesBefore += bytesIn;
  const passthrough = () => {
    stats.bytesAfter += bytesIn;
    return text;
  };
  // Skip blobs too small to matter or too large to scan.
  if (bytesIn < MIN_COMPRESS_SIZE || bytesIn > RAW_CAP) return passthrough();
  const filter = autoDetectFilter(text);
  if (!filter) return passthrough();
  const compressed = safeApply(filter, text);
  // Safety: never return empty output and never grow the payload.
  if (!compressed || compressed.length === 0 || compressed.length >= bytesIn) return passthrough();
  stats.bytesAfter += compressed.length;
  stats.hits.push({ shape, filter: filter.filterName || filter.name, saved: bytesIn - compressed.length });
  return compressed;
}
// Convenience: format a log line from stats
// Render a one-line summary of compression stats, or null when nothing fired.
export function formatRtkLog(stats) {
  if (!stats || !stats.hits || stats.hits.length === 0) return null;
  const saved = stats.bytesBefore - stats.bytesAfter;
  const pct = stats.bytesBefore > 0 ? ((saved / stats.bytesBefore) * 100).toFixed(1) : "0";
  const uniqueFilters = [...new Set(stats.hits.map((h) => h.filter))].join(",");
  return `[RTK] saved ${saved}B / ${stats.bytesBefore}B (${pct}%) via [${uniqueFilters}] hits=${stats.hits.length}`;
}

38
open-sse/rtk/registry.js Normal file
View File

@@ -0,0 +1,38 @@
import { FILTERS } from "./constants.js";
import { gitDiff } from "./filters/gitDiff.js";
import { gitStatus } from "./filters/gitStatus.js";
import { grep } from "./filters/grep.js";
import { find } from "./filters/find.js";
import { dedupLog } from "./filters/dedupLog.js";
import { ls } from "./filters/ls.js";
import { tree } from "./filters/tree.js";
import { smartTruncate } from "./filters/smartTruncate.js";
import { readNumbered } from "./filters/readNumbered.js";
import { searchList } from "./filters/searchList.js";
// Canonical filter name → implementation map.
const REGISTRY = {
  [FILTERS.GIT_DIFF]: gitDiff,
  [FILTERS.GIT_STATUS]: gitStatus,
  [FILTERS.GREP]: grep,
  [FILTERS.FIND]: find,
  [FILTERS.DEDUP_LOG]: dedupLog,
  [FILTERS.LS]: ls,
  [FILTERS.TREE]: tree,
  [FILTERS.SMART_TRUNCATE]: smartTruncate,
  [FILTERS.READ_NUMBERED]: readNumbered,
  [FILTERS.SEARCH_LIST]: searchList
};
// Rust resolve_filter aliases (pipe_cmd.rs): grep|rg, find|fd
const ALIASES = {
  rg: grep,
  fd: find
};
/**
 * Look up a filter by canonical name, then by alias.
 * BUGFIX: guard lookups with hasOwnProperty so inherited Object.prototype
 * keys ("toString", "constructor", ...) can no longer resolve to
 * non-filter functions via plain property access.
 * @param {string} name filter name or alias
 * @returns {Function|null}
 */
export function resolveFilter(name) {
  if (Object.prototype.hasOwnProperty.call(REGISTRY, name)) return REGISTRY[name];
  if (Object.prototype.hasOwnProperty.call(ALIASES, name)) return ALIASES[name];
  return null;
}
// Full canonical-name map (aliases excluded). NOTE(review): returns the live
// object — callers could mutate the registry; confirm that is acceptable.
export function allFilters() {
  return REGISTRY;
}

View File

@@ -5,6 +5,7 @@ import { cloakClaudeTools } from "../utils/claudeCloaking.js";
import { filterToOpenAIFormat } from "./helpers/openaiHelper.js";
import { normalizeThinkingConfig } from "../services/provider.js";
import { AntigravityExecutor } from "../executors/antigravity.js";
import { compressMessages, formatRtkLog } from "../rtk/index.js";
// Registry for translators
const requestRegistry = new Map();
@@ -74,6 +75,13 @@ export function translateRequest(sourceFormat, targetFormat, model, body, stream
ensureInitialized();
let result = body;
// RTK: compress tool_result content before any translation (shape-agnostic)
const rtkStats = compressMessages(result);
if (rtkStats) {
const line = formatRtkLog(rtkStats);
if (line) console.log(line);
}
// Strip explicit content types (opt-in via strip[] in PROVIDER_MODELS entry)
stripContentTypes(result, stripList);

View File

@@ -25,6 +25,7 @@ export default function APIPageClient({ machineId }) {
const [requireLogin, setRequireLogin] = useState(true);
const [hasPassword, setHasPassword] = useState(true);
const [tunnelDashboardAccess, setTunnelDashboardAccess] = useState(false);
const [rtkEnabled, setRtkEnabledState] = useState(false);
// Cloudflare Tunnel state
const [tunnelChecking, setTunnelChecking] = useState(true);
@@ -80,6 +81,7 @@ export default function APIPageClient({ machineId }) {
setRequireLogin(data.requireLogin !== false);
setHasPassword(data.hasPassword || false);
setTunnelDashboardAccess(data.tunnelDashboardAccess || false);
setRtkEnabledState(data.rtkEnabled || false);
}
if (statusRes.ok) {
const data = await statusRes.json();
@@ -167,6 +169,19 @@ export default function APIPageClient({ machineId }) {
}
};
const handleRtkEnabled = async (value) => {
try {
const res = await fetch("/api/settings", {
method: "PATCH",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ rtkEnabled: value }),
});
if (res.ok) setRtkEnabledState(value);
} catch (error) {
console.log("Error updating rtkEnabled:", error);
}
};
const fetchData = async () => {
try {
const keysRes = await fetch("/api/keys");
@@ -798,6 +813,42 @@ export default function APIPageClient({ machineId }) {
)}
</Card>
{/* Token Saver (RTK) */}
<Card id="rtk">
<div className="flex items-center justify-between mb-2">
<div className="flex items-center gap-2">
<h2 className="text-lg font-semibold">Token Saver</h2>
<span className="px-2 py-0.5 text-xs font-medium rounded-full bg-amber-500/15 text-amber-600 dark:text-amber-400 border border-amber-500/30">
Experimental
</span>
</div>
</div>
<div className="flex items-center justify-between pt-2">
<div className="pr-4">
<p className="font-medium">Compress tool output</p>
<p className="text-sm text-text-muted">
Auto-compress git diff / status / grep / find / ls / tree / logs in <code>tool_result</code> before sending to LLM. Check server console for <code>[RTK] saved ...</code> log.
</p>
<p className="text-xs text-text-muted mt-1">
Inspired by{" "}
<a
href="https://github.com/rtk-ai/rtk"
target="_blank"
rel="noopener noreferrer"
className="underline hover:text-primary"
>
RTK (Rust Token Killer)
</a>
{" "} ported to JavaScript. This feature is still under testing; disable it if you notice unexpected results.
</p>
</div>
<Toggle
checked={rtkEnabled}
onChange={() => handleRtkEnabled(!rtkEnabled)}
/>
</div>
</Card>
{/* API Keys */}
<Card id="require-api-key">
<div className="flex items-center justify-between mb-4">

View File

@@ -197,6 +197,21 @@ export async function POST(request) {
break;
}
case "opencode-go": {
const res = await fetch("https://opencode.ai/zen/go/v1/chat/completions", {
method: "POST",
headers: { "Content-Type": "application/json", "Authorization": `Bearer ${apiKey}` },
body: JSON.stringify({
model: getDefaultModel("opencode-go"),
messages: [{ role: "user", content: "ping" }],
max_tokens: 1,
stream: false,
}),
});
isValid = res.status !== 401 && res.status !== 403;
break;
}
case "deepgram": {
const res = await fetch("https://api.deepgram.com/v1/projects", {
headers: { "Authorization": `Token ${apiKey}` },

View File

@@ -1,6 +1,7 @@
import { NextResponse } from "next/server";
import { getSettings, updateSettings } from "@/lib/localDb";
import { applyOutboundProxyEnv } from "@/lib/network/outboundProxy";
import { setRtkEnabled } from "open-sse/rtk/flag.js";
import bcrypt from "bcryptjs";
export async function GET() {
@@ -65,6 +66,11 @@ export async function PATCH(request) {
) {
applyOutboundProxyEnv(settings);
}
// Sync RTK toggle immediately (sync cache for request hot path)
if (Object.prototype.hasOwnProperty.call(body, "rtkEnabled")) {
setRtkEnabled(settings.rtkEnabled);
}
const { password, ...safeSettings } = settings;
return NextResponse.json(safeSettings);
} catch (error) {

View File

@@ -3,6 +3,7 @@ import "./globals.css";
import { ThemeProvider } from "@/shared/components/ThemeProvider";
import "@/lib/initCloudSync"; // Auto-initialize cloud sync
import "@/lib/network/initOutboundProxy"; // Auto-initialize outbound proxy env
import "@/lib/rtk/initRtk"; // Auto-initialize RTK toggle from DB
import { initConsoleLogCapture } from "@/lib/consoleLogBuffer";
import { RuntimeI18nProvider } from "@/i18n/RuntimeI18nProvider";

View File

@@ -36,6 +36,7 @@ const DEFAULT_SETTINGS = {
outboundProxyUrl: "",
outboundNoProxy: "",
mitmRouterBaseUrl: DEFAULT_MITM_ROUTER_BASE,
rtkEnabled: false,
};
function cloneDefaultData() {

20
src/lib/rtk/initRtk.js Normal file
View File

@@ -0,0 +1,20 @@
import { getSettings } from "@/lib/localDb";
import { setRtkEnabled } from "open-sse/rtk/flag.js";
// Module-level guard: the settings read happens at most once per process.
let initialized = false;

/**
 * Load the persisted `rtkEnabled` setting from the local DB and mirror it
 * into the RTK sync-flag cache (used on the request hot path).
 * Idempotent: returns immediately once a load has succeeded. On failure it
 * logs and leaves `initialized` false so a later call can retry.
 * @returns {Promise<boolean>} whether initialization has completed
 */
export async function ensureRtkInitialized() {
  if (initialized) return true;
  try {
    const settings = await getSettings();
    // Strict boolean check: any non-true value (missing key, null) keeps RTK off.
    setRtkEnabled(settings.rtkEnabled === true);
    initialized = true;
  } catch (error) {
    console.error("[ServerInit] Error initializing RTK flag:", error);
  }
  return initialized;
}

// Kick off initialization at import time. Errors are handled inside, but any
// unexpected rejection should go to stderr, not stdout (was console.log).
ensureRtkInitialized().catch(console.error);

export default ensureRtkInitialized;

136
tests/unit/rtk.e2e.test.js Normal file
View File

@@ -0,0 +1,136 @@
// End-to-end integration test: hit live local proxy and verify [RTK] behavior.
// Run with: RUN_E2E=1 RTK_E2E_PORT=... RTK_E2E_KEY=... RTK_E2E_LOG=<absolute path to server stdout file> npm test rtk.e2e.test.js
// Requires: dev server running, rtkEnabled=true, API key present.
import { describe, it, expect } from "vitest";
import fs from "node:fs";
// E2E connection parameters — all overridable via environment variables.
const PORT = process.env.RTK_E2E_PORT || "20128";
const BASE = `http://localhost:${PORT}`;
const API_KEY = process.env.RTK_E2E_KEY || "";
// Absolute path to the file capturing server stdout; log assertions are skipped when unset.
const LOG_FILE = process.env.RTK_E2E_LOG || "";
// The whole suite is opt-in: it runs only when RUN_E2E=1.
const RUN = process.env.RUN_E2E === "1";
const maybe = RUN ? describe : describe.skip;
// Read up to `bytes` from the end of the server log file ("" if no log file).
// NOTE(review): currently unused in this suite — kept as a debugging helper.
function readLogTail(bytes = 8192) {
  if (!LOG_FILE || !fs.existsSync(LOG_FILE)) return "";
  const stat = fs.statSync(LOG_FILE);
  const start = Math.max(0, stat.size - bytes);
  const fd = fs.openSync(LOG_FILE, "r");
  try {
    const buf = Buffer.alloc(stat.size - start);
    fs.readSync(fd, buf, 0, buf.length, start);
    return buf.toString("utf8");
  } finally {
    // Always release the descriptor, even if readSync throws (was leaked).
    fs.closeSync(fd);
  }
}
// Read new bytes appended to log since `offset`. Returns text + new offset.
// Returns { text: "", next: offset } when the log file is absent.
function readLogSince(offset) {
  if (!LOG_FILE || !fs.existsSync(LOG_FILE)) return { text: "", next: offset };
  const stat = fs.statSync(LOG_FILE);
  if (stat.size <= offset) return { text: "", next: stat.size };
  const fd = fs.openSync(LOG_FILE, "r");
  try {
    const buf = Buffer.alloc(stat.size - offset);
    fs.readSync(fd, buf, 0, buf.length, offset);
    return { text: buf.toString("utf8"), next: stat.size };
  } finally {
    // Close even when readSync throws — otherwise each call leaks an fd.
    fs.closeSync(fd);
  }
}
// Current size of the log file in bytes; 0 when the log is not available.
function logOffset() {
  if (!LOG_FILE) return 0;
  if (!fs.existsSync(LOG_FILE)) return 0;
  return fs.statSync(LOG_FILE).size;
}
// POST a chat-completions request to the local proxy with the test API key.
async function sendChat(body) {
  const headers = {
    "content-type": "application/json",
    "authorization": `Bearer ${API_KEY}`,
  };
  return fetch(`${BASE}/v1/chat/completions`, {
    method: "POST",
    headers,
    body: JSON.stringify(body),
  });
}
// Synthesize a large multi-file unified diff payload for compression tests.
function makeBigDiff(fileCount = 3, linesPerFile = 80) {
  const chunks = [];
  for (let f = 0; f < fileCount; f++) {
    chunks.push(
      `diff --git a/src/file${f}.js b/src/file${f}.js`,
      `index abc${f}..def${f} 100644`,
      `--- a/src/file${f}.js`,
      `+++ b/src/file${f}.js`,
      `@@ -1,${linesPerFile} +1,${linesPerFile} @@`
    );
    for (let i = 0; i < linesPerFile; i++) {
      chunks.push(
        `-const old${f}_${i} = "removed value ${i} padding padding padding";`,
        `+const new${f}_${i} = "added value ${i} padding padding padding padding";`
      );
    }
  }
  return chunks.join("\n");
}
// Live-server assertions: each test tolerates provider-side failures and only
// requires that the proxy answered and (when LOG_FILE is set) that a matching
// "[RTK] saved ..." line appeared in the captured server stdout.
maybe("RTK end-to-end", () => {
  it("server is reachable", async () => {
    const res = await fetch(`${BASE}/api/health`);
    expect(res.ok).toBe(true);
  });
  it("rtkEnabled flag is true (user must enable via dashboard)", async () => {
    const res = await fetch(`${BASE}/api/settings`);
    const data = await res.json();
    expect(data.rtkEnabled).toBe(true);
  });
  it("compresses git diff tool_result and writes [RTK] savings to log", async () => {
    const diff = makeBigDiff(2, 60);
    // Payload must clear RTK's 500-byte minimum or compression is skipped.
    expect(diff.length).toBeGreaterThan(500);
    const offset = logOffset();
    const res = await sendChat({
      model: "cc/claude-opus-4-7",
      stream: false,
      max_tokens: 64,
      messages: [
        { role: "user", content: "run git diff" },
        { role: "assistant", content: null, tool_calls: [{ id: "call_1", type: "function", function: { name: "Bash", arguments: JSON.stringify({ command: "git diff" }) } }] },
        { role: "tool", tool_call_id: "call_1", content: diff },
        { role: "user", content: "summarize in 10 words" }
      ]
    });
    // Provider/account state varies; any of these means the proxy handled it.
    expect([200, 400, 401, 402, 500]).toContain(res.status);
    if (!LOG_FILE) return;
    // Brief delay so the server has flushed its stdout to the log file.
    await new Promise(r => setTimeout(r, 500));
    const { text } = readLogSince(offset);
    const matches = [...text.matchAll(/\[RTK\] saved (\d+)B \/ (\d+)B \([\d.]+%\) via \[([\w,-]+)\] hits=(\d+)/g)];
    // Find the log line that corresponds to OUR request (total ≥ diff.length and contains git-diff)
    const mine = matches.find(m => Number(m[2]) >= diff.length && m[3].includes("git-diff"));
    expect(mine, `no matching [RTK] line for our request (diff=${diff.length}B) in ${matches.length} log entries`).toBeTruthy();
    expect(Number(mine[1])).toBeGreaterThan(500);
    expect(mine[3]).toContain("git-diff");
    expect(Number(mine[4])).toBeGreaterThanOrEqual(1);
  });
  it("compresses grep-style tool_result", async () => {
    const lines = [];
    for (let i = 1; i <= 30; i++) lines.push(`src/lib/foo.js:${i}:const v${i} = "matching content with enough padding to exceed threshold";`);
    const grepOut = lines.join("\n");
    expect(grepOut.length).toBeGreaterThan(500);
    const offset = logOffset();
    const res = await sendChat({
      model: "cc/claude-opus-4-7",
      stream: false,
      max_tokens: 32,
      messages: [
        { role: "user", content: "grep" },
        { role: "assistant", content: null, tool_calls: [{ id: "c3", type: "function", function: { name: "Bash", arguments: "{}" } }] },
        { role: "tool", tool_call_id: "c3", content: grepOut },
        { role: "user", content: "ok" }
      ]
    });
    expect([200, 400, 401, 402, 500]).toContain(res.status);
    if (!LOG_FILE) return;
    await new Promise(r => setTimeout(r, 500));
    const { text } = readLogSince(offset);
    const matches = [...text.matchAll(/\[RTK\] saved (\d+)B \/ (\d+)B \([\d.]+%\) via \[([\w,-]+)\] hits=(\d+)/g)];
    const mine = matches.find(m => Number(m[2]) >= grepOut.length && m[3].includes("grep"));
    expect(mine, `no matching [RTK] line for grep payload`).toBeTruthy();
  });
});

View File

@@ -0,0 +1,137 @@
// E2E test: verify RTK compression runs for every configured provider/route.
// Each test covers a different source→target translator path.
// Run with: RUN_E2E=1 RTK_E2E_PORT=... RTK_E2E_KEY=... RTK_E2E_LOG=<server stdout file> npm test rtk.multi-provider.e2e.test.js
import { describe, it, expect } from "vitest";
import fs from "node:fs";
// E2E connection parameters — overridable via environment variables.
const PORT = process.env.RTK_E2E_PORT || "20128";
const BASE = `http://localhost:${PORT}`;
const API_KEY = process.env.RTK_E2E_KEY || "";
// Path to captured server stdout; log assertions are skipped when unset.
const LOG_FILE = process.env.RTK_E2E_LOG || "";
// Opt-in suite: runs only when RUN_E2E=1.
const RUN = process.env.RUN_E2E === "1";
const maybe = RUN ? describe : describe.skip;
// Current byte size of the log file, or 0 when it is not available.
function logOffset() {
  const hasLog = LOG_FILE && fs.existsSync(LOG_FILE);
  return hasLog ? fs.statSync(LOG_FILE).size : 0;
}
// Read bytes appended to the log file since `offset`; "" when nothing new.
function readLogSince(offset) {
  if (!LOG_FILE || !fs.existsSync(LOG_FILE)) return "";
  const stat = fs.statSync(LOG_FILE);
  if (stat.size <= offset) return "";
  const fd = fs.openSync(LOG_FILE, "r");
  try {
    const buf = Buffer.alloc(stat.size - offset);
    fs.readSync(fd, buf, 0, buf.length, offset);
    return buf.toString("utf8");
  } finally {
    // Close even when readSync throws — this helper is polled in a loop,
    // so a leaked descriptor per iteration would add up quickly.
    fs.closeSync(fd);
  }
}
// Build a synthetic multi-file `git diff` payload large enough to trigger RTK.
function makeBigDiff(fileCount = 2, linesPerFile = 60) {
  const files = Array.from({ length: fileCount }, (_, f) => {
    const hunkLines = Array.from({ length: linesPerFile }, (_, i) =>
      `-const old${f}_${i} = "removed value ${i} padding padding padding";\n` +
      `+const new${f}_${i} = "added value ${i} padding padding padding padding";`
    );
    return [
      `diff --git a/src/file${f}.js b/src/file${f}.js`,
      `index abc${f}..def${f} 100644`,
      `--- a/src/file${f}.js`,
      `+++ b/src/file${f}.js`,
      `@@ -1,${linesPerFile} +1,${linesPerFile} @@`,
      ...hunkLines,
    ].join("\n");
  });
  return files.join("\n");
}
// POST a chat-completions request to the local proxy with the test API key.
async function sendChat(body) {
  const url = `${BASE}/v1/chat/completions`;
  const payload = JSON.stringify(body);
  return fetch(url, {
    method: "POST",
    headers: { "content-type": "application/json", "authorization": `Bearer ${API_KEY}` },
    body: payload,
  });
}
// Wait for server to emit a matching [RTK] log line (race-safe against concurrent traffic).
// Polls the log every 200ms until a line with total >= minBytes and the given
// filter name appears, or until timeoutMs elapses (then null).
async function waitForRtkLine({ minBytes, filterName, timeoutMs = 5000 }) {
  const deadline = Date.now() + timeoutMs;
  const startOffset = logOffset();
  while (Date.now() < deadline) {
    const text = readLogSince(startOffset);
    const matches = [...text.matchAll(/\[RTK\] saved (\d+)B \/ (\d+)B \(([\d.]+)%\) via \[([\w,-]+)\] hits=(\d+)/g)];
    const found = matches.find((m) => Number(m[2]) >= minBytes && m[4].includes(filterName));
    if (found) {
      const [, saved, total, pct, filters, hits] = found;
      return {
        saved: Number(saved),
        total: Number(total),
        pct: Number(pct),
        filters,
        hits: Number(hits),
      };
    }
    await new Promise((resolve) => setTimeout(resolve, 200));
  }
  return null;
}
// Build a chat request with OpenAI-style tool_result carrying large content.
// The `diff` is placed in a `tool` message so the proxy's RTK pass sees it.
function chatBodyWithDiff(model, diff) {
  const toolCall = {
    id: "call_1",
    type: "function",
    function: { name: "Bash", arguments: JSON.stringify({ command: "git diff" }) },
  };
  const messages = [
    { role: "user", content: "run git diff" },
    { role: "assistant", content: null, tool_calls: [toolCall] },
    { role: "tool", tool_call_id: "call_1", content: diff },
    { role: "user", content: "ok" },
  ];
  return { model, stream: false, max_tokens: 16, messages };
}
// Matrix of routes to cover — one entry per translator target format.
// `model` prefixes (cc/, cx/, ag/, ...) select the provider route in the proxy.
const ROUTES = [
  { name: "claude (cc/* → openai→claude)", model: "cc/claude-opus-4-7" },
  { name: "codex (cx/* → openai→openai-responses)", model: "cx/gpt-5.4" },
  { name: "antigravity (ag/* → openai→antigravity)", model: "ag/gemini-3-flash" },
  { name: "cursor (cu/* → openai→cursor)", model: "cu/claude-4.5-sonnet" },
  { name: "kiro (kr/* → openai→kiro)", model: "kr/claude-sonnet-4.5" },
  { name: "gemini (gemini/* → openai→gemini)", model: "gemini/gemini-2.5-flash" },
  { name: "deepseek (deepseek/* → openai, passthrough)", model: "deepseek/deepseek-chat" },
  { name: "ollama (ollama/* → openai→ollama)", model: "ollama/gpt-oss:120b" },
];
// One test per route: proves the RTK compression pass runs before every translator.
maybe("RTK multi-provider E2E", () => {
  it("server reachable and rtkEnabled=true", async () => {
    const health = await fetch(`${BASE}/api/health`);
    expect(health.ok).toBe(true);
    const settings = await fetch(`${BASE}/api/settings`).then(r => r.json());
    expect(settings.rtkEnabled).toBe(true);
  });
  for (const route of ROUTES) {
    it(`compresses git diff for ${route.name}`, async () => {
      const diff = makeBigDiff();
      // Must exceed RTK's 500-byte minimum or compression is skipped.
      expect(diff.length).toBeGreaterThan(500);
      const res = await sendChat(chatBodyWithDiff(route.model, diff));
      // Provider may respond with 200/400/401/402/404/429/500 depending on account state.
      // The important thing: proxy must NOT crash (we just need status code).
      expect(res.status).toBeGreaterThanOrEqual(200);
      expect(res.status).toBeLessThan(600);
      if (!LOG_FILE) return;
      const hit = await waitForRtkLine({ minBytes: diff.length, filterName: "git-diff" });
      expect(hit, `[RTK] git-diff log line not found for ${route.name}`).toBeTruthy();
      expect(hit.saved).toBeGreaterThan(500);
      expect(hit.filters).toContain("git-diff");
      // Log actual savings for visibility
      console.log(`${route.name}: saved ${hit.saved}B / ${hit.total}B (${hit.pct}%) filters=${hit.filters}`);
    }, 20000); // generous per-test timeout: remote providers can be slow
  }
});

358
tests/unit/rtk.test.js Normal file
View File

@@ -0,0 +1,358 @@
import { describe, it, expect, beforeEach } from "vitest";
import { compressMessages, setRtkEnabled, isRtkEnabled, formatRtkLog } from "../../open-sse/rtk/index.js";
import { gitDiff } from "../../open-sse/rtk/filters/gitDiff.js";
import { gitStatus } from "../../open-sse/rtk/filters/gitStatus.js";
import { grep } from "../../open-sse/rtk/filters/grep.js";
import { find } from "../../open-sse/rtk/filters/find.js";
import { dedupLog } from "../../open-sse/rtk/filters/dedupLog.js";
import { ls } from "../../open-sse/rtk/filters/ls.js";
import { tree } from "../../open-sse/rtk/filters/tree.js";
import { smartTruncate } from "../../open-sse/rtk/filters/smartTruncate.js";
import { readNumbered } from "../../open-sse/rtk/filters/readNumbered.js";
import { searchList } from "../../open-sse/rtk/filters/searchList.js";
import { autoDetectFilter } from "../../open-sse/rtk/autodetect.js";
import { safeApply } from "../../open-sse/rtk/applyFilter.js";
// Fixture: a single-file diff whose one hunk has 200 added lines — long
// enough to exercise gitDiff's hunk truncation.
function makeLongDiff() {
  const header = [
    "diff --git a/foo.js b/foo.js",
    "index abc..def 100644",
    "--- a/foo.js",
    "+++ b/foo.js",
    "@@ -1,3 +1,200 @@",
  ];
  const padding = "x".repeat(20);
  const added = Array.from({ length: 200 }, (_, i) => `+added line ${i} ${padding}`);
  return [...header, ...added].join("\n");
}
// Fixture: a representative long-form `git status` with modified, new,
// deleted and untracked entries. Exact strings matter — the gitStatus
// filter parses these prefixes, so this block is kept byte-for-byte.
function makeGitStatus() {
  return [
    "On branch main",
    "Your branch is up to date with 'origin/main'.",
    "",
    "Changes not staged for commit:",
    " (use \"git add <file>...\" to update what will be committed)",
    "\tmodified: src/a.js",
    "\tmodified: src/b.js",
    "\tnew file: src/c.js",
    "\tdeleted: src/old.js",
    "",
    "Untracked files:",
    "\tnotes.txt",
    "",
    "no changes added to commit"
  ].join("\n");
}
// Fixture: grep -n style output — 40 matches in src/foo.js, 10 in src/bar.js.
function makeGrepOutput() {
  const fooMatches = Array.from({ length: 40 }, (_, idx) =>
    `src/foo.js:${idx + 1}:const x${idx + 1} = "some value here with padding text padding text"`);
  const barMatches = Array.from({ length: 10 }, (_, idx) =>
    `src/bar.js:${idx + 1}:const y${idx + 1} = "another value here with padding padding padding"`);
  return [...fooMatches, ...barMatches].join("\n");
}
// Fixture: find-style path listing — 30 files in ./src/a, 20 in ./src/b,
// and 5 loose files at the top level (55 paths total).
function makeFindOutput() {
  const aDir = Array.from({ length: 30 }, (_, i) => `./src/a/${i}.js`);
  const bDir = Array.from({ length: 20 }, (_, i) => `./src/b/${i}.js`);
  const top = Array.from({ length: 5 }, (_, i) => `./top${i}.md`);
  return [...aDir, ...bDir, ...top].join("\n");
}
// Round-trip of the global enable/disable flag.
describe("RTK flag", () => {
  it("default off, toggle works", () => {
    setRtkEnabled(false);
    expect(isRtkEnabled()).toBe(false);
    setRtkEnabled(true);
    expect(isRtkEnabled()).toBe(true);
    setRtkEnabled(false); // leave the shared flag off for the other suites
  });
});
// Unit coverage of each core filter against the synthetic fixtures above.
// Every test also asserts the output is strictly smaller than the input.
describe("RTK filters", () => {
  it("gitDiff truncates hunks beyond 100 lines and preserves file header", () => {
    const input = makeLongDiff();
    const out = gitDiff(input, 500);
    expect(out).toContain("foo.js");
    expect(out).toContain("lines truncated");
    expect(out.length).toBeLessThan(input.length);
  });
  it("gitStatus groups by kind and produces compact output (Rust format)", () => {
    const input = makeGitStatus();
    const out = gitStatus(input);
    expect(out).toContain("* main");
    expect(out).toMatch(/~ Modified: \d+ files/);
    expect(out).toContain("src/a.js");
    expect(out.length).toBeLessThan(input.length);
  });
  it("grep groups matches by file and caps per-file lines (Rust format)", () => {
    const input = makeGrepOutput();
    const out = grep(input);
    expect(out).toContain("50 matches in 2F:");
    expect(out).toContain("[file] src/foo.js (40):");
    expect(out).toContain("[file] src/bar.js (10):");
    expect(out).toMatch(/\+\d+/); // overflow marker
    expect(out.length).toBeLessThan(input.length);
  });
  it("find groups paths by parent dir, shows basenames (Rust format)", () => {
    const input = makeFindOutput();
    const out = find(input);
    expect(out).toContain("55 files in 3 dirs:");
    expect(out).toContain("./src/a/ (30):");
    expect(out).toContain("./src/b/ (20):");
    expect(out).toContain("./ (5):");
    expect(out.length).toBeLessThan(input.length);
  });
  it("dedupLog collapses consecutive duplicates", () => {
    const input = Array(20).fill("repeated log line A").join("\n") + "\nunique\n" + Array(10).fill("another dup").join("\n");
    const out = dedupLog(input);
    expect(out).toContain("repeated log line A");
    expect(out).toContain("duplicate lines");
    expect(out.length).toBeLessThan(input.length);
  });
});
// Auto-detection priority: each fixture must map to the expected filter name.
describe("autoDetectFilter", () => {
  it("detects git diff", () => {
    expect(autoDetectFilter("diff --git a/x b/x\n@@ -1 +1 @@\n+a").filterName).toBe("git-diff");
  });
  it("detects git status", () => {
    expect(autoDetectFilter("On branch main\n modified: x.js\n").filterName).toBe("git-status");
  });
  it("detects grep", () => {
    expect(autoDetectFilter("a.js:1:hello\nb.js:2:world\nc.js:3:foo").filterName).toBe("grep");
  });
  it("detects find", () => {
    expect(autoDetectFilter("./a/b.js\n./a/c.js\n./a/d.js").filterName).toBe("find");
  });
  it("falls back to dedupLog for generic text", () => {
    const txt = "line1\nline2\nline3\nline4\nline5\nline6\n";
    expect(autoDetectFilter(txt).filterName).toBe("dedup-log");
  });
});
// Coverage for the JS-only extra filters (ls, tree, smart-truncate,
// read-numbered, search-list) that have no direct Rust counterpart.
describe("RTK filters (extras)", () => {
  it("ls: compact_ls strips perms/owner, keeps name + size", () => {
    const input = [
      "total 48",
      "drwxr-xr-x 2 user staff 64 Jan 1 12:00 .",
      "drwxr-xr-x 2 user staff 64 Jan 1 12:00 ..",
      "drwxr-xr-x 2 user staff 64 Jan 1 12:00 src",
      "-rw-r--r-- 1 user staff 1234 Jan 1 12:00 Cargo.toml",
      "-rw-r--r-- 1 user staff 5678 Jan 1 12:00 README.md"
    ].join("\n");
    const out = ls(input);
    expect(out).toContain("src/");
    expect(out).toContain("Cargo.toml");
    expect(out).toContain("1.2K"); // 1234 bytes human-formatted
    expect(out).toContain("5.5K"); // 5678 bytes human-formatted
    expect(out).not.toContain("drwx");
    expect(out).toContain("Summary: 2 files, 1 dirs");
  });
  it("ls: filters noise dirs", () => {
    const input = [
      "total 8",
      "drwxr-xr-x 2 user staff 64 Jan 1 12:00 node_modules",
      "drwxr-xr-x 2 user staff 64 Jan 1 12:00 .git",
      "drwxr-xr-x 2 user staff 64 Jan 1 12:00 src",
      "-rw-r--r-- 1 user staff 100 Jan 1 12:00 main.js"
    ].join("\n");
    const out = ls(input);
    expect(out).not.toContain("node_modules");
    expect(out).not.toContain(".git");
    expect(out).toContain("src/");
    expect(out).toContain("main.js");
  });
  it("tree: removes summary, keeps structure", () => {
    const input = ".\n├── src\n│ └── main.rs\n└── Cargo.toml\n\n2 directories, 3 files\n";
    const out = tree(input);
    expect(out).not.toContain("directories"); // trailing summary dropped
    expect(out).toContain("├──");
    expect(out).toContain("main.rs");
  });
  it("smartTruncate: keeps head+tail, drops middle", () => {
    const input = Array.from({ length: 400 }, (_, i) => `line ${i}`).join("\n");
    const out = smartTruncate(input);
    expect(out).toContain("line 0");
    expect(out).toContain("line 399");
    expect(out).toContain("lines truncated");
    expect(out.length).toBeLessThan(input.length);
  });
  it("smartTruncate: passes through small input", () => {
    const input = Array.from({ length: 10 }, (_, i) => `line ${i}`).join("\n");
    expect(smartTruncate(input)).toBe(input);
  });
  it("readNumbered: compacts very long line-numbered dump", () => {
    const lines = [];
    for (let i = 1; i <= 400; i++) lines.push(` ${i}|content ${i}`);
    const input = lines.join("\n");
    const out = readNumbered(input);
    expect(out).toContain("1|content 1");
    expect(out).toContain("400|content 400");
    expect(out).toContain("lines truncated");
    expect(out.length).toBeLessThan(input.length);
  });
  it("searchList: groups Cursor Glob output by parent dir", () => {
    const paths = [];
    for (let i = 0; i < 30; i++) paths.push(`- src/a/f${i}.js`);
    for (let i = 0; i < 10; i++) paths.push(`- src/b/g${i}.js`);
    const input = [
      "Result of search in '/Users/x' (total 40 files):",
      ...paths
    ].join("\n");
    const out = searchList(input);
    expect(out).toContain("Result of search in");
    expect(out).toContain("40 files in 2 dirs:");
    expect(out).toContain("src/a/ (30):");
    expect(out).toContain("src/b/ (10):");
    expect(out).toMatch(/\+\d+/); // overflow marker
    expect(out.length).toBeLessThan(input.length);
  });
});
// Detection of the JS-only extra formats (tree, ls, Cursor search list).
describe("autoDetectFilter (extras)", () => {
  it("detects tree via box-drawing glyphs", () => {
    expect(autoDetectFilter(".\n├── src\n│ └── main.rs\n└── Cargo.toml\n").filterName).toBe("tree");
  });
  it("detects ls via total + perms rows", () => {
    const input = [
      "total 48",
      "drwxr-xr-x 2 user staff 64 Jan 1 12:00 src",
      "-rw-r--r-- 1 user staff 1234 Jan 1 12:00 main.js",
      "-rw-r--r-- 1 user staff 5678 Jan 1 12:00 README.md"
    ].join("\n");
    expect(autoDetectFilter(input).filterName).toBe("ls");
  });
  it("detects Cursor search list", () => {
    const input = "Result of search in '/x' (total 3 files):\n- a/b.js\n- a/c.js\n- a/d.js";
    expect(autoDetectFilter(input).filterName).toBe("search-list");
  });
});
// safeApply must always degrade to passthrough — never propagate a filter failure.
describe("safeApply", () => {
  it("returns input if filter throws", () => {
    const out = safeApply(() => { throw new Error("boom"); }, "hello");
    expect(out).toBe("hello");
  });
  it("returns input if filter returns non-string", () => {
    const out = safeApply(() => 42, "hello");
    expect(out).toBe("hello");
  });
});
// With the flag off, compressMessages must be a no-op and signal it via null.
describe("compressMessages (disabled)", () => {
  beforeEach(() => setRtkEnabled(false));
  it("returns null when disabled", () => {
    const body = { messages: [{ role: "tool", tool_call_id: "x", content: makeLongDiff() }] };
    expect(compressMessages(body)).toBeNull();
  });
});
// End-to-end behavior of the message walker: it mutates tool_result content
// in place (OpenAI and Claude shapes) and reports savings stats.
describe("compressMessages (enabled)", () => {
  beforeEach(() => setRtkEnabled(true));
  it("compresses OpenAI tool message (string content)", () => {
    const big = makeLongDiff();
    const body = { messages: [{ role: "tool", tool_call_id: "call_1", content: big }] };
    const stats = compressMessages(body);
    expect(stats.hits.length).toBeGreaterThan(0);
    expect(body.messages[0].content.length).toBeLessThan(big.length);
    expect(stats.bytesBefore).toBeGreaterThan(stats.bytesAfter);
  });
  it("compresses Claude string-form tool_result", () => {
    const big = makeLongDiff();
    const body = {
      messages: [{
        role: "user",
        content: [{ type: "tool_result", tool_use_id: "toolu_1", content: big }]
      }]
    };
    const stats = compressMessages(body);
    expect(stats.hits.length).toBeGreaterThan(0);
    expect(body.messages[0].content[0].content.length).toBeLessThan(big.length);
  });
  it("compresses Claude array-form tool_result text parts", () => {
    const big = makeLongDiff();
    const body = {
      messages: [{
        role: "user",
        content: [{
          type: "tool_result",
          tool_use_id: "toolu_1",
          content: [{ type: "text", text: big }, { type: "text", text: "unchanged short" }]
        }]
      }]
    };
    const stats = compressMessages(body);
    expect(stats.hits.length).toBeGreaterThan(0);
    expect(body.messages[0].content[0].content[0].text.length).toBeLessThan(big.length);
    // short part unchanged
    expect(body.messages[0].content[0].content[1].text).toBe("unchanged short");
  });
  it("skips is_error tool_result", () => {
    const big = makeLongDiff();
    const body = {
      messages: [{
        role: "user",
        content: [{ type: "tool_result", tool_use_id: "toolu_1", content: big, is_error: true }]
      }]
    };
    const stats = compressMessages(body);
    expect(stats.hits.length).toBe(0);
    expect(body.messages[0].content[0].content).toBe(big);
  });
  it("skips below MIN_COMPRESS_SIZE (<500 bytes)", () => {
    const small = "diff --git a/x b/x\n@@ -1 +1 @@\n+a";
    const body = { messages: [{ role: "tool", tool_call_id: "x", content: small }] };
    const stats = compressMessages(body);
    expect(stats.hits.length).toBe(0);
    expect(body.messages[0].content).toBe(small);
  });
  it("never produces empty content (R14 guard)", () => {
    const input = "a".repeat(1000);
    const body = { messages: [{ role: "tool", tool_call_id: "x", content: input }] };
    compressMessages(body);
    expect(body.messages[0].content.length).toBeGreaterThan(0);
  });
  it("skips when body has no messages", () => {
    expect(compressMessages({})).toBeNull();
    expect(compressMessages({ messages: null })).toBeNull();
  });
  it("handles mix of messages without crashing", () => {
    const body = {
      messages: [
        { role: "system", content: "you are" },
        { role: "user", content: "hi" },
        { role: "assistant", content: null, tool_calls: [{ id: "c1", function: { name: "x", arguments: "{}" } }] },
        { role: "tool", tool_call_id: "c1", content: makeGrepOutput() },
        { role: "user", content: [{ type: "text", text: "next" }] }
      ]
    };
    const stats = compressMessages(body);
    expect(stats).not.toBeNull();
    expect(stats.hits.length).toBeGreaterThan(0);
  });
});
// Log-line formatter: silent when nothing was compressed, otherwise a
// human-readable savings summary.
describe("formatRtkLog", () => {
  it("returns null when no hits", () => {
    expect(formatRtkLog({ bytesBefore: 0, bytesAfter: 0, hits: [] })).toBeNull();
  });
  it("formats savings line with percentage", () => {
    const line = formatRtkLog({ bytesBefore: 1000, bytesAfter: 400, hits: [{ filter: "git-diff" }] });
    expect(line).toContain("saved 600B");
    expect(line).toContain("60.0%");
    expect(line).toContain("git-diff");
  });
});