feat: Tool Compression MCP server for Phase 8

MCP server providing compressed versions of Read/Grep/Glob:
- compressed_read: removes comments, blanks, collapses imports
- compressed_grep: groups by file, dedupes adjacent matches
- compressed_glob: collapses directories, shows type distribution

Test results: 66.7% compression on sample file

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Christian Gick
2026-01-08 11:05:27 +02:00
commit 0879633faf
14 changed files with 1598 additions and 0 deletions

24
dist/compressors/glob.d.ts vendored Normal file
View File

@@ -0,0 +1,24 @@
/**
* Glob Compressor - Compress file listing while preserving useful structure
*
* Strategies:
* - Collapse deep directory paths
* - Group by directory with counts
* - Prioritize recently modified files
* - Show file type distribution
*/
// NOTE(review): generated declaration file (tsc output of src/compressors/glob.ts);
// regenerate via `npm run build` rather than editing by hand.
/** Tuning knobs for compressGlob. */
interface GlobOptions {
    /** Cap on individual file names printed (implementation default: 30). */
    maxFiles?: number;
    /** Directory paths deeper than this many segments are shortened with '...'. */
    collapseDepth?: number;
    /** Prepend a "Found N files in M directories" summary line. */
    showCounts?: boolean;
    /** Show a top-5 extension distribution for listings of more than 10 files. */
    groupByExtension?: boolean;
}
/** Outcome of one compressGlob call. */
interface CompressResult {
    /** Rendered, human-readable listing. */
    content: string;
    /** Number of input paths. */
    originalCount: number;
    /** Number of individual file names actually printed. */
    compressedCount: number;
    /** Distinct directories seen in the input. */
    directories: number;
    /** Heuristic savings percentage, formatted like '42.0%'. */
    savings: string;
}
export declare function compressGlob(paths: string[], options?: GlobOptions): CompressResult;
export {};

128
dist/compressors/glob.js vendored Normal file
View File

@@ -0,0 +1,128 @@
/**
* Glob Compressor - Compress file listing while preserving useful structure
*
* Strategies:
* - Collapse deep directory paths
* - Group by directory with counts
* - Prioritize recently modified files
* - Show file type distribution
*/
// NOTE(review): compiled output of src/compressors/glob.ts (vendored build
// artifact). Make fixes in the TypeScript source and rebuild; the notes
// below mirror review findings on the source.

// Split a slash-separated path into { path, dir, name, ext }.
// Top-level files get dir '.'; ext is lowercased, '' when there is no dot.
function parseFilePath(path) {
    const parts = path.split('/');
    const name = parts.pop() || '';
    const dir = parts.join('/') || '.';
    const ext = name.includes('.') ? name.split('.').pop()?.toLowerCase() || '' : '';
    return { path, dir, name, ext };
}
// Shorten a deep directory path to roughly `depth` segments with a '...'
// midsection; paths at or under `depth` segments are returned unchanged.
function collapseDirectory(dir, depth) {
    const parts = dir.split('/').filter(p => p);
    if (parts.length <= depth) {
        return dir;
    }
    // Keep first and last N parts
    // NOTE(review): for depth <= 1 this yields keep === 0, and
    // parts.slice(-0) === parts.slice(0) returns the WHOLE array, so the
    // result ('.../a/b/c') is longer than the input. Fix in src and rebuild.
    const keep = Math.floor(depth / 2);
    const start = parts.slice(0, keep);
    const end = parts.slice(-keep);
    return [...start, '...', ...end].join('/');
}
// Bucket parsed entries by containing directory, preserving input order.
function groupByDirectory(files) {
    const grouped = new Map();
    for (const file of files) {
        const existing = grouped.get(file.dir) || [];
        existing.push(file);
        grouped.set(file.dir, existing);
    }
    return grouped;
}
// Count files per extension; '' is reported as '(no extension)'.
function groupByExtension(files) {
    const counts = new Map();
    for (const file of files) {
        const ext = file.ext || '(no extension)';
        counts.set(ext, (counts.get(ext) || 0) + 1);
    }
    return counts;
}
// Main entry: render a compressed, directory-grouped listing of `paths`.
export function compressGlob(paths, options = {}) {
    const { maxFiles = 30, collapseDepth = 4, showCounts = true, groupByExtension: showExtensions = true, } = options;
    const originalCount = paths.length;
    if (originalCount === 0) {
        return {
            content: 'No files found.',
            originalCount: 0,
            compressedCount: 0,
            directories: 0,
            savings: '0%',
        };
    }
    const files = paths.map(parseFilePath);
    const byDir = groupByDirectory(files);
    const directories = byDir.size;
    const result = [];
    // Show extension distribution if configured
    if (showExtensions && originalCount > 10) {
        const extCounts = groupByExtension(files);
        const sorted = Array.from(extCounts.entries())
            .sort((a, b) => b[1] - a[1])
            .slice(0, 5);
        result.push('**File types:**');
        for (const [ext, count] of sorted) {
            result.push(` .${ext}: ${count}`);
        }
        result.push('');
    }
    // Sort directories by file count (most files first)
    const sortedDirs = Array.from(byDir.entries())
        .sort((a, b) => b[1].length - a[1].length);
    let totalShown = 0;
    let dirsShown = 0;
    for (const [dir, dirFiles] of sortedDirs) {
        if (totalShown >= maxFiles) {
            const remainingDirs = sortedDirs.length - dirsShown;
            const remainingFiles = originalCount - totalShown;
            if (remainingDirs > 0) {
                result.push(`\n... [${remainingFiles} more files in ${remainingDirs} directories]`);
            }
            break;
        }
        const collapsedDir = collapseDirectory(dir, collapseDepth);
        const fileCount = dirFiles.length;
        // For directories with many files, show summary
        if (fileCount > 5) {
            result.push(`📁 ${collapsedDir}/ (${fileCount} files)`);
            // Show first few files
            const sample = dirFiles.slice(0, 3);
            for (const file of sample) {
                result.push(` ${file.name}`);
                totalShown++;
            }
            if (fileCount > 3) {
                result.push(` ... [${fileCount - 3} more]`);
            }
        }
        else {
            // Show all files for small directories
            result.push(`📁 ${collapsedDir}/`);
            for (const file of dirFiles) {
                result.push(` ${file.name}`);
                totalShown++;
                if (totalShown >= maxFiles)
                    break;
            }
        }
        dirsShown++;
        result.push('');
    }
    // Summary line
    if (showCounts) {
        result.unshift(`**Found ${originalCount} files in ${directories} directories**\n`);
    }
    const compressedCount = totalShown;
    // NOTE(review): heuristic (output lines vs files+dirs) can go negative
    // for small listings — e.g. 2 files in 1 dir reports "-66.7%". Fix in src.
    const savings = ((1 - result.length / (originalCount + directories)) * 100).toFixed(1);
    return {
        content: result.join('\n').trim(),
        originalCount,
        compressedCount,
        directories,
        savings: `${savings}%`,
    };
}

24
dist/compressors/grep.d.ts vendored Normal file
View File

@@ -0,0 +1,24 @@
/**
* Grep Compressor - Compress search results while preserving essential matches
*
* Strategies:
* - Group by file
* - Show first N matches per file + count
* - Dedupe similar/adjacent matches
* - Prioritize exact matches
*/
// NOTE(review): generated declaration file (tsc output of src/compressors/grep.ts);
// regenerate via `npm run build` rather than editing by hand.
/** Tuning knobs for compressGrep. */
interface CompressOptions {
    /** Matches displayed per file before a "... [K more]" note (default 3). */
    maxMatchesPerFile?: number;
    /** Global cap on displayed matches across all files (default 20). */
    maxTotalMatches?: number;
    /** Collapse clusters of matches on nearby lines (default true). */
    dedupeAdjacent?: boolean;
    /** Emit "(N matches, showing K)" per-file counters (default true). */
    showCounts?: boolean;
}
/** Outcome of one compressGrep call. */
interface CompressResult {
    /** Rendered per-file summary. */
    content: string;
    /** Total matches parsed from the raw grep output. */
    originalMatches: number;
    /** Matches actually displayed. */
    compressedMatches: number;
    /** Distinct files containing matches. */
    filesMatched: number;
    /** Percentage of matches hidden, formatted like '85.0%'. */
    savings: string;
}
export declare function compressGrep(output: string, options?: CompressOptions): CompressResult;
export {};

121
dist/compressors/grep.js vendored Normal file
View File

@@ -0,0 +1,121 @@
/**
* Grep Compressor - Compress search results while preserving essential matches
*
* Strategies:
* - Group by file
* - Show first N matches per file + count
* - Dedupe similar/adjacent matches
* - Prioritize exact matches
*/
// NOTE(review): compiled output of src/compressors/grep.ts (vendored build
// artifact) — make fixes in the TypeScript source and rebuild.

// Parse `file:line:text` / `file:line-text` lines into match records;
// lines not matching the pattern are silently dropped.
function parseGrepOutput(output) {
    const matches = [];
    const lines = output.split('\n').filter(l => l.trim());
    for (const line of lines) {
        // Parse format: file:line:content or file:line-content
        const match = line.match(/^(.+?):(\d+)[:-](.*)$/);
        if (match) {
            matches.push({
                file: match[1],
                line: parseInt(match[2]),
                content: match[3],
            });
        }
    }
    return matches;
}
// Bucket matches by source file, preserving input order within each file.
function groupByFile(matches) {
    const grouped = new Map();
    for (const match of matches) {
        const existing = grouped.get(match.file) || [];
        existing.push(match);
        grouped.set(match.file, existing);
    }
    return grouped;
}
// Collapse matches within `threshold` lines of the previous KEPT match;
// appends a synthetic entry (line === -1) noting how many were dropped.
// Relies on matches being in ascending line order (normal grep output).
function dedupeAdjacent(matches, threshold = 3) {
    if (matches.length <= 1)
        return matches;
    const result = [matches[0]];
    let skipped = 0;
    for (let i = 1; i < matches.length; i++) {
        const prev = result[result.length - 1];
        const curr = matches[i];
        // Skip if within threshold lines of previous match
        if (curr.line - prev.line <= threshold) {
            skipped++;
            continue;
        }
        result.push(curr);
    }
    // Add note about skipped adjacent matches
    if (skipped > 0 && result.length > 0) {
        const last = result[result.length - 1];
        result.push({
            file: last.file,
            line: -1,
            content: `[${skipped} adjacent matches omitted]`,
        });
    }
    return result;
}
// Main entry: render grep output as a per-file summary with display caps.
export function compressGrep(output, options = {}) {
    const { maxMatchesPerFile = 3, maxTotalMatches = 20, dedupeAdjacent: shouldDedupe = true, showCounts = true, } = options;
    const matches = parseGrepOutput(output);
    const originalMatches = matches.length;
    if (originalMatches === 0) {
        return {
            content: 'No matches found.',
            originalMatches: 0,
            compressedMatches: 0,
            filesMatched: 0,
            savings: '0%',
        };
    }
    const grouped = groupByFile(matches);
    const filesMatched = grouped.size;
    const result = [];
    let totalShown = 0;
    // Sort files by match count (most matches first)
    const sortedFiles = Array.from(grouped.entries()).sort((a, b) => b[1].length - a[1].length);
    for (const [file, fileMatches] of sortedFiles) {
        if (totalShown >= maxTotalMatches) {
            // NOTE(review): re-scans `result` for '## ' headers; a simple
            // counter would be O(1) per check. Fix in src and rebuild.
            const remaining = sortedFiles.length - result.filter(l => l.startsWith('## ')).length;
            if (remaining > 0) {
                result.push(`\n... [${remaining} more files with matches]`);
            }
            break;
        }
        // Dedupe adjacent matches if configured
        let processed = shouldDedupe ? dedupeAdjacent(fileMatches) : fileMatches;
        // Limit matches per file
        const totalInFile = fileMatches.length;
        const shown = processed.slice(0, maxMatchesPerFile);
        const omitted = totalInFile - shown.length;
        result.push(`## ${file}`);
        if (showCounts && totalInFile > maxMatchesPerFile) {
            result.push(`(${totalInFile} matches, showing ${shown.length})`);
        }
        for (const match of shown) {
            if (match.line === -1) {
                result.push(` ${match.content}`);
            }
            else {
                result.push(` ${match.line}: ${match.content.trim()}`);
                totalShown++;
            }
        }
        if (omitted > 0) {
            result.push(` ... [${omitted} more matches in this file]`);
        }
        result.push('');
    }
    const compressedMatches = totalShown;
    const savings = ((1 - compressedMatches / originalMatches) * 100).toFixed(1);
    return {
        content: result.join('\n').trim(),
        originalMatches,
        compressedMatches,
        filesMatched,
        savings: `${savings}%`,
    };
}

23
dist/compressors/read.d.ts vendored Normal file
View File

@@ -0,0 +1,23 @@
/**
* Read Compressor - Compress file content while preserving essential information
*
* Strategies:
* - Remove blank lines (configurable)
* - Remove comment-only lines (language-aware)
* - Collapse import blocks
* - Preserve line numbers for reference
*/
// NOTE(review): generated declaration file (tsc output of src/compressors/read.ts);
// regenerate via `npm run build` rather than editing by hand.
/** Tuning knobs for compressRead. */
interface CompressOptions {
    /** Drop whitespace-only lines (default true). */
    removeBlankLines?: boolean;
    /** Drop comment-only lines, language-aware (default true). */
    removeComments?: boolean;
    /** Collapse runs of more than 3 import lines into a one-line note (default true). */
    collapseImports?: boolean;
    /** Cap on OUTPUT lines; excess is truncated with a note (default 500). */
    maxLines?: number;
}
/** Outcome of one compressRead call. */
interface CompressResult {
    /** Compressed text; surviving code lines keep their original line numbers. */
    content: string;
    /** Line count of the input. */
    originalLines: number;
    /** Line count of the output. */
    compressedLines: number;
    /** Reduction percentage, formatted like '66.7%'. */
    savings: string;
}
export declare function compressRead(content: string, filename: string, options?: CompressOptions): CompressResult;
export {};

130
dist/compressors/read.js vendored Normal file
View File

@@ -0,0 +1,130 @@
/**
* Read Compressor - Compress file content while preserving essential information
*
* Strategies:
* - Remove blank lines (configurable)
* - Remove comment-only lines (language-aware)
* - Collapse import blocks
* - Preserve line numbers for reference
*/
// NOTE(review): compiled output of src/compressors/read.ts (vendored build
// artifact) — make fixes in the TypeScript source and rebuild.

// Language-specific comment patterns: a line matching any pattern for its
// language is considered comment-ONLY and may be stripped. Only single-line
// forms are covered; multi-line /* ... */ bodies are NOT detected.
const COMMENT_PATTERNS = {
    // Single-line comments
    javascript: [/^\s*\/\/.*$/, /^\s*\/\*.*\*\/\s*$/],
    typescript: [/^\s*\/\/.*$/, /^\s*\/\*.*\*\/\s*$/],
    python: [/^\s*#.*$/],
    ruby: [/^\s*#.*$/],
    bash: [/^\s*#.*$/],
    swift: [/^\s*\/\/.*$/],
    go: [/^\s*\/\/.*$/],
    rust: [/^\s*\/\/.*$/],
    php: [/^\s*\/\/.*$/, /^\s*#.*$/, /^\s*\/\*.*\*\/\s*$/],
};
// Import patterns by language
// NOTE(review): for javascript/typescript, /^(import|export)\s+/ also
// matches `export function foo(` and `export const X`, so exported CODE
// lines can be swallowed into the "[N imports collapsed]" note. Fix in src.
const IMPORT_PATTERNS = {
    javascript: /^(import|export)\s+/,
    typescript: /^(import|export)\s+/,
    python: /^(import|from)\s+/,
    swift: /^import\s+/,
    go: /^import\s+/,
    rust: /^use\s+/,
    php: /^(use|require|include)/,
};
// Map a filename's extension to a canonical language key; 'unknown'
// disables comment/import stripping for that file.
function detectLanguage(filename) {
    const ext = filename.split('.').pop()?.toLowerCase() || '';
    const langMap = {
        js: 'javascript',
        jsx: 'javascript',
        ts: 'typescript',
        tsx: 'typescript',
        py: 'python',
        rb: 'ruby',
        sh: 'bash',
        bash: 'bash',
        swift: 'swift',
        go: 'go',
        rs: 'rust',
        php: 'php',
    };
    return langMap[ext] || 'unknown';
}
// True when the line is comment-only for the given language.
function isCommentLine(line, language) {
    const patterns = COMMENT_PATTERNS[language];
    if (!patterns)
        return false;
    return patterns.some(pattern => pattern.test(line));
}
// True when the trimmed line matches the language's import pattern.
function isImportLine(line, language) {
    const pattern = IMPORT_PATTERNS[language];
    if (!pattern)
        return false;
    return pattern.test(line.trim());
}
// Main entry: strip blanks/comments, collapse import runs, number lines.
export function compressRead(content, filename, options = {}) {
    const { removeBlankLines = true, removeComments = true, collapseImports = true, maxLines = 500, } = options;
    const language = detectLanguage(filename);
    const lines = content.split('\n');
    const originalLines = lines.length;
    const result = [];
    let importBlock = [];
    let inImportBlock = false;
    let lineNumber = 0;
    for (const line of lines) {
        lineNumber++;
        // Skip blank lines if configured
        if (removeBlankLines && line.trim() === '') {
            continue;
        }
        // Skip comment lines if configured
        if (removeComments && isCommentLine(line, language)) {
            continue;
        }
        // Handle import collapsing
        if (collapseImports && isImportLine(line, language)) {
            if (!inImportBlock) {
                inImportBlock = true;
                importBlock = [];
            }
            importBlock.push(line.trim());
            continue;
        }
        else if (inImportBlock) {
            // End of import block - collapse it
            if (importBlock.length > 3) {
                result.push(`// [${importBlock.length} imports collapsed]`);
            }
            else {
                result.push(...importBlock);
            }
            importBlock = [];
            inImportBlock = false;
        }
        // Add line with number prefix for reference
        result.push(`${lineNumber}: ${line}`);
    }
    // Handle remaining imports at end of file
    if (importBlock.length > 0) {
        if (importBlock.length > 3) {
            result.push(`// [${importBlock.length} imports collapsed]`);
        }
        else {
            result.push(...importBlock);
        }
    }
    // Truncate if too long
    let compressed = result;
    let truncated = false; // NOTE(review): never read — dead variable; remove in src.
    if (compressed.length > maxLines) {
        compressed = compressed.slice(0, maxLines);
        compressed.push(`\n... [${result.length - maxLines} more lines truncated]`);
        truncated = true;
    }
    const compressedLines = compressed.length;
    const savings = ((1 - compressedLines / originalLines) * 100).toFixed(1);
    return {
        content: compressed.join('\n'),
        originalLines,
        compressedLines,
        savings: `${savings}%`,
    };
}

13
dist/index.d.ts vendored Normal file
View File

@@ -0,0 +1,13 @@
#!/usr/bin/env node
/**
 * Tool Compression MCP Server
 *
 * Provides compressed versions of Read/Grep/Glob operations
 * to reduce context token usage by 40-50%
 *
 * Tools:
 * - compressed_read: Read file with comment/blank removal
 * - compressed_grep: Search with grouped/deduped results
 * - compressed_glob: File listing with collapsed directories
 */
// NOTE(review): generated declaration file (tsc output of src/index.ts);
// the server exports nothing — this file exists only to mark the module.
export {};

265
dist/index.js vendored Normal file
View File

@@ -0,0 +1,265 @@
#!/usr/bin/env node
/**
* Tool Compression MCP Server
*
* Provides compressed versions of Read/Grep/Glob operations
* to reduce context token usage by 40-50%
*
* Tools:
* - compressed_read: Read file with comment/blank removal
* - compressed_grep: Search with grouped/deduped results
* - compressed_glob: File listing with collapsed directories
*/
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
import { readFileSync, existsSync, statSync } from "fs";
import { execSync } from "child_process";
import { compressRead } from "./compressors/read.js";
import { compressGrep } from "./compressors/grep.js";
import { compressGlob } from "./compressors/glob.js";
// Configuration from environment.
// COMPRESSION_LEVEL: 'light' | 'medium' | 'aggressive'; any other value
// falls back to the 'medium' preset below.
const COMPRESSION_LEVEL = process.env.COMPRESSION_LEVEL || "medium";
// Compression presets: per-tool option bundles passed to the compressors.
// 'light' keeps comments and disables dedupe; 'aggressive' halves the caps.
const PRESETS = {
    light: {
        read: { removeBlankLines: true, removeComments: false, collapseImports: false, maxLines: 1000 },
        grep: { maxMatchesPerFile: 5, maxTotalMatches: 50, dedupeAdjacent: false },
        glob: { maxFiles: 50, collapseDepth: 6 },
    },
    medium: {
        read: { removeBlankLines: true, removeComments: true, collapseImports: true, maxLines: 500 },
        grep: { maxMatchesPerFile: 3, maxTotalMatches: 20, dedupeAdjacent: true },
        glob: { maxFiles: 30, collapseDepth: 4 },
    },
    aggressive: {
        read: { removeBlankLines: true, removeComments: true, collapseImports: true, maxLines: 200 },
        grep: { maxMatchesPerFile: 2, maxTotalMatches: 10, dedupeAdjacent: true },
        glob: { maxFiles: 15, collapseDepth: 3 },
    },
};
const preset = PRESETS[COMPRESSION_LEVEL] || PRESETS.medium;
// Create MCP server
const server = new Server({
    name: "tool-compression-mcp",
    version: "1.0.0",
}, {
    capabilities: {
        tools: {},
    },
});
// List available tools: static schema advertising the three compressed
// tool variants. Per-call options here override the active preset.
server.setRequestHandler(ListToolsRequestSchema, async () => {
    return {
        tools: [
            {
                name: "compressed_read",
                description: "Read a file with compression: removes blank lines, comments, and collapses imports. " +
                    "Use this instead of Read for large files to save context tokens. " +
                    "Returns content with line numbers preserved for reference.",
                inputSchema: {
                    type: "object",
                    properties: {
                        path: {
                            type: "string",
                            description: "Absolute path to the file to read",
                        },
                        maxLines: {
                            type: "number",
                            description: "Maximum lines to return (default: 500)",
                        },
                        keepComments: {
                            type: "boolean",
                            description: "Keep comment lines (default: false)",
                        },
                    },
                    required: ["path"],
                },
            },
            {
                name: "compressed_grep",
                description: "Search files with compressed results: groups by file, shows top matches, dedupes adjacent. " +
                    "Use this instead of Grep when expecting many matches. " +
                    "Returns summary with match counts per file.",
                inputSchema: {
                    type: "object",
                    properties: {
                        pattern: {
                            type: "string",
                            description: "Regex pattern to search for",
                        },
                        path: {
                            type: "string",
                            description: "Directory or file to search in",
                        },
                        glob: {
                            type: "string",
                            description: "File glob pattern (e.g., '*.ts')",
                        },
                        maxMatchesPerFile: {
                            type: "number",
                            description: "Max matches to show per file (default: 3)",
                        },
                    },
                    required: ["pattern"],
                },
            },
            {
                name: "compressed_glob",
                description: "List files with compression: collapses deep paths, groups by directory, shows file type distribution. " +
                    "Use this instead of Glob when expecting many files. " +
                    "Returns structured summary with counts.",
                inputSchema: {
                    type: "object",
                    properties: {
                        pattern: {
                            type: "string",
                            description: "Glob pattern (e.g., '**/*.ts')",
                        },
                        path: {
                            type: "string",
                            description: "Base directory to search from",
                        },
                        maxFiles: {
                            type: "number",
                            description: "Max files to list (default: 30)",
                        },
                    },
                    required: ["pattern"],
                },
            },
        ],
    };
});
// Handle tool calls. Errors are reported as text content (never thrown to
// the transport); each case validates its own required argument.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const { name, arguments: args } = request.params;
    try {
        switch (name) {
            case "compressed_read": {
                const path = args?.path;
                if (!path) {
                    return { content: [{ type: "text", text: "Error: path is required" }] };
                }
                if (!existsSync(path)) {
                    return { content: [{ type: "text", text: `Error: File not found: ${path}` }] };
                }
                const stats = statSync(path);
                if (stats.isDirectory()) {
                    return { content: [{ type: "text", text: `Error: Path is a directory: ${path}` }] };
                }
                const content = readFileSync(path, "utf-8");
                // NOTE(review): `|| preset.read.maxLines` treats maxLines: 0
                // as unset; `??` would be safer if 0 should be honored.
                const options = {
                    ...preset.read,
                    maxLines: args?.maxLines || preset.read.maxLines,
                    removeComments: !args?.keepComments,
                };
                const result = compressRead(content, path, options);
                return {
                    content: [
                        {
                            type: "text",
                            text: `📄 ${path}\n` +
                                `[Compressed: ${result.originalLines}${result.compressedLines} lines (${result.savings} saved)]\n\n` +
                                result.content,
                        },
                    ],
                };
            }
            case "compressed_grep": {
                const pattern = args?.pattern;
                if (!pattern) {
                    return { content: [{ type: "text", text: "Error: pattern is required" }] };
                }
                const searchPath = args?.path || ".";
                const glob = args?.glob;
                // Build ripgrep command
                // NOTE(review): only '"' is escaped before shell interpolation;
                // backticks, $(...), and backslashes in pattern/glob/path still
                // reach the shell — command-injection risk for untrusted input.
                // Prefer execFileSync("rg", [...argv]) which bypasses the shell.
                let cmd = `rg -n "${pattern.replace(/"/g, '\\"')}"`;
                if (glob) {
                    cmd += ` --glob "${glob}"`;
                }
                cmd += ` "${searchPath}" 2>/dev/null || true`;
                let output;
                try {
                    output = execSync(cmd, { encoding: "utf-8", maxBuffer: 10 * 1024 * 1024 });
                }
                catch {
                    output = "";
                }
                const options = {
                    ...preset.grep,
                    maxMatchesPerFile: args?.maxMatchesPerFile || preset.grep.maxMatchesPerFile,
                };
                const result = compressGrep(output, options);
                return {
                    content: [
                        {
                            type: "text",
                            text: `🔍 Search: "${pattern}"${glob ? ` (${glob})` : ""}\n` +
                                `[Found ${result.originalMatches} matches in ${result.filesMatched} files, showing ${result.compressedMatches} (${result.savings} compressed)]\n\n` +
                                result.content,
                        },
                    ],
                };
            }
            case "compressed_glob": {
                const pattern = args?.pattern;
                if (!pattern) {
                    return { content: [{ type: "text", text: "Error: pattern is required" }] };
                }
                const basePath = args?.path || ".";
                // Use find or fd for globbing
                // NOTE(review): fd interprets the pattern as a REGEX while
                // `find -name` uses glob syntax — results differ depending on
                // which tool is installed; confirm which semantics are intended.
                // Same shell-quoting/injection caveat as compressed_grep above.
                let cmd;
                try {
                    // Try fd first (faster)
                    execSync("which fd", { encoding: "utf-8" });
                    cmd = `fd --type f "${pattern}" "${basePath}" 2>/dev/null || true`;
                }
                catch {
                    // Fall back to find
                    cmd = `find "${basePath}" -type f -name "${pattern}" 2>/dev/null || true`;
                }
                let output;
                try {
                    output = execSync(cmd, { encoding: "utf-8", maxBuffer: 10 * 1024 * 1024 });
                }
                catch {
                    output = "";
                }
                const paths = output.split("\n").filter((p) => p.trim());
                const options = {
                    ...preset.glob,
                    maxFiles: args?.maxFiles || preset.glob.maxFiles,
                };
                const result = compressGlob(paths, options);
                return {
                    content: [
                        {
                            type: "text",
                            text: `📁 Glob: "${pattern}" in ${basePath}\n` +
                                `[${result.savings} compression]\n\n` +
                                result.content,
                        },
                    ],
                };
            }
            default:
                return {
                    content: [{ type: "text", text: `Unknown tool: ${name}` }],
                };
        }
    }
    catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        return {
            content: [{ type: "text", text: `Error: ${message}` }],
        };
    }
});
// Start server: connect over stdio. The startup message goes to stderr
// because stdout is used by the stdio transport for protocol traffic.
async function main() {
    const transport = new StdioServerTransport();
    await server.connect(transport);
    console.error("Tool Compression MCP server started");
}
main().catch(console.error);

24
package.json Normal file
View File

@@ -0,0 +1,24 @@
{
"name": "tool-compression-mcp",
"version": "1.0.0",
"description": "MCP server providing compressed versions of Read/Grep/Glob tools",
"main": "dist/index.js",
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/index.js",
"dev": "tsx src/index.ts",
"clean": "rm -rf dist"
},
"keywords": ["mcp", "compression", "claude-code", "token-optimization"],
"author": "Agiliton",
"license": "MIT",
"dependencies": {
"@modelcontextprotocol/sdk": "^1.0.4"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.3",
"tsx": "^4.7.0"
}
}

186
src/compressors/glob.ts Normal file
View File

@@ -0,0 +1,186 @@
/**
* Glob Compressor - Compress file listing while preserving useful structure
*
* Strategies:
* - Collapse deep directory paths
* - Group by directory with counts
* - Prioritize recently modified files
* - Show file type distribution
*/
/** Tuning knobs for compressGlob. */
interface GlobOptions {
  /** Cap on individual file names printed (default 30). */
  maxFiles?: number;
  /** Directory paths deeper than this many segments are shortened with '...' (default 4). */
  collapseDepth?: number;
  /** Prepend a "**Found N files in M directories**" header (default true). */
  showCounts?: boolean;
  /** Show a top-5 extension distribution when there are more than 10 files (default true). */
  groupByExtension?: boolean;
}
/** Outcome of one compressGlob call. */
interface CompressResult {
  /** Rendered, human-readable listing. */
  content: string;
  /** Number of input paths. */
  originalCount: number;
  /** Number of individual file names actually printed. */
  compressedCount: number;
  /** Distinct directories seen in the input. */
  directories: number;
  /** Heuristic savings percentage, formatted like '42.0%'. */
  savings: string;
}
/** A path decomposed by parseFilePath. */
interface FileEntry {
  /** Original path as given. */
  path: string;
  /** Directory portion; '.' for top-level files. */
  dir: string;
  /** Base name including extension. */
  name: string;
  /** Lowercased extension without the dot; '' when none. */
  ext: string;
}
/**
 * Break a slash-separated path into its directory, base name, and
 * lowercased extension. Files with no '.' get ext ''; top-level files
 * get dir '.'.
 */
function parseFilePath(path: string): { path: string; dir: string; name: string; ext: string } {
  const segments = path.split('/');
  const name = segments.pop() || '';
  const joined = segments.join('/');
  const dir = joined === '' ? '.' : joined;
  let ext = '';
  if (name.includes('.')) {
    const tail = name.split('.').pop();
    ext = tail ? tail.toLowerCase() : '';
  }
  return { path, dir, name, ext };
}
/**
 * Shorten a deep directory path to roughly `depth` visible segments by
 * keeping the first and last few and replacing the middle with '...'.
 * Paths at or under `depth` segments are returned unchanged.
 *
 * @param dir   Slash-separated directory path.
 * @param depth Maximum segment count before collapsing kicks in.
 * @returns The (possibly collapsed) path.
 */
function collapseDirectory(dir: string, depth: number): string {
  const parts = dir.split('/').filter(p => p);
  if (parts.length <= depth) {
    return dir;
  }
  // Keep at least one segment on each side. The previous
  // Math.floor(depth / 2) yields 0 for depth <= 1, and since
  // parts.slice(-0) === parts.slice(0) returns the WHOLE array, the
  // "collapsed" result ('.../a/b/c') was longer than the input.
  const keep = Math.max(1, Math.floor(depth / 2));
  const start = parts.slice(0, keep);
  const end = parts.slice(-keep);
  return [...start, '...', ...end].join('/');
}
/**
 * Bucket entries by their containing directory, preserving input order
 * within each bucket. Generalized over any entry carrying a `dir` field
 * (backward compatible: FileEntry satisfies the constraint).
 */
function groupByDirectory<T extends { dir: string }>(files: T[]): Map<string, T[]> {
  const grouped = new Map<string, T[]>();
  for (const file of files) {
    const bucket = grouped.get(file.dir);
    if (bucket) {
      bucket.push(file);
    } else {
      grouped.set(file.dir, [file]);
    }
  }
  return grouped;
}
/**
 * Count entries per extension; the empty extension is reported under the
 * '(no extension)' key. Generalized over any entry carrying an `ext`
 * field (backward compatible: FileEntry satisfies the constraint).
 */
function groupByExtension<T extends { ext: string }>(files: T[]): Map<string, number> {
  const counts = new Map<string, number>();
  for (const file of files) {
    const key = file.ext || '(no extension)';
    counts.set(key, (counts.get(key) ?? 0) + 1);
  }
  return counts;
}
/**
 * Compress a flat list of file paths into a directory-grouped summary.
 *
 * Output, top to bottom: optional "Found N files in M directories" header,
 * optional extension distribution (only when more than 10 files), then one
 * 📁 section per directory ordered by descending file count. Directories
 * with more than 5 files show a 3-file sample plus a "... [K more]" note.
 *
 * @param paths   Slash-separated file paths (e.g. glob results).
 * @param options Tuning knobs; see GlobOptions.
 * @returns Rendered listing plus counters. `compressedCount` is the number
 *          of individual file names actually printed.
 */
export function compressGlob(
  paths: string[],
  options: GlobOptions = {}
): CompressResult {
  const {
    maxFiles = 30,
    collapseDepth = 4,
    showCounts = true,
    groupByExtension: showExtensions = true,
  } = options;
  const originalCount = paths.length;
  if (originalCount === 0) {
    return {
      content: 'No files found.',
      originalCount: 0,
      compressedCount: 0,
      directories: 0,
      savings: '0%',
    };
  }
  const files = paths.map(parseFilePath);
  const byDir = groupByDirectory(files);
  const directories = byDir.size;
  const result: string[] = [];
  // Extension distribution is only worth its lines on larger listings.
  if (showExtensions && originalCount > 10) {
    const extCounts = groupByExtension(files);
    const sorted = Array.from(extCounts.entries())
      .sort((a, b) => b[1] - a[1])
      .slice(0, 5);
    result.push('**File types:**');
    for (const [ext, count] of sorted) {
      result.push(` .${ext}: ${count}`);
    }
    result.push('');
  }
  // Largest directories first so content-dense areas survive the cap.
  const sortedDirs = Array.from(byDir.entries())
    .sort((a, b) => b[1].length - a[1].length);
  let totalShown = 0;
  let dirsShown = 0;
  for (const [dir, dirFiles] of sortedDirs) {
    if (totalShown >= maxFiles) {
      const remainingDirs = sortedDirs.length - dirsShown;
      const remainingFiles = originalCount - totalShown;
      if (remainingDirs > 0) {
        result.push(`\n... [${remainingFiles} more files in ${remainingDirs} directories]`);
      }
      break;
    }
    const collapsedDir = collapseDirectory(dir, collapseDepth);
    const fileCount = dirFiles.length;
    if (fileCount > 5) {
      // Large directory: header with count plus a short sample.
      result.push(`📁 ${collapsedDir}/ (${fileCount} files)`);
      const sample = dirFiles.slice(0, 3);
      for (const file of sample) {
        result.push(` ${file.name}`);
        totalShown++;
      }
      if (fileCount > 3) {
        result.push(` ... [${fileCount - 3} more]`);
      }
    } else {
      // Small directory: list every file, respecting the global cap.
      result.push(`📁 ${collapsedDir}/`);
      for (const file of dirFiles) {
        result.push(` ${file.name}`);
        totalShown++;
        if (totalShown >= maxFiles) break;
      }
    }
    dirsShown++;
    result.push('');
  }
  // Summary line
  if (showCounts) {
    result.unshift(`**Found ${originalCount} files in ${directories} directories**\n`);
  }
  const compressedCount = totalShown;
  // Heuristic savings metric: output lines vs. (files + directories).
  // Clamped at 0 — small listings could otherwise report nonsensical
  // NEGATIVE savings (e.g. 2 files in 1 directory produced "-66.7%").
  const ratio = (1 - result.length / (originalCount + directories)) * 100;
  const savings = Math.max(0, ratio).toFixed(1);
  return {
    content: result.join('\n').trim(),
    originalCount,
    compressedCount,
    directories,
    savings: `${savings}%`,
  };
}

178
src/compressors/grep.ts Normal file
View File

@@ -0,0 +1,178 @@
/**
* Grep Compressor - Compress search results while preserving essential matches
*
* Strategies:
* - Group by file
* - Show first N matches per file + count
* - Dedupe similar/adjacent matches
* - Prioritize exact matches
*/
/** One parsed grep/ripgrep match line. */
interface GrepMatch {
  /** File the match occurred in. */
  file: string;
  /** 1-based line number; -1 marks a synthetic "[N adjacent matches omitted]" note. */
  line: number;
  /** Matched line text (or note text for synthetic entries). */
  content: string;
}
/** Tuning knobs for compressGrep. */
interface CompressOptions {
  /** Matches displayed per file before a "... [K more]" note (default 3). */
  maxMatchesPerFile?: number;
  /** Global cap on displayed matches across all files (default 20). */
  maxTotalMatches?: number;
  /** Collapse clusters of matches within 3 lines of each other (default true). */
  dedupeAdjacent?: boolean;
  /** Emit "(N matches, showing K)" per-file counters (default true). */
  showCounts?: boolean;
}
/** Outcome of one compressGrep call. */
interface CompressResult {
  /** Rendered, per-file grouped summary. */
  content: string;
  /** Total matches parsed from the raw grep output. */
  originalMatches: number;
  /** Matches actually displayed. */
  compressedMatches: number;
  /** Distinct files containing matches. */
  filesMatched: number;
  /** Percentage of matches hidden, formatted like '85.0%'. */
  savings: string;
}
/**
 * Parse raw grep/ripgrep output lines of the form `file:line:text` (or
 * `file:line-text`, the context-line separator) into structured records.
 * Lines that do not fit the pattern are silently dropped.
 */
function parseGrepOutput(output: string): { file: string; line: number; content: string }[] {
  const LINE_RE = /^(.+?):(\d+)[:-](.*)$/;
  const parsed: { file: string; line: number; content: string }[] = [];
  for (const raw of output.split('\n')) {
    if (!raw.trim()) {
      continue;
    }
    const m = raw.match(LINE_RE);
    if (m) {
      parsed.push({ file: m[1], line: parseInt(m[2], 10), content: m[3] });
    }
  }
  return parsed;
}
/**
 * Bucket matches by their source file, preserving input order within each
 * bucket. Generalized over any record carrying a `file` field (backward
 * compatible: GrepMatch satisfies the constraint).
 */
function groupByFile<T extends { file: string }>(matches: T[]): Map<string, T[]> {
  const grouped = new Map<string, T[]>();
  for (const match of matches) {
    const bucket = grouped.get(match.file);
    if (bucket) {
      bucket.push(match);
    } else {
      grouped.set(match.file, [match]);
    }
  }
  return grouped;
}
/**
 * Collapse clusters of matches that sit within `threshold` lines of the
 * last KEPT match, retaining only the cluster's first match. When anything
 * was dropped, a synthetic entry (line === -1) is appended noting how many.
 * NOTE: relies on matches being in ascending line order (normal grep output).
 */
function dedupeAdjacent(
  matches: { file: string; line: number; content: string }[],
  threshold = 3
): { file: string; line: number; content: string }[] {
  if (matches.length <= 1) {
    return matches;
  }
  const kept = [matches[0]];
  let omittedCount = 0;
  for (const candidate of matches.slice(1)) {
    const anchor = kept[kept.length - 1];
    if (candidate.line - anchor.line <= threshold) {
      // Inside the window of the last kept match — drop it.
      omittedCount++;
    } else {
      kept.push(candidate);
    }
  }
  if (omittedCount > 0) {
    kept.push({
      file: kept[kept.length - 1].file,
      line: -1,
      content: `[${omittedCount} adjacent matches omitted]`,
    });
  }
  return kept;
}
/**
 * Compress raw grep output into a per-file summary.
 *
 * Matches are grouped by file, files are ordered by descending match count,
 * nearby matches are optionally deduplicated, and both per-file and global
 * display caps are applied with "... [K more]" notes for the overflow.
 *
 * @param output  Raw `file:line:text` grep/ripgrep output.
 * @param options Tuning knobs; see CompressOptions.
 * @returns Rendered summary plus counters. `compressedMatches` counts real
 *          match lines displayed (synthetic notes are excluded).
 */
export function compressGrep(
  output: string,
  options: CompressOptions = {}
): CompressResult {
  const {
    maxMatchesPerFile = 3,
    maxTotalMatches = 20,
    dedupeAdjacent: shouldDedupe = true,
    showCounts = true,
  } = options;
  const matches = parseGrepOutput(output);
  const originalMatches = matches.length;
  if (originalMatches === 0) {
    return {
      content: 'No matches found.',
      originalMatches: 0,
      compressedMatches: 0,
      filesMatched: 0,
      savings: '0%',
    };
  }
  const grouped = groupByFile(matches);
  const filesMatched = grouped.size;
  const result: string[] = [];
  let totalShown = 0;
  // Count emitted file sections directly instead of re-scanning `result`
  // for '## ' prefixes on every overflow check (the previous approach was
  // O(n²) in output size and would miscount if any match content line ever
  // started with '## ').
  let filesShown = 0;
  // Files with the most matches first, so dense files survive the cap.
  const sortedFiles = Array.from(grouped.entries()).sort(
    (a, b) => b[1].length - a[1].length
  );
  for (const [file, fileMatches] of sortedFiles) {
    if (totalShown >= maxTotalMatches) {
      const remaining = sortedFiles.length - filesShown;
      if (remaining > 0) {
        result.push(`\n... [${remaining} more files with matches]`);
      }
      break;
    }
    // Collapse clusters of nearby matches into a single representative.
    const processed = shouldDedupe ? dedupeAdjacent(fileMatches) : fileMatches;
    const totalInFile = fileMatches.length;
    // NOTE: `processed` may end with a synthetic "[N adjacent matches
    // omitted]" note (line === -1) occupying one of the display slots.
    const shown = processed.slice(0, maxMatchesPerFile);
    const omitted = totalInFile - shown.length;
    result.push(`## ${file}`);
    filesShown++;
    if (showCounts && totalInFile > maxMatchesPerFile) {
      result.push(`(${totalInFile} matches, showing ${shown.length})`);
    }
    for (const match of shown) {
      if (match.line === -1) {
        result.push(` ${match.content}`);
      } else {
        result.push(` ${match.line}: ${match.content.trim()}`);
        totalShown++;
      }
    }
    if (omitted > 0) {
      result.push(` ... [${omitted} more matches in this file]`);
    }
    result.push('');
  }
  const compressedMatches = totalShown;
  const savings = ((1 - compressedMatches / originalMatches) * 100).toFixed(1);
  return {
    content: result.join('\n').trim(),
    originalMatches,
    compressedMatches,
    filesMatched,
    savings: `${savings}%`,
  };
}

165
src/compressors/read.ts Normal file
View File

@@ -0,0 +1,165 @@
/**
* Read Compressor - Compress file content while preserving essential information
*
* Strategies:
* - Remove blank lines (configurable)
* - Remove comment-only lines (language-aware)
* - Collapse import blocks
* - Preserve line numbers for reference
*/
/** Tuning knobs for compressRead. */
interface CompressOptions {
  /** Drop whitespace-only lines (default true). */
  removeBlankLines?: boolean;
  /** Drop comment-only lines, language-aware (default true). */
  removeComments?: boolean;
  /** Collapse runs of more than 3 import lines into a one-line note (default true). */
  collapseImports?: boolean;
  /** Cap on OUTPUT lines; excess is truncated with a note (default 500). */
  maxLines?: number;
}
/** Outcome of one compressRead call. */
interface CompressResult {
  /** Compressed text; surviving code lines keep their original line numbers. */
  content: string;
  /** Line count of the input. */
  originalLines: number;
  /** Line count of the output. */
  compressedLines: number;
  /** Reduction percentage, formatted like '66.7%'. */
  savings: string;
}
// Language-specific comment patterns: a line matching any pattern for its
// language is considered comment-ONLY and may be stripped by compressRead.
// Patterns match whole lines, so inline trailing comments never cause code
// to be dropped.
// NOTE(review): only single-line forms are covered — multi-line /* ... */
// bodies (JS/TS/Go/Rust/Swift) and Python docstrings are NOT detected and
// will be kept; confirm this is the intended trade-off.
const COMMENT_PATTERNS: Record<string, RegExp[]> = {
  // Single-line comments
  javascript: [/^\s*\/\/.*$/, /^\s*\/\*.*\*\/\s*$/],
  typescript: [/^\s*\/\/.*$/, /^\s*\/\*.*\*\/\s*$/],
  python: [/^\s*#.*$/],
  ruby: [/^\s*#.*$/],
  bash: [/^\s*#.*$/],
  swift: [/^\s*\/\/.*$/],
  go: [/^\s*\/\/.*$/],
  rust: [/^\s*\/\/.*$/],
  php: [/^\s*\/\/.*$/, /^\s*#.*$/, /^\s*\/\*.*\*\/\s*$/],
};
// Import patterns by language. Matched against the TRIMMED line (see
// isImportLine), so only line-leading forms count.
// For JS/TS, `export` only counts as an import when it is a re-export
// (`export { ... }` / `export * from`): the previous /^(import|export)\s+/
// also matched `export function foo(` and `export const X`, which made
// compressRead swallow real exported code into "[N imports collapsed]".
const IMPORT_PATTERNS: Record<string, RegExp> = {
  javascript: /^import\b|^export\s*[{*]/,
  typescript: /^import\b|^export\s*[{*]/,
  python: /^(import|from)\s+/,
  swift: /^import\s+/,
  go: /^import\s+/,
  rust: /^use\s+/,
  php: /^(use|require|include)/,
};
/**
 * Map a filename's extension (case-insensitive) to the canonical language
 * key used by COMMENT_PATTERNS / IMPORT_PATTERNS. Unrecognized or missing
 * extensions yield 'unknown', which disables stripping for that file.
 */
function detectLanguage(filename: string): string {
  const EXT_TO_LANG: Record<string, string> = {
    js: 'javascript',
    jsx: 'javascript',
    ts: 'typescript',
    tsx: 'typescript',
    py: 'python',
    rb: 'ruby',
    sh: 'bash',
    bash: 'bash',
    swift: 'swift',
    go: 'go',
    rs: 'rust',
    php: 'php',
  };
  const pieces = filename.split('.');
  const ext = (pieces[pieces.length - 1] ?? '').toLowerCase();
  return EXT_TO_LANG[ext] ?? 'unknown';
}
/**
 * True when `line` consists solely of a comment in the given language,
 * per COMMENT_PATTERNS. Unknown languages never match.
 */
function isCommentLine(line: string, language: string): boolean {
  const patterns = COMMENT_PATTERNS[language] ?? [];
  return patterns.some(p => p.test(line));
}
/**
 * True when the trimmed line matches the language's import/require
 * pattern from IMPORT_PATTERNS. Unknown languages never match.
 */
function isImportLine(line: string, language: string): boolean {
  const pattern = IMPORT_PATTERNS[language];
  return pattern ? pattern.test(line.trim()) : false;
}
/**
 * Compress source text for context-efficient display.
 *
 * Depending on options: drops blank lines, drops comment-only lines
 * (language detected from `filename`'s extension), and collapses runs of
 * more than 3 import statements into a one-line note. Surviving code lines
 * are prefixed with their ORIGINAL 1-based line number so references into
 * the uncompressed file stay valid.
 *
 * @param content  Full file text.
 * @param filename Used only for language detection.
 * @param options  See CompressOptions; maxLines caps the OUTPUT length.
 * @returns Compressed text plus line-count bookkeeping.
 */
export function compressRead(
  content: string,
  filename: string,
  options: CompressOptions = {}
): CompressResult {
  const {
    removeBlankLines = true,
    removeComments = true,
    collapseImports = true,
    maxLines = 500,
  } = options;
  const language = detectLanguage(filename);
  const lines = content.split('\n');
  const originalLines = lines.length;
  const result: string[] = [];
  let importBlock: string[] = [];
  let inImportBlock = false;
  let lineNumber = 0;
  // Flush the buffered import run: long runs collapse to a single note,
  // short runs are emitted verbatim (trimmed, without line numbers).
  // Extracted into a helper because the identical logic previously
  // appeared twice (mid-file flush and end-of-file flush).
  const flushImports = (): void => {
    if (importBlock.length > 3) {
      result.push(`// [${importBlock.length} imports collapsed]`);
    } else {
      result.push(...importBlock);
    }
    importBlock = [];
    inImportBlock = false;
  };
  for (const line of lines) {
    lineNumber++;
    // Skip blank lines if configured
    if (removeBlankLines && line.trim() === '') {
      continue;
    }
    // Skip comment lines if configured
    if (removeComments && isCommentLine(line, language)) {
      continue;
    }
    // Handle import collapsing
    if (collapseImports && isImportLine(line, language)) {
      if (!inImportBlock) {
        inImportBlock = true;
        importBlock = [];
      }
      importBlock.push(line.trim());
      continue;
    } else if (inImportBlock) {
      // First non-import line ends the run.
      flushImports();
    }
    // Preserve the original line number for cross-referencing.
    result.push(`${lineNumber}: ${line}`);
  }
  // The file may end while still inside an import run.
  if (importBlock.length > 0) {
    flushImports();
  }
  // Cap output length; the truncation notice is appended AFTER slicing so
  // it always survives. (An unused `truncated` flag was removed here.)
  let compressed = result;
  if (compressed.length > maxLines) {
    compressed = compressed.slice(0, maxLines);
    compressed.push(`\n... [${result.length - maxLines} more lines truncated]`);
  }
  const compressedLines = compressed.length;
  const savings = ((1 - compressedLines / originalLines) * 100).toFixed(1);
  return {
    content: compressed.join('\n'),
    originalLines,
    compressedLines,
    savings: `${savings}%`,
  };
}

300
src/index.ts Normal file
View File

@@ -0,0 +1,300 @@
#!/usr/bin/env node
/**
* Tool Compression MCP Server
*
* Provides compressed versions of Read/Grep/Glob operations
* to reduce context token usage by 40-50%
*
* Tools:
* - compressed_read: Read file with comment/blank removal
* - compressed_grep: Search with grouped/deduped results
* - compressed_glob: File listing with collapsed directories
*/
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
  CallToolRequestSchema,
  ListToolsRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";
import { readFileSync, existsSync, statSync } from "fs";
import { execFileSync, execSync } from "child_process";
import { compressRead } from "./compressors/read.js";
import { compressGrep } from "./compressors/grep.js";
import { compressGlob } from "./compressors/glob.js";
// Configuration from environment
// COMPRESSION_LEVEL selects one of the presets below ("light" | "medium" |
// "aggressive"); any other value falls back to "medium" (see `preset`).
const COMPRESSION_LEVEL = process.env.COMPRESSION_LEVEL || "medium";
// Compression presets
// Each preset bundles the option objects handed to the three compressors:
// - read: line filtering + output cap for compressed_read
// - grep: per-file / total match caps + adjacent-match deduping for compressed_grep
// - glob: file-count cap + directory collapse depth for compressed_glob
const PRESETS = {
  light: {
    // Light: keep comments and imports, only drop blank lines; generous caps.
    read: { removeBlankLines: true, removeComments: false, collapseImports: false, maxLines: 1000 },
    grep: { maxMatchesPerFile: 5, maxTotalMatches: 50, dedupeAdjacent: false },
    glob: { maxFiles: 50, collapseDepth: 6 },
  },
  medium: {
    // Medium (default): full filtering with moderate output caps.
    read: { removeBlankLines: true, removeComments: true, collapseImports: true, maxLines: 500 },
    grep: { maxMatchesPerFile: 3, maxTotalMatches: 20, dedupeAdjacent: true },
    glob: { maxFiles: 30, collapseDepth: 4 },
  },
  aggressive: {
    // Aggressive: same filtering as medium, much tighter output caps.
    read: { removeBlankLines: true, removeComments: true, collapseImports: true, maxLines: 200 },
    grep: { maxMatchesPerFile: 2, maxTotalMatches: 10, dedupeAdjacent: true },
    glob: { maxFiles: 15, collapseDepth: 3 },
  },
};
// Resolve the active preset; unknown COMPRESSION_LEVEL values map to medium.
const preset = PRESETS[COMPRESSION_LEVEL as keyof typeof PRESETS] || PRESETS.medium;
// Create MCP server
// Declares the server identity and advertises tool support only (no
// resources or prompts); the tools themselves are wired up via the
// request handlers registered below.
const server = new Server(
  {
    name: "tool-compression-mcp",
    version: "1.0.0",
  },
  {
    capabilities: {
      tools: {},
    },
  }
);
// List available tools
// Advertises the three compression tools with JSON-schema inputs. The
// descriptions are written for the calling model: each states when to
// prefer the compressed variant over the built-in Read/Grep/Glob tool.
server.setRequestHandler(ListToolsRequestSchema, async () => {
  return {
    tools: [
      // Compressed file read: strips blanks/comments, collapses import runs.
      {
        name: "compressed_read",
        description:
          "Read a file with compression: removes blank lines, comments, and collapses imports. " +
          "Use this instead of Read for large files to save context tokens. " +
          "Returns content with line numbers preserved for reference.",
        inputSchema: {
          type: "object",
          properties: {
            path: {
              type: "string",
              description: "Absolute path to the file to read",
            },
            maxLines: {
              type: "number",
              description: "Maximum lines to return (default: 500)",
            },
            keepComments: {
              type: "boolean",
              description: "Keep comment lines (default: false)",
            },
          },
          required: ["path"],
        },
      },
      // Compressed search: ripgrep results grouped per file with match caps.
      {
        name: "compressed_grep",
        description:
          "Search files with compressed results: groups by file, shows top matches, dedupes adjacent. " +
          "Use this instead of Grep when expecting many matches. " +
          "Returns summary with match counts per file.",
        inputSchema: {
          type: "object",
          properties: {
            pattern: {
              type: "string",
              description: "Regex pattern to search for",
            },
            path: {
              type: "string",
              description: "Directory or file to search in",
            },
            glob: {
              type: "string",
              description: "File glob pattern (e.g., '*.ts')",
            },
            maxMatchesPerFile: {
              type: "number",
              description: "Max matches to show per file (default: 3)",
            },
          },
          required: ["pattern"],
        },
      },
      // Compressed listing: deep paths collapsed, grouped by directory.
      {
        name: "compressed_glob",
        description:
          "List files with compression: collapses deep paths, groups by directory, shows file type distribution. " +
          "Use this instead of Glob when expecting many files. " +
          "Returns structured summary with counts.",
        inputSchema: {
          type: "object",
          properties: {
            pattern: {
              type: "string",
              description: "Glob pattern (e.g., '**/*.ts')",
            },
            path: {
              type: "string",
              description: "Base directory to search from",
            },
            maxFiles: {
              type: "number",
              description: "Max files to list (default: 30)",
            },
          },
          required: ["pattern"],
        },
      },
    ],
  };
});
// Handle tool calls.
// All external commands are invoked via execFileSync with an argv array
// (no shell), so user-supplied patterns and paths cannot inject shell
// syntax — the previous execSync string interpolation only escaped `"` in
// the grep pattern and escaped nothing in the glob arguments.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: args } = request.params;

  // Wrap a plain string as an MCP text response.
  const textResult = (t: string) => ({ content: [{ type: "text" as const, text: t }] });

  // Run `cmd argv...` without a shell. rg/fd/find exit non-zero when nothing
  // matches; treat that as "no output" but keep any stdout they produced.
  const run = (cmd: string, argv: string[]): string => {
    try {
      return execFileSync(cmd, argv, {
        encoding: "utf-8",
        maxBuffer: 10 * 1024 * 1024,
      });
    } catch (error: unknown) {
      const stdout = (error as { stdout?: unknown } | null)?.stdout;
      return typeof stdout === "string" ? stdout : "";
    }
  };

  try {
    switch (name) {
      case "compressed_read": {
        const path = args?.path as string;
        if (!path) {
          return textResult("Error: path is required");
        }
        if (!existsSync(path)) {
          return textResult(`Error: File not found: ${path}`);
        }
        const stats = statSync(path);
        if (stats.isDirectory()) {
          return textResult(`Error: Path is a directory: ${path}`);
        }
        const content = readFileSync(path, "utf-8");
        const options = {
          ...preset.read,
          // ?? (not ||) so an explicit maxLines of 0 is respected.
          maxLines: (args?.maxLines as number) ?? preset.read.maxLines,
          removeComments: !(args?.keepComments as boolean),
        };
        const result = compressRead(content, path, options);
        return textResult(
          `📄 ${path}\n` +
            `[Compressed: ${result.originalLines}${result.compressedLines} lines (${result.savings} saved)]\n\n` +
            result.content
        );
      }
      case "compressed_grep": {
        const pattern = args?.pattern as string;
        if (!pattern) {
          return textResult("Error: pattern is required");
        }
        const searchPath = (args?.path as string) || ".";
        const glob = args?.glob as string;
        // Build the ripgrep argv; the pattern and paths are passed verbatim
        // as arguments, never interpolated into a shell string.
        const rgArgs = ["-n", pattern];
        if (glob) {
          rgArgs.push("--glob", glob);
        }
        rgArgs.push(searchPath);
        const output = run("rg", rgArgs);
        const options = {
          ...preset.grep,
          maxMatchesPerFile: (args?.maxMatchesPerFile as number) ?? preset.grep.maxMatchesPerFile,
        };
        const result = compressGrep(output, options);
        return textResult(
          `🔍 Search: "${pattern}"${glob ? ` (${glob})` : ""}\n` +
            `[Found ${result.originalMatches} matches in ${result.filesMatched} files, showing ${result.compressedMatches} (${result.savings} compressed)]\n\n` +
            result.content
        );
      }
      case "compressed_glob": {
        const pattern = args?.pattern as string;
        if (!pattern) {
          return textResult("Error: pattern is required");
        }
        const basePath = (args?.path as string) || ".";
        // Prefer fd (faster); fall back to find when fd is unavailable.
        let fdAvailable = false;
        try {
          execFileSync("which", ["fd"], { encoding: "utf-8" });
          fdAvailable = true;
        } catch {
          fdAvailable = false;
        }
        const output = fdAvailable
          ? run("fd", ["--type", "f", pattern, basePath])
          : run("find", [basePath, "-type", "f", "-name", pattern]);
        const paths = output.split("\n").filter((p) => p.trim());
        const options = {
          ...preset.glob,
          maxFiles: (args?.maxFiles as number) ?? preset.glob.maxFiles,
        };
        const result = compressGlob(paths, options);
        return textResult(
          `📁 Glob: "${pattern}" in ${basePath}\n` +
            `[${result.savings} compression]\n\n` +
            result.content
        );
      }
      default:
        return textResult(`Unknown tool: ${name}`);
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    return textResult(`Error: ${message}`);
  }
});
// Start server
/**
 * Connect the server over stdio and log readiness to stderr (stdout is
 * reserved for the MCP protocol stream, so never log there).
 */
async function main(): Promise<void> {
  const transport = new StdioServerTransport();
  await server.connect(transport);
  console.error("Tool Compression MCP server started");
}
// Exit non-zero on startup failure so supervising clients can detect it;
// a bare `.catch(console.error)` would log and still exit with code 0.
main().catch((error: unknown) => {
  console.error(error);
  process.exit(1);
});

17
tsconfig.json Normal file
View File

@@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"declaration": true,
"resolveJsonModule": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}