feat: Tool Compression MCP server for Phase 8

MCP server providing compressed versions of Read/Grep/Glob:
- compressed_read: removes comments, blanks, collapses imports
- compressed_grep: groups by file, dedupes adjacent matches
- compressed_glob: collapses directories, shows type distribution

Test results: 66.7% compression on sample file

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Christian Gick
2026-01-08 11:05:27 +02:00
commit 0879633faf
14 changed files with 1598 additions and 0 deletions

24
dist/compressors/glob.d.ts vendored Normal file
View File

@@ -0,0 +1,24 @@
/**
* Glob Compressor - Compress file listing while preserving useful structure
*
* Strategies:
* - Collapse deep directory paths
* - Group by directory with counts
* - Prioritize recently modified files
* - Show file type distribution
*/
/** Tuning options for {@link compressGlob}. All fields are optional; defaults are applied inside the implementation. */
interface GlobOptions {
    /** Maximum number of individual file names to print before summarizing the rest (implementation default: 30). */
    maxFiles?: number;
    /** Directory paths deeper than this many segments are collapsed with an ellipsis (implementation default: 4). */
    collapseDepth?: number;
    /** When true, prepend a "**Found N files in M directories**" summary line (implementation default: true). */
    showCounts?: boolean;
    /** When true and more than 10 files matched, show a file-type distribution header (implementation default: true). */
    groupByExtension?: boolean;
}
/** Result of a glob compression pass: the rendered text plus statistics about what was kept. */
interface CompressResult {
    /** Human-readable, compressed directory/file listing. */
    content: string;
    /** Number of paths passed in. */
    originalCount: number;
    /** Number of individual file names actually printed. */
    compressedCount: number;
    /** Number of distinct directories among the input paths. */
    directories: number;
    /** Percentage string (e.g. "66.7%") estimating output-line savings versus the raw listing. */
    savings: string;
}
export declare function compressGlob(paths: string[], options?: GlobOptions): CompressResult;
export {};

128
dist/compressors/glob.js vendored Normal file
View File

@@ -0,0 +1,128 @@
/**
* Glob Compressor - Compress file listing while preserving useful structure
*
* Strategies:
* - Collapse deep directory paths
* - Group by directory with counts
* - Prioritize recently modified files
* - Show file type distribution
*/
/**
 * Split a slash-separated path into { path, dir, name, ext }.
 * `dir` is '.' for bare file names; `ext` is lowercased and '' when absent.
 */
function parseFilePath(path) {
    const parts = path.split('/');
    const name = parts.pop() || '';
    const dir = parts.join('/') || '.';
    // BUGFIX: use lastIndexOf with index > 0 so dotfiles such as ".gitignore"
    // do not report a bogus extension (the old includes('.') + split('.').pop()
    // treated the leading dot as an extension separator).
    const dotIndex = name.lastIndexOf('.');
    const ext = dotIndex > 0 ? name.slice(dotIndex + 1).toLowerCase() : '';
    return { path, dir, name, ext };
}
/**
 * Collapse a directory path deeper than `depth` segments to
 * "first…/.../…last", keeping roughly depth/2 segments at each end.
 * Paths at or under `depth` segments are returned unchanged.
 */
function collapseDirectory(dir, depth) {
    const parts = dir.split('/').filter(p => p);
    if (parts.length <= depth) {
        return dir;
    }
    // BUGFIX: keep at least one segment on each side. With depth <= 1 the old
    // code computed keep = 0, and parts.slice(-0) returns the WHOLE array, so
    // the "collapsed" result was '...' prepended to the full path — longer
    // than the input instead of shorter.
    const keep = Math.max(1, Math.floor(depth / 2));
    const start = parts.slice(0, keep);
    const end = parts.slice(-keep);
    return [...start, '...', ...end].join('/');
}
/**
 * Bucket parsed files by their `dir` property.
 * Returns Map<dir, files[]> preserving input order within each bucket.
 */
function groupByDirectory(files) {
    const buckets = new Map();
    for (const file of files) {
        if (!buckets.has(file.dir)) {
            buckets.set(file.dir, []);
        }
        buckets.get(file.dir).push(file);
    }
    return buckets;
}
/**
 * Count files per extension. Files with an empty `ext` are tallied
 * under the '(no extension)' pseudo-key.
 */
function groupByExtension(files) {
    const tally = new Map();
    files.forEach(({ ext }) => {
        const key = ext || '(no extension)';
        tally.set(key, (tally.get(key) ?? 0) + 1);
    });
    return tally;
}
/**
 * Compress a flat list of file paths into a grouped, directory-oriented
 * summary: optional file-type distribution, directories sorted by file
 * count, large directories sampled (3 files + "N more"), and an overall
 * cap of `maxFiles` printed names.
 *
 * @param paths   Slash-separated file paths.
 * @param options See GlobOptions; defaults: maxFiles 30, collapseDepth 4,
 *                showCounts true, groupByExtension true.
 * @returns { content, originalCount, compressedCount, directories, savings }
 */
export function compressGlob(paths, options = {}) {
    const { maxFiles = 30, collapseDepth = 4, showCounts = true, groupByExtension: showExtensions = true, } = options;
    const originalCount = paths.length;
    if (originalCount === 0) {
        return {
            content: 'No files found.',
            originalCount: 0,
            compressedCount: 0,
            directories: 0,
            savings: '0%',
        };
    }
    const files = paths.map(parseFilePath);
    const byDir = groupByDirectory(files);
    const directories = byDir.size;
    const result = [];
    // Extension distribution header (only worthwhile for larger listings).
    if (showExtensions && originalCount > 10) {
        const extCounts = groupByExtension(files);
        const top = Array.from(extCounts.entries())
            .sort((a, b) => b[1] - a[1])
            .slice(0, 5);
        result.push('**File types:**');
        for (const [ext, count] of top) {
            // BUGFIX: the '(no extension)' pseudo-key used to render as
            // ".(no extension)" because a dot was prefixed unconditionally.
            const label = ext === '(no extension)' ? ext : `.${ext}`;
            result.push(`  ${label}: ${count}`);
        }
        result.push('');
    }
    // Directories with the most files first.
    const sortedDirs = Array.from(byDir.entries())
        .sort((a, b) => b[1].length - a[1].length);
    let totalShown = 0;
    let dirsShown = 0;
    for (const [dir, dirFiles] of sortedDirs) {
        if (totalShown >= maxFiles) {
            const remainingDirs = sortedDirs.length - dirsShown;
            const remainingFiles = originalCount - totalShown;
            if (remainingDirs > 0) {
                result.push(`\n... [${remainingFiles} more files in ${remainingDirs} directories]`);
            }
            break;
        }
        const collapsedDir = collapseDirectory(dir, collapseDepth);
        const fileCount = dirFiles.length;
        if (fileCount > 5) {
            // Large directory: header with count, then a 3-file sample.
            result.push(`📁 ${collapsedDir}/ (${fileCount} files)`);
            for (const file of dirFiles.slice(0, 3)) {
                result.push(`  ${file.name}`);
                totalShown++;
            }
            // (fileCount > 5 here, so there are always files left to note;
            // the old `if (fileCount > 3)` guard was always true.)
            result.push(`  ... [${fileCount - 3} more]`);
        }
        else {
            // Small directory: list every file, still honoring the cap.
            result.push(`📁 ${collapsedDir}/`);
            for (const file of dirFiles) {
                result.push(`  ${file.name}`);
                totalShown++;
                if (totalShown >= maxFiles)
                    break;
            }
        }
        dirsShown++;
        result.push('');
    }
    if (showCounts) {
        result.unshift(`**Found ${originalCount} files in ${directories} directories**\n`);
    }
    const compressedCount = totalShown;
    // BUGFIX: clamp at 0 — for tiny inputs the rendered output can have more
    // lines than (files + directories), which used to produce negative savings.
    const savings = Math.max(0, (1 - result.length / (originalCount + directories)) * 100).toFixed(1);
    return {
        content: result.join('\n').trim(),
        originalCount,
        compressedCount,
        directories,
        savings: `${savings}%`,
    };
}

24
dist/compressors/grep.d.ts vendored Normal file
View File

@@ -0,0 +1,24 @@
/**
* Grep Compressor - Compress search results while preserving essential matches
*
* Strategies:
* - Group by file
* - Show first N matches per file + count
* - Dedupe similar/adjacent matches
* - Prioritize exact matches
*/
/** Tuning options for {@link compressGrep}. All fields are optional; defaults are applied inside the implementation. */
interface CompressOptions {
    /** Matches printed per file before "... N more" is shown (implementation default: 3). */
    maxMatchesPerFile?: number;
    /** Overall cap on printed matches across all files (implementation default: 20). */
    maxTotalMatches?: number;
    /** When true, collapse matches within 3 lines of the previous one into a single note (implementation default: true). */
    dedupeAdjacent?: boolean;
    /** When true, print "(N matches, showing M)" under each file header when truncated (implementation default: true). */
    showCounts?: boolean;
}
/** Result of a grep compression pass: the rendered text plus match statistics. */
interface CompressResult {
    /** Markdown-ish text: "## file" headers followed by "line: content" entries. */
    content: string;
    /** Total matches parsed from the raw grep output. */
    originalMatches: number;
    /** Matches actually printed. */
    compressedMatches: number;
    /** Number of distinct files with at least one match. */
    filesMatched: number;
    /** Percentage string (e.g. "85.0%") of matches elided. */
    savings: string;
}
export declare function compressGrep(output: string, options?: CompressOptions): CompressResult;
export {};

121
dist/compressors/grep.js vendored Normal file
View File

@@ -0,0 +1,121 @@
/**
* Grep Compressor - Compress search results while preserving essential matches
*
* Strategies:
* - Group by file
* - Show first N matches per file + count
* - Dedupe similar/adjacent matches
* - Prioritize exact matches
*/
/**
 * Parse raw grep-style output into { file, line, content } records.
 * Accepts both "file:line:content" (match) and "file:line-content"
 * (context) forms; lines that do not fit are silently dropped.
 */
function parseGrepOutput(output) {
    const entryPattern = /^(.+?):(\d+)[:-](.*)$/;
    const parsed = [];
    for (const raw of output.split('\n')) {
        if (!raw.trim()) {
            continue;
        }
        const m = entryPattern.exec(raw);
        if (m) {
            parsed.push({ file: m[1], line: parseInt(m[2]), content: m[3] });
        }
    }
    return parsed;
}
/**
 * Bucket matches by their `file` property.
 * Returns Map<file, matches[]> preserving input order within each bucket.
 */
function groupByFile(matches) {
    const byFile = new Map();
    for (const m of matches) {
        const bucket = byFile.get(m.file);
        if (bucket) {
            bucket.push(m);
        }
        else {
            byFile.set(m.file, [m]);
        }
    }
    return byFile;
}
/**
 * Drop matches within `threshold` lines of the previously kept match,
 * appending a sentinel record (line === -1) noting how many were omitted.
 * Input is assumed sorted by line number within one file.
 */
function dedupeAdjacent(matches, threshold = 3) {
    if (matches.length <= 1) {
        return matches;
    }
    const kept = [matches[0]];
    let omittedCount = 0;
    for (const candidate of matches.slice(1)) {
        const lastKept = kept[kept.length - 1];
        if (candidate.line - lastKept.line <= threshold) {
            omittedCount += 1;
        }
        else {
            kept.push(candidate);
        }
    }
    if (omittedCount > 0) {
        const lastKept = kept[kept.length - 1];
        kept.push({
            file: lastKept.file,
            line: -1,
            content: `[${omittedCount} adjacent matches omitted]`,
        });
    }
    return kept;
}
/**
 * Compress raw grep output: group matches by file (most matches first),
 * optionally dedupe adjacent hits, cap matches per file and overall, and
 * render as "## file" sections with "line: content" entries.
 *
 * @param output  Raw "file:line:content" grep text.
 * @param options See CompressOptions; defaults: maxMatchesPerFile 3,
 *                maxTotalMatches 20, dedupeAdjacent true, showCounts true.
 * @returns { content, originalMatches, compressedMatches, filesMatched, savings }
 */
export function compressGrep(output, options = {}) {
    const { maxMatchesPerFile = 3, maxTotalMatches = 20, dedupeAdjacent: shouldDedupe = true, showCounts = true, } = options;
    const matches = parseGrepOutput(output);
    const originalMatches = matches.length;
    if (originalMatches === 0) {
        return {
            content: 'No matches found.',
            originalMatches: 0,
            compressedMatches: 0,
            filesMatched: 0,
            savings: '0%',
        };
    }
    const grouped = groupByFile(matches);
    const filesMatched = grouped.size;
    const result = [];
    let totalShown = 0;
    // BUGFIX: track rendered files explicitly instead of re-scanning `result`
    // for lines starting with '## ' (fragile string-based bookkeeping).
    let filesShown = 0;
    // Files with the most matches first.
    const sortedFiles = Array.from(grouped.entries()).sort((a, b) => b[1].length - a[1].length);
    for (const [file, fileMatches] of sortedFiles) {
        if (totalShown >= maxTotalMatches) {
            const remaining = sortedFiles.length - filesShown;
            if (remaining > 0) {
                result.push(`\n... [${remaining} more files with matches]`);
            }
            break;
        }
        const processed = shouldDedupe ? dedupeAdjacent(fileMatches) : fileMatches;
        const totalInFile = fileMatches.length;
        const shown = processed.slice(0, maxMatchesPerFile);
        // BUGFIX: the dedupe sentinel (line === -1) is a note, not a match.
        // The old `omitted = totalInFile - shown.length` counted it as a shown
        // match, under-reporting how many matches were actually elided.
        const realShown = shown.filter(m => m.line !== -1).length;
        const omitted = totalInFile - realShown;
        result.push(`## ${file}`);
        if (showCounts && totalInFile > maxMatchesPerFile) {
            result.push(`(${totalInFile} matches, showing ${realShown})`);
        }
        for (const match of shown) {
            if (match.line === -1) {
                result.push(`  ${match.content}`);
            }
            else {
                result.push(`  ${match.line}: ${match.content.trim()}`);
                totalShown++;
            }
        }
        if (omitted > 0) {
            result.push(`  ... [${omitted} more matches in this file]`);
        }
        filesShown++;
        result.push('');
    }
    const compressedMatches = totalShown;
    const savings = ((1 - compressedMatches / originalMatches) * 100).toFixed(1);
    return {
        content: result.join('\n').trim(),
        originalMatches,
        compressedMatches,
        filesMatched,
        savings: `${savings}%`,
    };
}

23
dist/compressors/read.d.ts vendored Normal file
View File

@@ -0,0 +1,23 @@
/**
* Read Compressor - Compress file content while preserving essential information
*
* Strategies:
* - Remove blank lines (configurable)
* - Remove comment-only lines (language-aware)
* - Collapse import blocks
* - Preserve line numbers for reference
*/
/** Tuning options for {@link compressRead}. All fields are optional; defaults are applied inside the implementation. */
interface CompressOptions {
    /** Strip blank lines from the output (implementation default: true). */
    removeBlankLines?: boolean;
    /** Strip comment-only lines, language detected from the filename extension (implementation default: true). */
    removeComments?: boolean;
    /** Collapse runs of more than 3 import lines into a single summary line (implementation default: true). */
    collapseImports?: boolean;
    /** Hard cap on output lines; the rest are truncated with a note (implementation default: 500). */
    maxLines?: number;
}
/** Result of a read compression pass: the rendered text plus line statistics. */
interface CompressResult {
    /** Compressed file body; surviving lines keep an "N: " original-line-number prefix. */
    content: string;
    /** Line count of the input content. */
    originalLines: number;
    /** Line count of the compressed output. */
    compressedLines: number;
    /** Percentage string (e.g. "66.7%") of lines removed. */
    savings: string;
}
export declare function compressRead(content: string, filename: string, options?: CompressOptions): CompressResult;
export {};

130
dist/compressors/read.js vendored Normal file
View File

@@ -0,0 +1,130 @@
/**
* Read Compressor - Compress file content while preserving essential information
*
* Strategies:
* - Remove blank lines (configurable)
* - Remove comment-only lines (language-aware)
* - Collapse import blocks
* - Preserve line numbers for reference
*/
// Language-specific comment patterns.
// Each entry maps a language name (as returned by detectLanguage) to regexes
// that match lines consisting ONLY of a comment. Note: only single-line
// `/* ... */` forms are matched — lines inside a multi-line block comment are
// NOT recognized and will survive compression.
const COMMENT_PATTERNS = {
    // Single-line comments
    javascript: [/^\s*\/\/.*$/, /^\s*\/\*.*\*\/\s*$/],
    typescript: [/^\s*\/\/.*$/, /^\s*\/\*.*\*\/\s*$/],
    python: [/^\s*#.*$/],
    ruby: [/^\s*#.*$/],
    bash: [/^\s*#.*$/],
    swift: [/^\s*\/\/.*$/],
    go: [/^\s*\/\/.*$/],
    rust: [/^\s*\/\/.*$/],
    php: [/^\s*\/\/.*$/, /^\s*#.*$/, /^\s*\/\*.*\*\/\s*$/],
};
// Import patterns by language.
// Matched against the TRIMMED line (see isImportLine), hence no leading \s*.
// Note: the js/ts pattern also treats `export ...` lines as imports, so
// re-export blocks are collapsed together with imports.
const IMPORT_PATTERNS = {
    javascript: /^(import|export)\s+/,
    typescript: /^(import|export)\s+/,
    python: /^(import|from)\s+/,
    swift: /^import\s+/,
    go: /^import\s+/,
    rust: /^use\s+/,
    php: /^(use|require|include)/,
};
/**
 * Map a filename's extension (case-insensitive) to a language name used
 * as a key into COMMENT_PATTERNS / IMPORT_PATTERNS; 'unknown' otherwise.
 */
function detectLanguage(filename) {
    const extension = (filename.split('.').pop() || '').toLowerCase();
    switch (extension) {
        case 'js':
        case 'jsx':
            return 'javascript';
        case 'ts':
        case 'tsx':
            return 'typescript';
        case 'py':
            return 'python';
        case 'rb':
            return 'ruby';
        case 'sh':
        case 'bash':
            return 'bash';
        case 'swift':
            return 'swift';
        case 'go':
            return 'go';
        case 'rs':
            return 'rust';
        case 'php':
            return 'php';
        default:
            return 'unknown';
    }
}
/** True when `line` is a comment-only line for `language`; false for unknown languages. */
function isCommentLine(line, language) {
    const patterns = COMMENT_PATTERNS[language];
    return patterns !== undefined && patterns.some(p => p.test(line));
}
/** True when the trimmed `line` starts an import/use/require for `language`; false for unknown languages. */
function isImportLine(line, language) {
    const pattern = IMPORT_PATTERNS[language];
    return pattern !== undefined && pattern.test(line.trim());
}
/**
 * Compress file content: optionally drop blank lines and comment-only lines,
 * collapse runs of >3 import lines into one summary line, prefix surviving
 * lines with their ORIGINAL line number, and truncate at `maxLines`.
 *
 * @param content  Raw file text.
 * @param filename Used only to detect the language from its extension.
 * @param options  See CompressOptions; defaults: removeBlankLines true,
 *                 removeComments true, collapseImports true, maxLines 500.
 * @returns { content, originalLines, compressedLines, savings }
 */
export function compressRead(content, filename, options = {}) {
    const { removeBlankLines = true, removeComments = true, collapseImports = true, maxLines = 500, } = options;
    const language = detectLanguage(filename);
    const lines = content.split('\n');
    const originalLines = lines.length;
    const result = [];
    let importBlock = [];
    let inImportBlock = false;
    let lineNumber = 0;
    // Flush a pending import run: >3 imports become one summary line,
    // smaller runs are emitted verbatim (trimmed, no line numbers).
    const flushImports = () => {
        if (importBlock.length > 3) {
            result.push(`// [${importBlock.length} imports collapsed]`);
        }
        else {
            result.push(...importBlock);
        }
        importBlock = [];
        inImportBlock = false;
    };
    for (const line of lines) {
        lineNumber++;
        if (removeBlankLines && line.trim() === '') {
            continue;
        }
        if (removeComments && isCommentLine(line, language)) {
            continue;
        }
        if (collapseImports && isImportLine(line, language)) {
            inImportBlock = true;
            importBlock.push(line.trim());
            continue;
        }
        if (inImportBlock) {
            // First non-import line ends the import run.
            flushImports();
        }
        // Keep the ORIGINAL line number so references stay valid after compression.
        result.push(`${lineNumber}: ${line}`);
    }
    // File ended while still inside an import run.
    if (importBlock.length > 0) {
        flushImports();
    }
    // Truncate overly long output; the note line is informational only.
    let compressed = result;
    let compressedLines = result.length;
    if (result.length > maxLines) {
        compressed = result.slice(0, maxLines);
        compressed.push(`\n... [${result.length - maxLines} more lines truncated]`);
        // BUGFIX: the old code counted the truncation note as a content line
        // (compressedLines was maxLines + 1), slightly inflating the stats.
        // (Also removed: a `truncated` flag that was set but never read.)
        compressedLines = maxLines;
    }
    const savings = ((1 - compressedLines / originalLines) * 100).toFixed(1);
    return {
        content: compressed.join('\n'),
        originalLines,
        compressedLines,
        savings: `${savings}%`,
    };
}