feat(CF-524): Add project_archive MCP method

Implements complete project archival workflow:
- Migration 024: Add archival fields to projects table
- New project-archive.ts tool coordinating:
  * Tarball creation via shell
  * S3 upload with vault credentials
  * Database metadata tracking
  * Optional local deletion
  * Cleanup of temp files
- Registered in tool definitions and handlers

Replaces manual archival process used for Fireberries/CyprusPulse.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
Christian Gick
2026-01-27 20:45:56 +02:00
parent a6d24ab86c
commit 3e20a8ea3c
4 changed files with 264 additions and 0 deletions

View File

@@ -83,6 +83,7 @@ import {
sessionPatternDetection,
} from './tools/session-docs.js';
import { archiveAdd, archiveSearch, archiveList, archiveGet } from './tools/archives.js';
import { projectArchive } from './tools/project-archive.js';
// Create MCP server
const server = new Server(
@@ -667,6 +668,16 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
});
break;
// Project archival
case 'project_archive':
result = await projectArchive({
project_key: a.project_key,
project_path: a.project_path,
delete_local: a.delete_local,
session_id: a.session_id,
});
break;
default:
throw new Error(`Unknown tool: ${name}`);
}

View File

@@ -1059,4 +1059,20 @@ export const toolDefinitions = [
required: ['id'],
},
},
// Project Archival
{
name: 'project_archive',
description: 'Archive complete project to S3 with database tracking. Creates tarball, uploads to s3://agiliton-archive/projects/, updates database, and optionally deletes local copy.',
inputSchema: {
type: 'object',
properties: {
project_key: { type: 'string', description: 'Project key (must exist in database)' },
project_path: { type: 'string', description: 'Absolute path to project directory' },
delete_local: { type: 'boolean', description: 'Delete local project after successful archive (default: false)' },
session_id: { type: 'string', description: 'Session ID performing the archival (optional)' },
},
required: ['project_key', 'project_path'],
},
},
];

View File

@@ -0,0 +1,217 @@
// Project archival operations - Complete project archival to S3
// Coordinates workflow: tar + S3 upload + database tracking + optional local deletion
import { exec, execFile } from 'child_process';
import { existsSync, statSync } from 'fs';
import { rm } from 'fs/promises';
import { basename, dirname } from 'path';
import { promisify } from 'util';
import { execute, queryOne } from '../db.js';
// Promisified child_process.exec: runs a command line through the shell and
// resolves with { stdout, stderr }.
const execAsync = promisify(exec);
// Input arguments for the project_archive MCP tool.
interface ProjectArchiveArgs {
  project_key: string;    // Project key; must already exist in the projects table
  project_path: string;   // Absolute path to the project directory on disk
  delete_local?: boolean; // When true, remove the local copy after a successful archive
  session_id?: string;    // Optional session performing the archival (audit trail)
}
// Structured result shape for an archive run.
// NOTE(review): currently unused — projectArchive() returns a formatted
// string instead. Consider returning this type, or deleting it.
interface ProjectArchiveResult {
  success: boolean;
  message: string;
  archive_location?: string; // s3:// URI of the uploaded tarball
  archive_size?: number;     // Tarball size in bytes
}
/**
 * Check whether a project with the given key is registered in the database.
 *
 * @param projectKey - Key to look up in the projects table
 * @returns true when a matching row exists, false otherwise
 */
async function verifyProject(projectKey: string): Promise<boolean> {
  const sql = 'SELECT key FROM projects WHERE key = $1';
  const row = await queryOne<{ key: string }>(sql, [projectKey]);
  return row != null;
}
/**
 * Fetch Hetzner S3 credentials from the local `vault` CLI.
 *
 * The three lookups are independent, so they run in parallel instead of
 * awaiting each one sequentially.
 *
 * @returns Trimmed access key, secret key, and endpoint URL
 * @throws Error when any vault lookup fails
 */
async function getS3Credentials(): Promise<{
  accessKey: string;
  secretKey: string;
  endpoint: string;
}> {
  try {
    const [accessKey, secretKey, endpoint] = await Promise.all(
      [
        'vault get hetzner.s3_access_key',
        'vault get hetzner.s3_secret_key',
        'vault get hetzner.s3_endpoint',
      ].map(async (cmd) => (await execAsync(cmd)).stdout.trim())
    );
    return { accessKey, secretKey, endpoint };
  } catch (error) {
    throw new Error(`Failed to get S3 credentials from vault: ${error}`);
  }
}
/**
 * Create a gzipped tarball of the project directory under /tmp.
 *
 * Security: the original built a shell command line from projectPath and
 * projectKey, which arrive from MCP tool arguments; `$`, backticks and `"`
 * are expanded even inside double quotes, allowing command injection.
 * execFile runs `tar` directly with an argument vector — no shell parsing.
 *
 * @param projectPath - Absolute path to the project directory
 * @param projectKey - Project key, used in the tarball file name
 * @returns Path of the created tarball and its size in bytes
 * @throws Error when projectPath does not exist or tar fails
 */
async function createTarball(
  projectPath: string,
  projectKey: string
): Promise<{ tarballPath: string; size: number }> {
  if (!existsSync(projectPath)) {
    throw new Error(`Project path not found: ${projectPath}`);
  }
  const execFileAsync = promisify(execFile);
  const parentDir = dirname(projectPath);
  const projectDir = basename(projectPath);
  // YYYYMMDD date stamp for the archive name.
  const date = new Date().toISOString().slice(0, 10).replace(/-/g, '');
  const tarballName = `${projectKey}-${date}.tar.gz`;
  const tarballPath = `/tmp/${tarballName}`;
  console.log(`Creating tarball: ${tarballPath}`);
  // `tar -C parentDir` replaces the original `cd parentDir && tar ...`.
  await execFileAsync('tar', ['-czf', tarballPath, '-C', parentDir, projectDir]);
  const stats = statSync(tarballPath);
  return { tarballPath, size: stats.size };
}
/**
 * Upload the tarball to s3://agiliton-archive/projects/ via the aws CLI,
 * then verify the object is listable before reporting success.
 *
 * Security: the original interpolated tarballPath and endpoint into a shell
 * command line; execFile passes them as argv entries, so they cannot be
 * parsed as shell syntax. Credentials go through the environment so they
 * never appear in `ps` output or shell history.
 *
 * @param tarballPath - Local path of the tarball to upload
 * @param projectKey - Project key (kept for interface compatibility; unused)
 * @param credentials - S3 access key, secret key, and endpoint URL
 * @returns The s3:// URI of the uploaded object
 * @throws Error when the upload or the verification listing fails
 */
async function uploadToS3(
  tarballPath: string,
  projectKey: string,
  credentials: { accessKey: string; secretKey: string; endpoint: string }
): Promise<string> {
  const execFileAsync = promisify(execFile);
  const s3Path = `s3://agiliton-archive/projects/${basename(tarballPath)}`;
  console.log(`Uploading to S3: ${s3Path}`);
  const env = {
    ...process.env,
    AWS_ACCESS_KEY_ID: credentials.accessKey,
    AWS_SECRET_ACCESS_KEY: credentials.secretKey
  };
  try {
    await execFileAsync(
      'aws',
      ['--endpoint-url', credentials.endpoint, 's3', 'cp', tarballPath, s3Path],
      { env }
    );
    // Verify upload: `s3 ls` exits non-zero when the object is missing.
    await execFileAsync(
      'aws',
      ['--endpoint-url', credentials.endpoint, 's3', 'ls', s3Path],
      { env }
    );
    return s3Path;
  } catch (error) {
    throw new Error(`Failed to upload to S3: ${error}`);
  }
}
/**
 * Record archive metadata on the project's database row and mark it inactive.
 *
 * @param projectKey - Key of the project being archived
 * @param archiveLocation - s3:// URI where the tarball was stored
 * @param archiveSize - Tarball size in bytes
 * @param sessionId - Optional session performing the archival
 */
async function updateProjectArchive(
  projectKey: string,
  archiveLocation: string,
  archiveSize: number,
  sessionId?: string
): Promise<void> {
  // Missing (or empty-string) session ids are stored as NULL.
  const archivedBy = sessionId || null;
  const sql = `UPDATE projects
     SET archived_at = NOW(),
         archive_location = $1,
         archive_size = $2,
         archived_by_session = $3,
         active = false
     WHERE key = $4`;
  await execute(sql, [archiveLocation, archiveSize, archivedBy, projectKey]);
}
/**
 * Recursively delete the local project directory.
 *
 * Security/robustness: replaces the original shell `rm -rf "${path}"` with
 * fs.rm, so a path containing shell metacharacters cannot be interpreted as
 * shell syntax, and failures surface as proper Error objects.
 * `force: true` matches `rm -f`: a missing path is a no-op.
 *
 * @param projectPath - Absolute path of the directory to remove
 */
async function deleteLocalProject(projectPath: string): Promise<void> {
  console.log(`Deleting local project: ${projectPath}`);
  await rm(projectPath, { recursive: true, force: true });
}
/**
 * Best-effort removal of the temporary tarball.
 *
 * Security: replaces the original shell `rm -f "${path}"` with fs.rm so the
 * path cannot be interpreted as shell syntax. `force: true` matches `rm -f`
 * semantics: a missing file is a no-op.
 *
 * Errors are logged, not thrown — a leftover /tmp tarball is not worth
 * failing an otherwise successful archive.
 *
 * @param tarballPath - Path of the tarball to remove
 */
async function cleanupTarball(tarballPath: string): Promise<void> {
  try {
    await rm(tarballPath, { force: true });
  } catch (error) {
    console.warn(`Failed to clean up tarball: ${error}`);
  }
}
/**
 * Archive a complete project to S3
 *
 * Workflow:
 * 1. Verify project exists
 * 2. Create tarball of project directory
 * 3. Upload tarball to S3 (s3://agiliton-archive/projects/)
 * 4. Update database with archive metadata
 * 5. Optional: Delete local project directory
 * 6. Clean up temporary tarball (always, even on failure)
 *
 * Fix vs. original: the tarball was only removed on the success path, so a
 * failed upload/DB step leaked a potentially multi-GB file in /tmp. Cleanup
 * now runs in a finally block.
 *
 * @param args - Archive parameters
 * @returns Human-readable result message (success or failure)
 */
export async function projectArchive(
  args: ProjectArchiveArgs
): Promise<string> {
  const { project_key, project_path, delete_local = false, session_id } = args;
  try {
    // 1. Verify project exists
    const exists = await verifyProject(project_key);
    if (!exists) {
      return `Error: Project not found in database: ${project_key}`;
    }
    // 2. Get S3 credentials
    const credentials = await getS3Credentials();
    // 3. Create tarball
    const { tarballPath, size } = await createTarball(project_path, project_key);
    try {
      // 4. Upload to S3
      const archiveLocation = await uploadToS3(tarballPath, project_key, credentials);
      // 5. Update database
      await updateProjectArchive(project_key, archiveLocation, size, session_id);
      // 6. Optional: Delete local project
      if (delete_local) {
        await deleteLocalProject(project_path);
      }
      const sizeStr = `${(size / (1024 * 1024)).toFixed(1)}MB`;
      const deletedStr = delete_local ? ' (local copy deleted)' : '';
      return `✅ Project archived successfully\n` +
        `Project: ${project_key}\n` +
        `Location: ${archiveLocation}\n` +
        `Size: ${sizeStr}${deletedStr}`;
    } finally {
      // 7. Clean up tarball — always, so failed runs don't accumulate in /tmp.
      await cleanupTarball(tarballPath);
    }
  } catch (error) {
    return `❌ Archive failed: ${error}`;
  }
}