Files
session-mcp/scripts/consolidate-projects.ts
Christian Gick 1227e5b339 feat(CF-762): Complete Jira migration - consolidate projects, cleanup
- Remove task CRUD/epic/search/relation/version tools (moved to Jira)
- Add migration scripts: migrate-tasks-to-jira, jira-admin, prepare-all-projects
- Add consolidate-projects.ts for merging duplicate Jira projects
- Add validate-migration.ts for post-migration integrity checks
- Add jira_issue_key columns migration (030)
- Consolidate 11 duplicate projects (LIT→LITE, CARD→CS, etc.)
- Delete 92 placeholder issues, 11 empty source projects
- Remove SG project completely
- 2,798 tasks migrated across 46 Jira projects

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-07 12:33:49 +02:00

491 lines
16 KiB
TypeScript

#!/usr/bin/env npx tsx
/**
* Consolidate/merge Jira projects after CF-762 migration.
*
* Uses Jira Cloud Bulk Move API (POST /rest/api/3/bulk/issues/move)
* to move all issues from SOURCE to TARGET project, then updates
* task_migration_map and tasks table in PostgreSQL.
*
* Usage:
* npx tsx scripts/consolidate-projects.ts --from LIT --to LITE [--dry-run] [--delete-source]
* npx tsx scripts/consolidate-projects.ts --batch tier1 [--dry-run] [--delete-source]
* npx tsx scripts/consolidate-projects.ts --batch all [--dry-run] [--delete-source]
*/
import pg from 'pg';
import dotenv from 'dotenv';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';

const __dirname = dirname(fileURLToPath(import.meta.url));
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });

// Jira Cloud connection — credentials come from .env / process environment.
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');

// PostgreSQL pool.
// SECURITY: the password was previously hard-coded in this file. It now comes
// from the environment, falling back to the legacy value so existing
// deployments keep working — rotate that credential and remove the fallback.
const pool = new pg.Pool({
  host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
  port: parseInt(process.env.POSTGRES_PORT || '5432', 10),
  database: process.env.POSTGRES_DB || 'agiliton',
  user: process.env.POSTGRES_USER || 'agiliton',
  password: process.env.POSTGRES_PASSWORD || 'QtqiwCOAUpQNF6pjzOMAREzUny2bY8V1',
  max: 3, // small pool: this script issues queries sequentially
});

// CLI flags (see usage header). --from/--to/--batch take the value that
// immediately follows the flag on the command line.
const args = process.argv.slice(2);
const DRY_RUN = args.includes('--dry-run');
const DELETE_SOURCE = args.includes('--delete-source');
const FROM = args.find((_, i) => args[i - 1] === '--from') || '';
const TO = args.find((_, i) => args[i - 1] === '--to') || '';
const BATCH = args.find((_, i) => args[i - 1] === '--batch') || '';

// Rate-limit / polling tuning.
const DELAY_MS = 700;           // minimum gap before every Jira API call
const MAX_RETRIES = 5;          // retries for 429/5xx responses
const POLL_INTERVAL_MS = 2000;  // async bulk-move task poll interval
const POLL_TIMEOUT_MS = 120000; // give up polling a task after 2 minutes
// Batch definitions — LIT already moved manually during testing.
// Each entry is [sourceKey, targetKey]: every issue in the source project is
// moved into the target. Tiers let the consolidation run in reviewable
// chunks via --batch tier1|tier2|tier3|all.
const TIER1: Array<[string, string]> = [
['CARD', 'CS'],
['TES', 'TS'],
['DA', 'DB'],
['AF', 'AFNE'],
];
const TIER2: Array<[string, string]> = [
['RUBI', 'RUB'],
['ET', 'TG'],
['ZORK', 'ZOS'],
];
// Tier 3 merges several projects into a single INFR target.
const TIER3: Array<[string, string]> = [
['IS', 'INFR'],
['CLN', 'INFR'],
['TOOLS', 'INFR'],
];
// Minimal shape of a Jira issue as returned by the search endpoint —
// only the fields this script requests (summary, issuetype, status).
interface JiraIssue {
// Human-readable key, e.g. "CARD-12". Changes when the issue moves projects.
key: string;
// Immutable numeric ID (as a string); stable across project moves.
id: string;
fields: {
summary: string;
issuetype: { id: string; name: string };
status: { name: string };
};
}
/** Sleep for the given number of milliseconds. */
function delay(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(() => resolve(), ms);
  });
}
// Low-level Jira REST v3 request helper: prefixes the API base path and
// attaches Basic-auth + JSON headers. Caller-supplied headers win on conflict.
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
  const { headers: extraHeaders, ...rest } = options;
  const requestHeaders = {
    'Authorization': `Basic ${JIRA_AUTH}`,
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    ...extraHeaders,
  };
  return fetch(`${JIRA_URL}/rest/api/3${path}`, { ...rest, headers: requestHeaders });
}
/**
 * jiraFetch with built-in throttling and retry.
 *
 * Waits DELAY_MS before every call (baseline rate limiting), then retries
 * up to MAX_RETRIES times on 429 or 5xx responses using the server's
 * Retry-After header when it is a numeric seconds value, otherwise
 * exponential backoff. Returns the last response if retries are exhausted.
 *
 * FIX: Retry-After may also be an HTTP-date (per RFC 9110); parseInt on
 * that yields NaN, which previously produced delay(NaN) — an immediate,
 * un-backed-off retry. Non-finite/negative values now fall back to
 * exponential backoff.
 */
async function jiraFetchWithRetry(path: string, options: RequestInit = {}): Promise<Response> {
  let lastResponse: Response | null = null;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    await delay(DELAY_MS); // baseline throttle before every request
    const response = await jiraFetch(path, options);
    lastResponse = response;
    if ((response.status === 429 || response.status >= 500) && attempt < MAX_RETRIES) {
      const retryAfter = response.headers.get('Retry-After');
      const retryAfterSec = retryAfter !== null ? parseInt(retryAfter, 10) : NaN;
      const backoffMs = Number.isFinite(retryAfterSec) && retryAfterSec >= 0
        ? retryAfterSec * 1000
        : DELAY_MS * Math.pow(2, attempt + 1);
      console.warn(` [RETRY] ${response.status}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
      await delay(backoffMs);
      continue;
    }
    return response;
  }
  return lastResponse!;
}
// Resolve a project key to its numeric Jira project ID; null if not found.
async function getProjectId(key: string): Promise<string | null> {
  const response = await jiraFetchWithRetry(`/project/${key}`);
  if (response.ok) {
    const { id } = await response.json() as { id: string };
    return id;
  }
  return null;
}
/**
 * Fetch every issue in a project via the enhanced search endpoint
 * (GET /rest/api/3/search/jql), 100 at a time.
 *
 * FIX: this endpoint paginates with nextPageToken, not startAt — the old
 * code passed startAt, which the endpoint ignores, risking an endless loop
 * of duplicate first pages on projects with >100 issues. We now follow
 * nextPageToken until the API reports the last page; an empty page is kept
 * as an extra safety stop. On a failed request the error is logged and the
 * issues collected so far are returned.
 */
async function getAllIssues(projectKey: string): Promise<JiraIssue[]> {
  const issues: JiraIssue[] = [];
  let nextPageToken: string | undefined;
  while (true) {
    const jql = encodeURIComponent(`project="${projectKey}" ORDER BY key ASC`);
    const tokenParam = nextPageToken ? `&nextPageToken=${encodeURIComponent(nextPageToken)}` : '';
    const res = await jiraFetchWithRetry(`/search/jql?jql=${jql}&maxResults=100${tokenParam}&fields=summary,issuetype,status`);
    if (!res.ok) {
      console.error(` Failed to search ${projectKey}: ${res.status} ${await res.text()}`);
      break;
    }
    const data = await res.json() as { issues: JiraIssue[]; isLast?: boolean; nextPageToken?: string };
    issues.push(...data.issues);
    if (data.isLast || !data.nextPageToken || data.issues.length === 0) break;
    nextPageToken = data.nextPageToken;
  }
  return issues;
}
// Build a map of issue-type name → issue-type ID for a project, using
// /project/{key}/statuses (which returns one entry per issue type).
// Returns an empty map when the request fails.
async function getProjectIssueTypes(projectKey: string): Promise<Map<string, string>> {
  const response = await jiraFetchWithRetry(`/project/${projectKey}/statuses`);
  if (!response.ok) {
    return new Map();
  }
  const issueTypes = await response.json() as Array<{ id: string; name: string }>;
  const byName = new Map<string, string>();
  for (const issueType of issueTypes) {
    byName.set(issueType.name, issueType.id);
  }
  return byName;
}
// Start a Jira Cloud bulk move of the given issues into the target
// project/issue type (POST /bulk/issues/move). The mapping key the API
// requires is "targetProjectId,targetIssueTypeId". Returns the async task
// handle on success, or null after logging the failure.
async function bulkMoveIssues(
  issueKeys: string[],
  targetProjectId: string,
  targetIssueTypeId: string,
): Promise<{ taskId: string } | null> {
  const payload = {
    sendBulkNotification: false,
    targetToSourcesMapping: {
      [`${targetProjectId},${targetIssueTypeId}`]: {
        inferFieldDefaults: true,
        inferStatusDefaults: true,
        inferSubtaskTypeDefault: true,
        issueIdsOrKeys: issueKeys,
      },
    },
  };
  const response = await jiraFetchWithRetry('/bulk/issues/move', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  if (response.ok) {
    return await response.json() as { taskId: string };
  }
  const errorBody = await response.text();
  console.error(` FAIL bulk move: ${response.status} ${errorBody}`);
  return null;
}
// Poll a Jira async task (GET /task/{id}) every POLL_INTERVAL_MS until it
// reaches COMPLETE, FAILED/CANCELLED, or POLL_TIMEOUT_MS elapses.
// Returns the IDs of successfully moved issues plus the failure map,
// or null on error / failure / timeout.
async function pollTask(taskId: string): Promise<{ success: number[]; failed: Record<string, unknown> } | null> {
  const deadline = Date.now() + POLL_TIMEOUT_MS;
  while (Date.now() < deadline) {
    await delay(POLL_INTERVAL_MS);
    const res = await jiraFetchWithRetry(`/task/${taskId}`);
    if (!res.ok) {
      console.error(` FAIL poll task ${taskId}: ${res.status}`);
      return null;
    }
    const info = await res.json() as {
      status: string;
      progress: number;
      result?: { successfulIssues: number[]; failedIssues: Record<string, unknown>; totalIssueCount: number };
    };
    switch (info.status) {
      case 'COMPLETE':
        return {
          success: info.result?.successfulIssues || [],
          failed: info.result?.failedIssues || {},
        };
      case 'FAILED':
      case 'CANCELLED':
        console.error(` Task ${taskId} ${info.status}`);
        return null;
      default:
        // Still running — overwrite the same console line with progress.
        if (info.progress > 0) {
          process.stdout.write(`\r Task ${taskId}: ${info.progress}%`);
        }
    }
  }
  console.error(` Task ${taskId} timed out after ${POLL_TIMEOUT_MS / 1000}s`);
  return null;
}
// Look up an issue's current key by its immutable numeric ID — keys change
// when an issue moves projects, the numeric ID does not. Null on failure.
async function getIssueKey(issueId: number): Promise<string | null> {
  const response = await jiraFetchWithRetry(`/issue/${issueId}?fields=key`);
  if (!response.ok) return null;
  const body = await response.json() as { key: string };
  return body.key;
}
// Permanently delete a Jira project (enableUndo=false, so no trash/restore).
// Honors --dry-run by only logging. True when Jira answers 204 No Content.
async function deleteProject(key: string): Promise<boolean> {
  if (DRY_RUN) {
    console.log(` [DRY] Would delete project ${key}`);
    return true;
  }
  const response = await jiraFetch(`/project/${key}?enableUndo=false`, { method: 'DELETE' });
  const deleted = response.status === 204;
  return deleted;
}
/**
 * Consolidate one source→target project pair.
 *
 * Steps:
 *  1. Resolve both project IDs (skip the pair if either is missing).
 *  2. Fetch all issues from the source project.
 *  3. Bulk-move them grouped by issue type (falling back to "Task" when the
 *     target lacks a type), polling each async move task to completion.
 *  4. Resolve old→new issue keys and rewrite PostgreSQL references.
 *  5. Optionally delete the now-empty source project (--delete-source).
 *
 * In --dry-run mode nothing is changed; the returned `moved` count is the
 * number of issues that WOULD have moved.
 */
async function consolidate(from: string, to: string): Promise<{ moved: number; failed: number }> {
console.log(`\n=== Consolidating ${from}${to} ===`);
// Get project IDs (numeric IDs are required by the bulk-move API)
const fromProjectId = await getProjectId(from);
const toProjectId = await getProjectId(to);
if (!fromProjectId) {
console.error(` Source project ${from} does not exist in Jira. Skipping.`);
return { moved: 0, failed: 0 };
}
if (!toProjectId) {
console.error(` Target project ${to} does not exist in Jira. Skipping.`);
return { moved: 0, failed: 0 };
}
// Get target project issue types (name → id), used to map source types
const targetTypes = await getProjectIssueTypes(to);
console.log(` Target ${to} (id=${toProjectId}) issue types: ${Array.from(targetTypes.entries()).map(([n, id]) => `${n}=${id}`).join(', ')}`);
// Get all issues from source
const issues = await getAllIssues(from);
console.log(` Found ${issues.length} issues in ${from}`);
if (issues.length === 0) {
console.log(` Nothing to move.`);
// Source is already empty — optionally clean it up right away.
if (DELETE_SOURCE) {
console.log(` Deleting empty source project ${from}...`);
const deleted = await deleteProject(from);
console.log(` ${deleted ? 'Deleted' : 'FAILED to delete'} ${from}`);
}
return { moved: 0, failed: 0 };
}
if (DRY_RUN) {
console.log(` [DRY] Would move ${issues.length} issues:`);
for (const issue of issues) {
console.log(` ${issue.key} [${issue.fields.issuetype.name}] ${issue.fields.status.name}: ${issue.fields.summary.substring(0, 60)}`);
}
// Deliberately no DB updates in dry run — report would-be moves only.
return { moved: issues.length, failed: 0 };
}
// Build old issue ID → old key map (for tracking after move: Jira keeps
// the numeric ID stable across a move, but the key changes)
const idToOldKey = new Map<number, string>();
for (const issue of issues) {
idToOldKey.set(parseInt(issue.id), issue.key);
}
// Group issues by issue type for bulk move (the bulk-move API takes one
// target project+issue-type mapping per request)
const byType = new Map<string, { typeId: string; typeName: string; keys: string[] }>();
for (const issue of issues) {
const typeName = issue.fields.issuetype.name;
const targetTypeId = targetTypes.get(typeName);
if (!targetTypeId) {
// Fall back to Task if type doesn't exist in target
const fallbackId = targetTypes.get('Task');
if (!fallbackId) {
// No usable target type at all — the issue is skipped and will remain
// in the source project (blocking --delete-source).
console.error(` No matching type for ${typeName} in ${to}, and no Task fallback. Skipping ${issue.key}`);
continue;
}
console.warn(` [WARN] ${issue.key} type ${typeName} not in target, using Task (${fallbackId})`);
const group = byType.get('Task') || { typeId: fallbackId, typeName: 'Task', keys: [] };
group.keys.push(issue.key);
byType.set('Task', group);
} else {
const group = byType.get(typeName) || { typeId: targetTypeId, typeName, keys: [] };
group.keys.push(issue.key);
byType.set(typeName, group);
}
}
let totalMoved = 0;
let totalFailed = 0;
const keyMapping = new Map<string, string>(); // old key → new key
// Move each type group sequentially (Jira rate limits; see DELAY_MS)
for (const [typeName, group] of byType) {
console.log(` Moving ${group.keys.length} ${typeName} issues...`);
const result = await bulkMoveIssues(group.keys, toProjectId, group.typeId);
if (!result) {
totalFailed += group.keys.length;
continue;
}
console.log(` Waiting for task ${result.taskId}...`);
const taskResult = await pollTask(result.taskId);
process.stdout.write('\r');
if (!taskResult) {
// Polling failed or timed out — count the whole group as failed, even
// though the move may still complete server-side.
totalFailed += group.keys.length;
continue;
}
const failedCount = Object.keys(taskResult.failed).length;
console.log(` Task complete: ${taskResult.success.length} moved, ${failedCount} failed`);
totalMoved += taskResult.success.length;
totalFailed += failedCount;
// Resolve new keys for moved issues (one GET per issue; throttled by
// jiraFetchWithRetry's built-in delay)
for (const movedId of taskResult.success) {
const oldKey = idToOldKey.get(movedId);
if (!oldKey) continue;
const newKey = await getIssueKey(movedId);
if (newKey) {
keyMapping.set(oldKey, newKey);
}
}
}
console.log(` Total moved: ${totalMoved}, failed: ${totalFailed}`);
console.log(` Key mappings resolved: ${keyMapping.size}`);
// Log all mappings
for (const [oldKey, newKey] of keyMapping) {
console.log(` ${oldKey}${newKey}`);
}
// Update PostgreSQL so DB references follow the moved issues
if (totalMoved > 0) {
console.log(` Updating PostgreSQL...`);
// 1. Update task_migration_map with new Jira keys
let mapUpdated = 0;
for (const [oldKey, newKey] of keyMapping) {
const res = await pool.query(
`UPDATE task_migration_map SET jira_issue_key = $1, migrated_at = NOW()
WHERE jira_issue_key = $2`,
[newKey, oldKey]
);
if ((res.rowCount || 0) > 0) {
mapUpdated++;
} else {
// Try where old_task_id matches (identity mapping case).
// NOTE(review): this compares a Jira key against old_task_id —
// presumably some rows store the key there; verify against the schema.
const res2 = await pool.query(
`UPDATE task_migration_map SET jira_issue_key = $1, migrated_at = NOW()
WHERE old_task_id = $2`,
[newKey, oldKey]
);
if ((res2.rowCount || 0) > 0) mapUpdated++;
}
}
console.log(` task_migration_map: ${mapUpdated} entries updated`);
// 2. Update tasks table: change project from SOURCE to TARGET
const taskUpdate = await pool.query(
`UPDATE tasks SET project = $1 WHERE project = $2`,
[to, from]
);
console.log(` tasks: ${taskUpdate.rowCount} rows (project ${from}${to})`);
// 3. Update epics table
try {
const epicUpdate = await pool.query(
`UPDATE epics SET project = $1 WHERE project = $2`,
[to, from]
);
console.log(` epics: ${epicUpdate.rowCount} rows`);
} catch { /* epics may not reference this project */ }
// 4. Update FK references that use Jira keys. Errors are swallowed
// deliberately — some of these tables may not exist in every environment.
for (const [oldKey, newKey] of keyMapping) {
try { await pool.query(`UPDATE memories SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
try { await pool.query(`UPDATE session_context SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
try { await pool.query(`UPDATE sessions SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
try { await pool.query(`UPDATE task_commits SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
try { await pool.query(`UPDATE deployments SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
}
console.log(` FK references updated`);
// 5. Update projects table references (drop archive rows for the source)
try {
await pool.query(`DELETE FROM project_archives WHERE project_key = $1`, [from]);
} catch {}
}
// Delete source project if requested — but only after re-checking that no
// issues remain in it (skipped/failed moves leave issues behind).
if (DELETE_SOURCE) {
const remaining = await getAllIssues(from);
if (remaining.length === 0) {
console.log(` Deleting empty source project ${from}...`);
const deleted = await deleteProject(from);
console.log(` ${deleted ? 'Deleted' : 'FAILED to delete'} ${from}`);
} else {
console.log(` Source ${from} still has ${remaining.length} issues, not deleting.`);
}
}
return { moved: totalMoved, failed: totalFailed };
}
// Entry point: validate credentials, resolve the source→target pairs from
// CLI flags, consolidate each pair sequentially, then print a summary and
// close the DB pool.
async function main() {
  console.log('=== Project Consolidation (CF-762 Post-Migration) ===');
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  console.log(`Delete source: ${DELETE_SOURCE ? 'yes' : 'no'}`);
  console.log('');
  if (!JIRA_USER || !JIRA_TOKEN) {
    console.error('Missing JIRA_USERNAME or JIRA_API_TOKEN');
    process.exit(1);
  }
  // Either a named batch or an explicit --from/--to pair must be given.
  let pairs: Array<[string, string]> = [];
  if (BATCH) {
    const batches: Record<string, Array<[string, string]>> = {
      tier1: TIER1,
      tier2: TIER2,
      tier3: TIER3,
      all: [...TIER1, ...TIER2, ...TIER3],
    };
    const selected = batches[BATCH];
    if (!selected) {
      console.error(`Unknown batch: ${BATCH}. Use: tier1, tier2, tier3, all`);
      process.exit(1);
    }
    pairs = selected;
  } else if (FROM && TO) {
    pairs = [[FROM, TO]];
  } else {
    console.error('Usage:');
    console.error(' npx tsx scripts/consolidate-projects.ts --from LIT --to LITE [--dry-run] [--delete-source]');
    console.error(' npx tsx scripts/consolidate-projects.ts --batch tier1|tier2|tier3|all [--dry-run] [--delete-source]');
    process.exit(1);
  }
  console.log(`Pairs to consolidate (${pairs.length}):`);
  for (const [from, to] of pairs) {
    console.log(` ${from}${to}`);
  }
  console.log('');
  // Process pairs one at a time to stay within Jira rate limits.
  let movedTotal = 0;
  let failedTotal = 0;
  for (const [from, to] of pairs) {
    const { moved, failed } = await consolidate(from, to);
    movedTotal += moved;
    failedTotal += failed;
  }
  console.log('\n=== Consolidation Summary ===');
  console.log(`Total moved: ${movedTotal}`);
  console.log(`Total failed: ${failedTotal}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  await pool.end();
}
// Top-level entry: log any unhandled failure and exit non-zero so CI notices.
main().catch((err: unknown) => {
  console.error('Consolidation failed:', err);
  process.exit(1);
});