feat(CF-762): Complete Jira migration - consolidate projects, cleanup
- Remove task CRUD/epic/search/relation/version tools (moved to Jira)
- Add migration scripts: migrate-tasks-to-jira, jira-admin, prepare-all-projects
- Add consolidate-projects.ts for merging duplicate Jira projects
- Add validate-migration.ts for post-migration integrity checks
- Add jira_issue_key columns migration (030)
- Consolidate 11 duplicate projects (LIT→LITE, CARD→CS, etc.)
- Delete 92 placeholder issues, 11 empty source projects
- Remove SG project completely
- 2,798 tasks migrated across 46 Jira projects

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
490
scripts/consolidate-projects.ts
Normal file
490
scripts/consolidate-projects.ts
Normal file
@@ -0,0 +1,490 @@
|
||||
#!/usr/bin/env npx tsx
|
||||
/**
|
||||
* Consolidate/merge Jira projects after CF-762 migration.
|
||||
*
|
||||
* Uses Jira Cloud Bulk Move API (POST /rest/api/3/bulk/issues/move)
|
||||
* to move all issues from SOURCE to TARGET project, then updates
|
||||
* task_migration_map and tasks table in PostgreSQL.
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx scripts/consolidate-projects.ts --from LIT --to LITE [--dry-run] [--delete-source]
|
||||
* npx tsx scripts/consolidate-projects.ts --batch tier1 [--dry-run] [--delete-source]
|
||||
* npx tsx scripts/consolidate-projects.ts --batch all [--dry-run] [--delete-source]
|
||||
*/
|
||||
|
||||
import pg from 'pg';
|
||||
import dotenv from 'dotenv';
|
||||
import { dirname, join } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });
|
||||
|
||||
// Jira Cloud connection settings. Auth is HTTP Basic with email + API token.
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
// Pre-computed Basic-auth payload ("user:token" base64) used on every request.
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');
|
||||
|
||||
const pool = new pg.Pool({
|
||||
host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
|
||||
port: parseInt(process.env.POSTGRES_PORT || '5432'),
|
||||
database: 'agiliton',
|
||||
user: 'agiliton',
|
||||
password: 'QtqiwCOAUpQNF6pjzOMAREzUny2bY8V1',
|
||||
max: 3,
|
||||
});
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
const DRY_RUN = args.includes('--dry-run');
|
||||
const DELETE_SOURCE = args.includes('--delete-source');
|
||||
const FROM = args.find((_, i) => args[i - 1] === '--from') || '';
|
||||
const TO = args.find((_, i) => args[i - 1] === '--to') || '';
|
||||
const BATCH = args.find((_, i) => args[i - 1] === '--batch') || '';
|
||||
|
||||
const DELAY_MS = 700;
|
||||
const MAX_RETRIES = 5;
|
||||
const POLL_INTERVAL_MS = 2000;
|
||||
const POLL_TIMEOUT_MS = 120000;
|
||||
|
||||
// Batch definitions — LIT already moved manually during testing
// Each entry is [sourceKey, targetKey]: every issue in source moves to target.
const TIER1: Array<[string, string]> = [
  ['CARD', 'CS'],
  ['TES', 'TS'],
  ['DA', 'DB'],
  ['AF', 'AFNE'],
];

const TIER2: Array<[string, string]> = [
  ['RUBI', 'RUB'],
  ['ET', 'TG'],
  ['ZORK', 'ZOS'],
];

// Tier 3 folds several projects into a single INFR target.
const TIER3: Array<[string, string]> = [
  ['IS', 'INFR'],
  ['CLN', 'INFR'],
  ['TOOLS', 'INFR'],
];
|
||||
|
||||
// Shape of a Jira issue as returned by this script's searches, which request
// only the summary, issuetype and status fields.
interface JiraIssue {
  key: string; // e.g. "CF-123" — changes when the issue moves projects
  id: string;  // immutable numeric issue id, as a string
  fields: {
    summary: string;
    issuetype: { id: string; name: string };
    status: { name: string };
  };
}
|
||||
|
||||
function delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
|
||||
const url = `${JIRA_URL}/rest/api/3${path}`;
|
||||
return fetch(url, {
|
||||
...options,
|
||||
headers: {
|
||||
'Authorization': `Basic ${JIRA_AUTH}`,
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/json',
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async function jiraFetchWithRetry(path: string, options: RequestInit = {}): Promise<Response> {
|
||||
let lastResponse: Response | null = null;
|
||||
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
|
||||
await delay(DELAY_MS);
|
||||
const response = await jiraFetch(path, options);
|
||||
lastResponse = response;
|
||||
if (response.status === 429 || response.status >= 500) {
|
||||
if (attempt < MAX_RETRIES) {
|
||||
const retryAfter = response.headers.get('Retry-After');
|
||||
const backoffMs = retryAfter
|
||||
? parseInt(retryAfter) * 1000
|
||||
: DELAY_MS * Math.pow(2, attempt + 1);
|
||||
console.warn(` [RETRY] ${response.status}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
|
||||
await delay(backoffMs);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return response;
|
||||
}
|
||||
return lastResponse!;
|
||||
}
|
||||
|
||||
// Get project ID for a project key
|
||||
async function getProjectId(key: string): Promise<string | null> {
|
||||
const res = await jiraFetchWithRetry(`/project/${key}`);
|
||||
if (!res.ok) return null;
|
||||
const data = await res.json() as { id: string };
|
||||
return data.id;
|
||||
}
|
||||
|
||||
// Get all issues in a project (v3 GET /search/jql)
|
||||
async function getAllIssues(projectKey: string): Promise<JiraIssue[]> {
|
||||
const issues: JiraIssue[] = [];
|
||||
let startAt = 0;
|
||||
while (true) {
|
||||
const jql = encodeURIComponent(`project="${projectKey}" ORDER BY key ASC`);
|
||||
const res = await jiraFetchWithRetry(`/search/jql?jql=${jql}&maxResults=100&startAt=${startAt}&fields=summary,issuetype,status`);
|
||||
if (!res.ok) {
|
||||
console.error(` Failed to search ${projectKey}: ${res.status} ${await res.text()}`);
|
||||
break;
|
||||
}
|
||||
const data = await res.json() as { total?: number; issues: JiraIssue[]; isLast?: boolean };
|
||||
issues.push(...data.issues);
|
||||
startAt += data.issues.length;
|
||||
if (data.isLast || (data.total !== undefined && startAt >= data.total) || data.issues.length === 0) break;
|
||||
}
|
||||
return issues;
|
||||
}
|
||||
|
||||
// Get issue type IDs available in a project
|
||||
async function getProjectIssueTypes(projectKey: string): Promise<Map<string, string>> {
|
||||
const res = await jiraFetchWithRetry(`/project/${projectKey}/statuses`);
|
||||
if (!res.ok) return new Map();
|
||||
const types = await res.json() as Array<{ id: string; name: string }>;
|
||||
return new Map(types.map(t => [t.name, t.id]));
|
||||
}
|
||||
|
||||
// Bulk move issues using Jira Cloud API
|
||||
// Key format: "targetProjectId,targetIssueTypeId"
|
||||
async function bulkMoveIssues(
|
||||
issueKeys: string[],
|
||||
targetProjectId: string,
|
||||
targetIssueTypeId: string,
|
||||
): Promise<{ taskId: string } | null> {
|
||||
const mappingKey = `${targetProjectId},${targetIssueTypeId}`;
|
||||
|
||||
const body = {
|
||||
sendBulkNotification: false,
|
||||
targetToSourcesMapping: {
|
||||
[mappingKey]: {
|
||||
inferFieldDefaults: true,
|
||||
inferStatusDefaults: true,
|
||||
inferSubtaskTypeDefault: true,
|
||||
issueIdsOrKeys: issueKeys,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const res = await jiraFetchWithRetry('/bulk/issues/move', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
const errorBody = await res.text();
|
||||
console.error(` FAIL bulk move: ${res.status} ${errorBody}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
const data = await res.json() as { taskId: string };
|
||||
return data;
|
||||
}
|
||||
|
||||
// Poll a Jira async task until complete
|
||||
async function pollTask(taskId: string): Promise<{ success: number[]; failed: Record<string, unknown> } | null> {
|
||||
const start = Date.now();
|
||||
while (Date.now() - start < POLL_TIMEOUT_MS) {
|
||||
await delay(POLL_INTERVAL_MS);
|
||||
const res = await jiraFetchWithRetry(`/task/${taskId}`);
|
||||
if (!res.ok) {
|
||||
console.error(` FAIL poll task ${taskId}: ${res.status}`);
|
||||
return null;
|
||||
}
|
||||
const data = await res.json() as {
|
||||
status: string;
|
||||
progress: number;
|
||||
result?: { successfulIssues: number[]; failedIssues: Record<string, unknown>; totalIssueCount: number };
|
||||
};
|
||||
|
||||
if (data.status === 'COMPLETE') {
|
||||
return {
|
||||
success: data.result?.successfulIssues || [],
|
||||
failed: data.result?.failedIssues || {},
|
||||
};
|
||||
}
|
||||
if (data.status === 'FAILED' || data.status === 'CANCELLED') {
|
||||
console.error(` Task ${taskId} ${data.status}`);
|
||||
return null;
|
||||
}
|
||||
// Still running
|
||||
if (data.progress > 0) {
|
||||
process.stdout.write(`\r Task ${taskId}: ${data.progress}%`);
|
||||
}
|
||||
}
|
||||
console.error(` Task ${taskId} timed out after ${POLL_TIMEOUT_MS / 1000}s`);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Get issue key by numeric ID
|
||||
async function getIssueKey(issueId: number): Promise<string | null> {
|
||||
const res = await jiraFetchWithRetry(`/issue/${issueId}?fields=key`);
|
||||
if (!res.ok) return null;
|
||||
const data = await res.json() as { key: string };
|
||||
return data.key;
|
||||
}
|
||||
|
||||
// Delete a Jira project
|
||||
async function deleteProject(key: string): Promise<boolean> {
|
||||
if (DRY_RUN) {
|
||||
console.log(` [DRY] Would delete project ${key}`);
|
||||
return true;
|
||||
}
|
||||
const res = await jiraFetch(`/project/${key}?enableUndo=false`, { method: 'DELETE' });
|
||||
return res.status === 204;
|
||||
}
|
||||
|
||||
// Consolidate one pair
/**
 * Moves every issue from the `from` project into the `to` project, then
 * mirrors the change in PostgreSQL (task_migration_map, tasks, epics, and
 * jira_issue_key FK columns). Optionally deletes the emptied source project
 * when --delete-source is set. Returns moved/failed issue counts.
 *
 * NOTE(review): in dry-run mode the would-be moves are reported as `moved`
 * without touching Jira or the DB.
 */
async function consolidate(from: string, to: string): Promise<{ moved: number; failed: number }> {
  console.log(`\n=== Consolidating ${from} → ${to} ===`);

  // Get project IDs
  const fromProjectId = await getProjectId(from);
  const toProjectId = await getProjectId(to);
  if (!fromProjectId) {
    console.error(` Source project ${from} does not exist in Jira. Skipping.`);
    return { moved: 0, failed: 0 };
  }
  if (!toProjectId) {
    console.error(` Target project ${to} does not exist in Jira. Skipping.`);
    return { moved: 0, failed: 0 };
  }

  // Get target project issue types
  const targetTypes = await getProjectIssueTypes(to);
  console.log(` Target ${to} (id=${toProjectId}) issue types: ${Array.from(targetTypes.entries()).map(([n, id]) => `${n}=${id}`).join(', ')}`);

  // Get all issues from source
  const issues = await getAllIssues(from);
  console.log(` Found ${issues.length} issues in ${from}`);

  if (issues.length === 0) {
    console.log(` Nothing to move.`);
    if (DELETE_SOURCE) {
      console.log(` Deleting empty source project ${from}...`);
      const deleted = await deleteProject(from);
      console.log(` ${deleted ? 'Deleted' : 'FAILED to delete'} ${from}`);
    }
    return { moved: 0, failed: 0 };
  }

  if (DRY_RUN) {
    console.log(` [DRY] Would move ${issues.length} issues:`);
    for (const issue of issues) {
      console.log(` ${issue.key} [${issue.fields.issuetype.name}] ${issue.fields.status.name}: ${issue.fields.summary.substring(0, 60)}`);
    }
    // Still do DB updates in dry run? No.
    return { moved: issues.length, failed: 0 };
  }

  // Build old issue ID → old key map (for tracking after move)
  // (the numeric id survives the move; the key does not)
  const idToOldKey = new Map<number, string>();
  for (const issue of issues) {
    idToOldKey.set(parseInt(issue.id), issue.key);
  }

  // Group issues by issue type for bulk move
  // (the bulk-move API takes one target project+type combination per call)
  const byType = new Map<string, { typeId: string; typeName: string; keys: string[] }>();
  for (const issue of issues) {
    const typeName = issue.fields.issuetype.name;
    const targetTypeId = targetTypes.get(typeName);
    if (!targetTypeId) {
      // Fall back to Task if type doesn't exist in target
      const fallbackId = targetTypes.get('Task');
      if (!fallbackId) {
        console.error(` No matching type for ${typeName} in ${to}, and no Task fallback. Skipping ${issue.key}`);
        continue;
      }
      console.warn(` [WARN] ${issue.key} type ${typeName} not in target, using Task (${fallbackId})`);
      const group = byType.get('Task') || { typeId: fallbackId, typeName: 'Task', keys: [] };
      group.keys.push(issue.key);
      byType.set('Task', group);
    } else {
      const group = byType.get(typeName) || { typeId: targetTypeId, typeName, keys: [] };
      group.keys.push(issue.key);
      byType.set(typeName, group);
    }
  }

  let totalMoved = 0;
  let totalFailed = 0;
  const keyMapping = new Map<string, string>(); // old key → new key

  // Move each type group
  for (const [typeName, group] of byType) {
    console.log(` Moving ${group.keys.length} ${typeName} issues...`);
    const result = await bulkMoveIssues(group.keys, toProjectId, group.typeId);
    if (!result) {
      // The whole bulk request was rejected — count the group as failed.
      totalFailed += group.keys.length;
      continue;
    }

    console.log(` Waiting for task ${result.taskId}...`);
    const taskResult = await pollTask(result.taskId);
    process.stdout.write('\r'); // clear the in-place progress line
    if (!taskResult) {
      totalFailed += group.keys.length;
      continue;
    }

    const failedCount = Object.keys(taskResult.failed).length;
    console.log(` Task complete: ${taskResult.success.length} moved, ${failedCount} failed`);
    totalMoved += taskResult.success.length;
    totalFailed += failedCount;

    // Resolve new keys for moved issues
    for (const movedId of taskResult.success) {
      const oldKey = idToOldKey.get(movedId);
      if (!oldKey) continue;
      const newKey = await getIssueKey(movedId);
      if (newKey) {
        keyMapping.set(oldKey, newKey);
      }
    }
  }

  console.log(` Total moved: ${totalMoved}, failed: ${totalFailed}`);
  console.log(` Key mappings resolved: ${keyMapping.size}`);

  // Log all mappings
  for (const [oldKey, newKey] of keyMapping) {
    console.log(` ${oldKey} → ${newKey}`);
  }

  // Update PostgreSQL
  if (totalMoved > 0) {
    console.log(` Updating PostgreSQL...`);

    // 1. Update task_migration_map with new Jira keys
    let mapUpdated = 0;
    for (const [oldKey, newKey] of keyMapping) {
      const res = await pool.query(
        `UPDATE task_migration_map SET jira_issue_key = $1, migrated_at = NOW()
 WHERE jira_issue_key = $2`,
        [newKey, oldKey]
      );
      if ((res.rowCount || 0) > 0) {
        mapUpdated++;
      } else {
        // Try where old_task_id matches (identity mapping case)
        // NOTE(review): oldKey (a Jira key) is compared against old_task_id —
        // this relies on the migration having used identical keys; confirm.
        const res2 = await pool.query(
          `UPDATE task_migration_map SET jira_issue_key = $1, migrated_at = NOW()
 WHERE old_task_id = $2`,
          [newKey, oldKey]
        );
        if ((res2.rowCount || 0) > 0) mapUpdated++;
      }
    }
    console.log(` task_migration_map: ${mapUpdated} entries updated`);

    // 2. Update tasks table: change project from SOURCE to TARGET
    const taskUpdate = await pool.query(
      `UPDATE tasks SET project = $1 WHERE project = $2`,
      [to, from]
    );
    console.log(` tasks: ${taskUpdate.rowCount} rows (project ${from} → ${to})`);

    // 3. Update epics table
    try {
      const epicUpdate = await pool.query(
        `UPDATE epics SET project = $1 WHERE project = $2`,
        [to, from]
      );
      console.log(` epics: ${epicUpdate.rowCount} rows`);
    } catch { /* epics may not reference this project */ }

    // 4. Update FK references that use Jira keys
    // (best-effort per table: some tables/columns may not exist in every env)
    for (const [oldKey, newKey] of keyMapping) {
      try { await pool.query(`UPDATE memories SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
      try { await pool.query(`UPDATE session_context SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
      try { await pool.query(`UPDATE sessions SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
      try { await pool.query(`UPDATE task_commits SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
      try { await pool.query(`UPDATE deployments SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
    }
    console.log(` FK references updated`);

    // 5. Update projects table references
    try {
      await pool.query(`DELETE FROM project_archives WHERE project_key = $1`, [from]);
    } catch {}
  }

  // Delete source project if requested
  if (DELETE_SOURCE) {
    // Re-check the source is truly empty before a permanent delete.
    const remaining = await getAllIssues(from);
    if (remaining.length === 0) {
      console.log(` Deleting empty source project ${from}...`);
      const deleted = await deleteProject(from);
      console.log(` ${deleted ? 'Deleted' : 'FAILED to delete'} ${from}`);
    } else {
      console.log(` Source ${from} still has ${remaining.length} issues, not deleting.`);
    }
  }

  return { moved: totalMoved, failed: totalFailed };
}
|
||||
|
||||
async function main() {
|
||||
console.log('=== Project Consolidation (CF-762 Post-Migration) ===');
|
||||
console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
|
||||
console.log(`Delete source: ${DELETE_SOURCE ? 'yes' : 'no'}`);
|
||||
console.log('');
|
||||
|
||||
if (!JIRA_USER || !JIRA_TOKEN) {
|
||||
console.error('Missing JIRA_USERNAME or JIRA_API_TOKEN');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let pairs: Array<[string, string]> = [];
|
||||
|
||||
if (BATCH) {
|
||||
switch (BATCH) {
|
||||
case 'tier1': pairs = TIER1; break;
|
||||
case 'tier2': pairs = TIER2; break;
|
||||
case 'tier3': pairs = TIER3; break;
|
||||
case 'all': pairs = [...TIER1, ...TIER2, ...TIER3]; break;
|
||||
default:
|
||||
console.error(`Unknown batch: ${BATCH}. Use: tier1, tier2, tier3, all`);
|
||||
process.exit(1);
|
||||
}
|
||||
} else if (FROM && TO) {
|
||||
pairs = [[FROM, TO]];
|
||||
} else {
|
||||
console.error('Usage:');
|
||||
console.error(' npx tsx scripts/consolidate-projects.ts --from LIT --to LITE [--dry-run] [--delete-source]');
|
||||
console.error(' npx tsx scripts/consolidate-projects.ts --batch tier1|tier2|tier3|all [--dry-run] [--delete-source]');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`Pairs to consolidate (${pairs.length}):`);
|
||||
for (const [from, to] of pairs) {
|
||||
console.log(` ${from} → ${to}`);
|
||||
}
|
||||
console.log('');
|
||||
|
||||
let totalMoved = 0;
|
||||
let totalFailed = 0;
|
||||
|
||||
for (const [from, to] of pairs) {
|
||||
const result = await consolidate(from, to);
|
||||
totalMoved += result.moved;
|
||||
totalFailed += result.failed;
|
||||
}
|
||||
|
||||
console.log('\n=== Consolidation Summary ===');
|
||||
console.log(`Total moved: ${totalMoved}`);
|
||||
console.log(`Total failed: ${totalFailed}`);
|
||||
console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
|
||||
|
||||
await pool.end();
|
||||
}
|
||||
|
||||
main().catch(err => {
|
||||
console.error('Consolidation failed:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
213
scripts/jira-admin.ts
Normal file
213
scripts/jira-admin.ts
Normal file
@@ -0,0 +1,213 @@
|
||||
#!/usr/bin/env npx tsx
|
||||
/**
|
||||
* Jira admin helper for migration (CF-762)
|
||||
* Usage:
|
||||
* npx tsx scripts/jira-admin.ts get-project CF
|
||||
* npx tsx scripts/jira-admin.ts delete-project CF
|
||||
* npx tsx scripts/jira-admin.ts create-project CF "Claude Framework"
|
||||
* npx tsx scripts/jira-admin.ts count-issues CF
|
||||
* npx tsx scripts/jira-admin.ts delete-all-issues CF
|
||||
*/
|
||||
|
||||
import dotenv from 'dotenv';
|
||||
import { dirname, join } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });
|
||||
|
||||
// Jira Cloud connection settings (basic auth). JIRA_EMAIL is accepted as a
// fallback variable name for the username.
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || process.env.JIRA_EMAIL || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
// Pre-computed Basic-auth payload ("user:token" base64).
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');
|
||||
|
||||
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
|
||||
const url = path.startsWith('http') ? path : `${JIRA_URL}/rest/api/3${path}`;
|
||||
return fetch(url, {
|
||||
...options,
|
||||
headers: {
|
||||
'Authorization': `Basic ${JIRA_AUTH}`,
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/json',
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
// CLI dispatch: first argv token is the subcommand, the rest are its arguments.
const [command, ...cmdArgs] = process.argv.slice(2);

/**
 * Dispatches on `command`. Each case is a small self-contained admin action
 * against the Jira Cloud REST API; unknown commands print the command list.
 */
async function main() {
  switch (command) {
    // Dump the raw project JSON for a key.
    case 'get-project': {
      const key = cmdArgs[0];
      const res = await jiraFetch(`/project/${key}`);
      if (!res.ok) {
        console.error(`Failed: ${res.status} ${await res.text()}`);
        return;
      }
      const data = await res.json() as Record<string, unknown>;
      console.log(JSON.stringify(data, null, 2));
      break;
    }

    // List every visible project with key/name/id/type.
    case 'list-projects': {
      const res = await jiraFetch('/project');
      if (!res.ok) {
        console.error(`Failed: ${res.status} ${await res.text()}`);
        return;
      }
      const projects = await res.json() as Array<{ key: string; name: string; id: string; projectTypeKey: string }>;
      console.log(`Total: ${projects.length} projects`);
      for (const p of projects) {
        console.log(` ${p.key}: ${p.name} (id=${p.id}, type=${p.projectTypeKey})`);
      }
      break;
    }

    // Report the issue count of a project via a 1-result JQL search.
    case 'count-issues': {
      const key = cmdArgs[0];
      const res = await jiraFetch(`/search/jql`, {
        method: 'POST',
        body: JSON.stringify({ jql: `project="${key}"`, maxResults: 1 }),
      });
      if (!res.ok) {
        console.error(`Failed: ${res.status} ${await res.text()}`);
        return;
      }
      const data = await res.json() as { total: number };
      console.log(`${key}: ${data.total} issues`);
      break;
    }

    // Print up to N (default 20) issues of a project with type/status/summary.
    case 'list-issues': {
      const key = cmdArgs[0];
      const max = parseInt(cmdArgs[1] || '20');
      const res = await jiraFetch(`/search/jql`, {
        method: 'POST',
        body: JSON.stringify({ jql: `project="${key}" ORDER BY key ASC`, maxResults: max, fields: ['key', 'summary', 'issuetype', 'status'] }),
      });
      if (!res.ok) {
        console.error(`Failed: ${res.status} ${await res.text()}`);
        return;
      }
      const data = await res.json() as { total: number; issues: Array<{ key: string; fields: { summary: string; issuetype: { name: string }; status: { name: string } } }> };
      console.log(`${key}: ${data.total} total issues (showing ${data.issues.length})`);
      for (const i of data.issues) {
        console.log(` ${i.key} [${i.fields.issuetype.name}] ${i.fields.status.name}: ${i.fields.summary.substring(0, 60)}`);
      }
      break;
    }

    // Collect every issue key in a project, then delete them one by one
    // (300 ms apart to respect rate limits).
    case 'delete-all-issues': {
      const key = cmdArgs[0];
      if (!key) { console.error('Usage: delete-all-issues <PROJECT_KEY>'); return; }

      // Get all issues
      let startAt = 0;
      const allKeys: string[] = [];
      while (true) {
        const res = await jiraFetch(`/search/jql`, {
          method: 'POST',
          body: JSON.stringify({ jql: `project="${key}" ORDER BY key ASC`, maxResults: 100, startAt, fields: ['key'] }),
        });
        if (!res.ok) { console.error(`Failed: ${res.status} ${await res.text()}`); return; }
        const data = await res.json() as { total: number; issues: Array<{ key: string }> };
        if (data.issues.length === 0) break;
        allKeys.push(...data.issues.map(i => i.key));
        startAt += data.issues.length;
        if (startAt >= data.total) break;
      }

      console.log(`Found ${allKeys.length} issues to delete in ${key}`);

      for (let i = 0; i < allKeys.length; i++) {
        await delay(300);
        const res = await jiraFetch(`/issue/${allKeys[i]}`, { method: 'DELETE' });
        if (!res.ok) {
          console.error(` FAIL delete ${allKeys[i]}: ${res.status}`);
        }
        // NOTE(review): this progress line prints "Deleted" every 10th issue
        // even if that particular DELETE just failed above.
        if (i % 10 === 0) console.log(` [${i + 1}/${allKeys.length}] Deleted ${allKeys[i]}`);
      }
      console.log(`Deleted ${allKeys.length} issues from ${key}`);
      break;
    }

    // Permanently delete a project (no undo).
    case 'delete-project': {
      const key = cmdArgs[0];
      if (!key) { console.error('Usage: delete-project <PROJECT_KEY>'); return; }

      // enableUndo=false for permanent deletion
      const res = await jiraFetch(`/project/${key}?enableUndo=false`, { method: 'DELETE' });
      if (res.status === 204) {
        console.log(`Project ${key} deleted permanently`);
      } else {
        console.error(`Failed: ${res.status} ${await res.text()}`);
      }
      break;
    }

    // Create a business-type project led by the authenticated user.
    case 'create-project': {
      const key = cmdArgs[0];
      const name = cmdArgs[1] || key;
      if (!key) { console.error('Usage: create-project <KEY> <NAME>'); return; }

      // Get current user account ID for lead
      const meRes = await jiraFetch('/myself');
      const me = await meRes.json() as { accountId: string };

      const body = {
        key,
        name,
        projectTypeKey: 'business',
        leadAccountId: me.accountId,
        assigneeType: 'UNASSIGNED',
      };

      const res = await jiraFetch('/project', {
        method: 'POST',
        body: JSON.stringify(body),
      });

      if (res.ok || res.status === 201) {
        const data = await res.json() as { id: string; key: string };
        console.log(`Project created: ${data.key} (id=${data.id})`);
      } else {
        console.error(`Failed: ${res.status} ${await res.text()}`);
      }
      break;
    }

    // Show a project's type/style plus its issue types and their statuses.
    case 'get-schemes': {
      const key = cmdArgs[0];
      // Get issue type scheme for project
      const res = await jiraFetch(`/project/${key}`);
      if (!res.ok) {
        console.error(`Failed: ${res.status} ${await res.text()}`);
        return;
      }
      const data = await res.json() as Record<string, unknown>;
      console.log('Project type:', (data as any).projectTypeKey);
      console.log('Style:', (data as any).style);

      // Get issue types
      const itRes = await jiraFetch(`/project/${key}/statuses`);
      if (itRes.ok) {
        const itData = await itRes.json() as Array<{ name: string; id: string; statuses: Array<{ name: string }> }>;
        console.log('\nIssue types and statuses:');
        for (const it of itData) {
          console.log(` ${it.name} (id=${it.id}): ${it.statuses.map(s => s.name).join(', ')}`);
        }
      }
      break;
    }

    default:
      console.log('Commands: list-projects, get-project, count-issues, list-issues, delete-all-issues, delete-project, create-project, get-schemes');
  }
}

main().catch(err => { console.error(err); process.exit(1); });
|
||||
887
scripts/migrate-tasks-to-jira.ts
Normal file
887
scripts/migrate-tasks-to-jira.ts
Normal file
@@ -0,0 +1,887 @@
|
||||
#!/usr/bin/env npx tsx
|
||||
/**
|
||||
* Migrate tasks from task-mcp PostgreSQL to Jira Cloud (CF-762)
|
||||
* EXACT KEY MATCHING: CF-1 in task-mcp → CF-1 in Jira
|
||||
*
|
||||
* Strategy:
|
||||
* 1. Create tasks in strict numeric order (1..maxId), filling gaps with placeholders
|
||||
* 2. After all tasks, create epics (they get keys after maxId)
|
||||
* 3. Then create session plans as epics
|
||||
* 4. Link tasks to their epics via parent field update
|
||||
* 5. Create issue links, retry cross-project ones
|
||||
* 6. Store mapping and update FK references
|
||||
*
|
||||
* IMPORTANT: The Jira project must be empty (counter at 1) for key matching to work.
|
||||
* Delete and recreate the project before running this script.
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx scripts/migrate-tasks-to-jira.ts [--dry-run] [--project CF] [--open-only] [--limit 5] [--batch-size 50]
|
||||
*
|
||||
* Requires env vars (from .env or shell):
|
||||
* JIRA_URL, JIRA_USERNAME, JIRA_API_TOKEN
|
||||
* POSTGRES_HOST (defaults to postgres.agiliton.internal)
|
||||
*/
|
||||
|
||||
import pg from 'pg';
|
||||
import dotenv from 'dotenv';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, join } from 'path';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });
|
||||
|
||||
const { Pool } = pg;
|
||||
|
||||
// --- Config ---
// Jira Cloud connection (basic auth). JIRA_EMAIL is accepted as a fallback
// variable name for the username.
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || process.env.JIRA_EMAIL || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
// Pre-computed Basic-auth payload ("user:token" base64) used on every request.
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');
|
||||
|
||||
const pool = new Pool({
|
||||
host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
|
||||
port: parseInt(process.env.POSTGRES_PORT || '5432'),
|
||||
database: 'agiliton',
|
||||
user: 'agiliton',
|
||||
password: 'QtqiwCOAUpQNF6pjzOMAREzUny2bY8V1',
|
||||
max: 3,
|
||||
});
|
||||
|
||||
// --- CLI args ---
const args = process.argv.slice(2);
// Preview mode: report what would happen without writing.
const DRY_RUN = args.includes('--dry-run');
// NOTE(review): presumably restricts migration to non-closed tasks — confirm
// against the task query further down the file.
const OPEN_ONLY = args.includes('--open-only');
// Value following --project, or null to process all projects.
const PROJECT_FILTER = args.find((a, i) => args[i - 1] === '--project') || null;
// Value following --limit; 0 means unlimited (absent or unparsable also → 0).
const LIMIT = parseInt(args.find((a, i) => args[i - 1] === '--limit') || '0') || 0;
// Issues per batch before pausing (default 50).
const BATCH_SIZE = parseInt(args.find((a, i) => args[i - 1] === '--batch-size') || '50') || 50;
const SKIP_PREFLIGHT = args.includes('--skip-preflight');

// Herocoders Checklist for Jira custom field
const CHECKLIST_FIELD = 'customfield_10091';

// Rate limit: Jira Cloud allows ~100 req/min for basic auth
// 700ms delay = ~85 req/min (safe margin)
const DELAY_MS = 700;
const MAX_RETRIES = 5;
const BATCH_PAUSE_MS = 5000; // 5s pause between batches

// ADF max size (Jira limit)
const ADF_MAX_BYTES = 32_000;
|
||||
|
||||
function delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
// --- Mappings ---
// task-mcp priority (P0..P3) → Jira priority name.
const PRIORITY_MAP: Record<string, string> = {
  P0: 'Highest',
  P1: 'High',
  P2: 'Medium',
  P3: 'Low',
};

// task-mcp task type → Jira issue type. Only 'bug' keeps a distinct Jira
// type; feature/debt/investigation all collapse to Task.
const TYPE_MAP: Record<string, string> = {
  task: 'Task',
  bug: 'Bug',
  feature: 'Task',
  debt: 'Task',
  investigation: 'Task',
};

// task-mcp status → Jira workflow status. Deliberately lossy: 'blocked'
// maps to 'To Do' and 'abandoned' maps to 'Done'.
const STATUS_MAP: Record<string, string> = {
  open: 'To Do',
  pending: 'To Do',
  in_progress: 'In Progress',
  testing: 'In Progress',
  blocked: 'To Do',
  done: 'Done',
  completed: 'Done',
  abandoned: 'Done',
};

// task relation type → Jira issue link type name. Several relation kinds
// collapse onto 'Relates'/'Blocks' since Jira has fewer link types.
const LINK_TYPE_MAP: Record<string, string> = {
  blocks: 'Blocks',
  relates_to: 'Relates',
  duplicates: 'Duplicate',
  depends_on: 'Blocks',
  implements: 'Relates',
  fixes: 'Relates',
  causes: 'Relates',
  needs: 'Blocks',
  subtask_of: 'Relates',
};

// Jira project keys accepted by this migration: 2-5 uppercase letters.
const VALID_PROJECT_KEY = /^[A-Z]{2,5}$/;

// Track migration mapping: old task_id → Jira issue key
const migrationMap: Map<string, string> = new Map();
// NOTE(review): presumably the set of Jira project keys encountered/created
// during this run — confirm against usage later in the file.
const jiraProjects: Set<string> = new Set();
// Issue links that could not be created on first pass (cross-project links
// are retried later, per the file header).
const failedLinks: Array<{ from: string; to: string; type: string }> = [];

// Track epic old_id → Jira key (assigned after tasks)
const epicJiraKeys: Map<string, string> = new Map();

// Tasks that need parent (epic) link set after epics are created
const pendingParentLinks: Array<{ taskJiraKey: string; epicOldId: string }> = [];
|
||||
|
||||
// --- Jira REST API helpers ---
|
||||
|
||||
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
|
||||
const url = `${JIRA_URL}/rest/api/3${path}`;
|
||||
return fetch(url, {
|
||||
...options,
|
||||
headers: {
|
||||
'Authorization': `Basic ${JIRA_AUTH}`,
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/json',
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * jiraFetch plus rate limiting and retry: waits DELAY_MS before every
 * request, retries 429/5xx responses with exponential backoff (honoring
 * Retry-After when present), and returns the final failing response after
 * MAX_RETRIES attempts. Callers must still check response.ok.
 */
async function jiraFetchWithRetry(path: string, options: RequestInit = {}): Promise<Response> {
  let lastResponse: Response | null = null;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    // Fixed pacing delay before every attempt (see DELAY_MS rationale above).
    await delay(DELAY_MS);
    const response = await jiraFetch(path, options);
    lastResponse = response;
    // Retry only on throttling (429) and server errors (5xx).
    if (response.status === 429 || response.status >= 500) {
      if (attempt < MAX_RETRIES) {
        // Prefer the server-suggested Retry-After (seconds) over our own backoff.
        const retryAfter = response.headers.get('Retry-After');
        const backoffMs = retryAfter
          ? parseInt(retryAfter) * 1000
          : DELAY_MS * Math.pow(2, attempt + 1);
        console.warn(`  [RETRY] ${response.status} on ${path}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
        await delay(backoffMs);
        continue;
      }
      console.error(`  [FAIL] ${response.status} on ${path} after ${MAX_RETRIES} retries`);
    }
    return response;
  }
  // Unreachable in practice (every loop path returns); kept as a type guard.
  return lastResponse!;
}
|
||||
|
||||
/**
 * Same retry/backoff policy as jiraFetchWithRetry, but against the v2 REST
 * API — needed for plain-text custom fields like the checklist, which the
 * v3 ADF representation cannot set.
 */
async function jiraFetchV2WithRetry(path: string, options: RequestInit = {}): Promise<Response> {
  const url = `${JIRA_URL}/rest/api/2${path}`;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    // Fixed pacing delay before every attempt keeps us under the rate limit.
    await delay(DELAY_MS);
    const response = await fetch(url, {
      ...options,
      headers: {
        'Authorization': `Basic ${JIRA_AUTH}`,
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        ...options.headers,
      },
    });
    // Retry only on throttling (429) and server errors (5xx).
    if (response.status === 429 || response.status >= 500) {
      if (attempt < MAX_RETRIES) {
        const backoffMs = DELAY_MS * Math.pow(2, attempt + 1); // exponential backoff
        console.warn(`  [RETRY] v2 ${response.status} on ${path}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
        await delay(backoffMs);
        continue;
      }
    }
    return response;
  }
  // Unreachable in practice (the final attempt always returns); safety net only.
  throw new Error(`jiraFetchV2WithRetry: exhausted retries for ${path}`);
}
|
||||
|
||||
// --- ADF helpers ---
|
||||
|
||||
// --- ADF helpers ---

/**
 * Convert plain text into a minimal Atlassian Document Format (ADF) doc:
 * blank lines delimit paragraphs, single newlines are kept inside a
 * paragraph. Oversized input is truncated to stay under Jira's 32KB
 * description limit, with a visible truncation marker appended.
 */
function textToAdf(text: string): Record<string, unknown> {
  // Normalize all line endings to \n.
  let normalized = text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
  // Truncate by repeated 10% cuts (byte length, not char count — content may
  // be multi-byte UTF-8). 500-byte headroom leaves room for the marker.
  if (Buffer.byteLength(normalized, 'utf8') > ADF_MAX_BYTES - 500) {
    while (Buffer.byteLength(normalized, 'utf8') > ADF_MAX_BYTES - 500) {
      normalized = normalized.substring(0, Math.floor(normalized.length * 0.9));
    }
    normalized += '\n\n[...truncated - description exceeded 32KB limit]';
  }
  const lines = normalized.split('\n');
  const paragraphs: Array<Record<string, unknown>> = [];
  let currentParagraph = '';
  for (const line of lines) {
    if (line.trim() === '') {
      // Blank (or whitespace-only) line ends the current paragraph.
      if (currentParagraph.trim()) {
        paragraphs.push({
          type: 'paragraph',
          content: [{ type: 'text', text: currentParagraph.trim() }],
        });
      }
      currentParagraph = '';
    } else {
      currentParagraph += (currentParagraph ? '\n' : '') + line;
    }
  }
  // Flush the trailing paragraph.
  if (currentParagraph.trim()) {
    paragraphs.push({
      type: 'paragraph',
      content: [{ type: 'text', text: currentParagraph.trim() }],
    });
  }
  // ADF requires at least one content node; fall back to the raw input.
  if (paragraphs.length === 0) {
    paragraphs.push({
      type: 'paragraph',
      content: [{ type: 'text', text: text.trim() || '(empty)' }],
    });
  }
  return { type: 'doc', version: 1, content: paragraphs };
}
|
||||
|
||||
// --- API operations ---
|
||||
|
||||
async function getJiraProjects(): Promise<string[]> {
|
||||
const res = await jiraFetchWithRetry('/project');
|
||||
if (!res.ok) {
|
||||
console.error('Failed to list Jira projects:', res.status, await res.text());
|
||||
return [];
|
||||
}
|
||||
const projects = await res.json() as Array<{ key: string }>;
|
||||
return projects.map(p => p.key);
|
||||
}
|
||||
|
||||
/**
 * Best-effort issue count for a project; used in main() only as an
 * "already migrated?" gate (> 0 check).
 * NOTE(review): the request sends maxResults: 1, and the newer /search/jql
 * endpoint may omit `total`; in that case the issues-array fallback reports
 * at most 1. Fine for the existence check, but do not reuse this as an
 * exact count without confirming the response shape.
 */
async function countJiraIssues(projectKey: string): Promise<number> {
  const res = await jiraFetchWithRetry('/search/jql', {
    method: 'POST',
    body: JSON.stringify({ jql: `project="${projectKey}"`, maxResults: 1, fields: ['summary'] }),
  });
  if (!res.ok) return 0; // treat API failure as "no issues"
  const data = await res.json() as { total?: number; issues?: unknown[] };
  return data.total ?? data.issues?.length ?? 0;
}
|
||||
|
||||
async function createJiraIssue(fields: Record<string, unknown>): Promise<string | null> {
|
||||
if (DRY_RUN) {
|
||||
const key = `${(fields.project as Record<string, string>).key}-DRY`;
|
||||
console.log(` [DRY] Would create: ${(fields.summary as string).substring(0, 60)}`);
|
||||
return key;
|
||||
}
|
||||
const res = await jiraFetchWithRetry('/issue', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ fields }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const body = await res.text();
|
||||
console.error(` FAIL create issue: ${res.status} ${body}`);
|
||||
return null;
|
||||
}
|
||||
const data = await res.json() as { key: string };
|
||||
return data.key;
|
||||
}
|
||||
|
||||
async function transitionIssue(issueKey: string, targetStatus: string): Promise<boolean> {
|
||||
if (DRY_RUN) return true;
|
||||
const res = await jiraFetchWithRetry(`/issue/${issueKey}/transitions`);
|
||||
if (!res.ok) return false;
|
||||
const data = await res.json() as { transitions: Array<{ id: string; name: string }> };
|
||||
const transition = data.transitions.find(t =>
|
||||
t.name.toLowerCase() === targetStatus.toLowerCase()
|
||||
);
|
||||
if (!transition) {
|
||||
// Try partial match (e.g., "In Progress" matches "Start Progress")
|
||||
const partialMatch = data.transitions.find(t =>
|
||||
t.name.toLowerCase().includes(targetStatus.toLowerCase()) ||
|
||||
// Map common alternative names
|
||||
(targetStatus === 'In Progress' && t.name.toLowerCase().includes('progress')) ||
|
||||
(targetStatus === 'Done' && t.name.toLowerCase().includes('done'))
|
||||
);
|
||||
if (partialMatch) {
|
||||
const transRes = await jiraFetchWithRetry(`/issue/${issueKey}/transitions`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ transition: { id: partialMatch.id } }),
|
||||
});
|
||||
return transRes.ok;
|
||||
}
|
||||
console.warn(` [WARN] No transition to "${targetStatus}" for ${issueKey}. Available: ${data.transitions.map(t => t.name).join(', ')}`);
|
||||
return false;
|
||||
}
|
||||
const transRes = await jiraFetchWithRetry(`/issue/${issueKey}/transitions`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ transition: { id: transition.id } }),
|
||||
});
|
||||
return transRes.ok;
|
||||
}
|
||||
|
||||
async function writeChecklist(issueKey: string, items: Array<{ item: string; checked: boolean }>): Promise<void> {
|
||||
if (DRY_RUN || items.length === 0) return;
|
||||
const checklistText = items
|
||||
.map(i => `* [${i.checked ? 'x' : ' '}] ${i.item}`)
|
||||
.join('\n');
|
||||
const res = await jiraFetchV2WithRetry(`/issue/${issueKey}`, {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({ fields: { [CHECKLIST_FIELD]: checklistText } }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const body = await res.text();
|
||||
console.error(` FAIL checklist for ${issueKey}: ${res.status} ${body}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function setParent(issueKey: string, parentKey: string): Promise<void> {
|
||||
if (DRY_RUN) return;
|
||||
const res = await jiraFetchWithRetry(`/issue/${issueKey}`, {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({ fields: { parent: { key: parentKey } } }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const body = await res.text();
|
||||
console.error(` FAIL set parent ${parentKey} for ${issueKey}: ${res.status} ${body}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function createIssueLink(inwardKey: string, outwardKey: string, linkType: string): Promise<boolean> {
|
||||
if (DRY_RUN) {
|
||||
console.log(` [DRY] Would link: ${inwardKey} -[${linkType}]-> ${outwardKey}`);
|
||||
return true;
|
||||
}
|
||||
const res = await jiraFetchWithRetry('/issueLink', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
type: { name: linkType },
|
||||
inwardIssue: { key: inwardKey },
|
||||
outwardIssue: { key: outwardKey },
|
||||
}),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const body = await res.text();
|
||||
console.error(` FAIL link ${inwardKey}->${outwardKey}: ${res.status} ${body}`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
async function deleteIssue(issueKey: string): Promise<void> {
|
||||
await jiraFetchWithRetry(`/issue/${issueKey}`, { method: 'DELETE' });
|
||||
}
|
||||
|
||||
// --- Pre-flight check ---
|
||||
|
||||
// --- Pre-flight check ---

/**
 * Intentionally a no-op that always returns true: creating a test issue in
 * the target project would consume key #1 and permanently break the
 * exact-key sequence this migration depends on (Jira never reuses key
 * numbers, even after deletion). Kept as a function so a safe,
 * out-of-project check can be added later.
 */
async function preflightWorkflowCheck(projectKey: string): Promise<boolean> {
  console.log(`\nPre-flight workflow check on ${projectKey}...`);
  if (DRY_RUN || SKIP_PREFLIGHT) {
    console.log(`  [${DRY_RUN ? 'DRY' : 'SKIP'}] Skipping pre-flight check`);
    return true;
  }

  // IMPORTANT: pre-flight consumes a key number!
  // We must account for this. The test issue will be key #1,
  // then we delete it, but the counter stays at 2.
  // So we CANNOT do pre-flight on the same project if we want exact keys.
  // Instead, use a different project for pre-flight.
  console.log('  WARNING: Pre-flight check would consume issue key #1.');
  console.log('  Skipping in-project pre-flight to preserve key sequence.');
  console.log('  Use --skip-preflight explicitly if already verified.');
  return true;
}
|
||||
|
||||
// --- Migration: exact key ordering ---
|
||||
|
||||
async function migrateTasksExactKeys(projectKey: string): Promise<Map<string, string>> {
|
||||
const epicMap = new Map<string, string>();
|
||||
|
||||
// 1. Load all tasks for this project, indexed by numeric ID
|
||||
const tasks = await pool.query(
|
||||
`SELECT id, title, description, type, status, priority, epic_id, created_at
|
||||
FROM tasks WHERE project = $1 ORDER BY id`,
|
||||
[projectKey]
|
||||
);
|
||||
|
||||
// Build a map of numeric ID → task row
|
||||
const taskById = new Map<number, (typeof tasks.rows)[0]>();
|
||||
let maxNum = 0;
|
||||
for (const task of tasks.rows) {
|
||||
const m = task.id.match(new RegExp(`^${projectKey}-(\\d+)$`));
|
||||
if (m) {
|
||||
const num = parseInt(m[1]);
|
||||
taskById.set(num, task);
|
||||
if (num > maxNum) maxNum = num;
|
||||
}
|
||||
}
|
||||
|
||||
if (maxNum === 0) {
|
||||
console.log(' No numeric task IDs found, skipping.');
|
||||
return epicMap;
|
||||
}
|
||||
|
||||
const effectiveMax = LIMIT > 0 ? Math.min(maxNum, LIMIT) : maxNum;
|
||||
const gapCount = effectiveMax - (LIMIT > 0 ? Math.min(taskById.size, LIMIT) : taskById.size) +
|
||||
Array.from({ length: effectiveMax }, (_, i) => i + 1).filter(n => n <= effectiveMax && !taskById.has(n)).length -
|
||||
(effectiveMax - (LIMIT > 0 ? Math.min(taskById.size, LIMIT) : taskById.size));
|
||||
|
||||
// Actually compute properly
|
||||
let realTasks = 0;
|
||||
let gaps = 0;
|
||||
for (let n = 1; n <= effectiveMax; n++) {
|
||||
if (taskById.has(n)) realTasks++;
|
||||
else gaps++;
|
||||
}
|
||||
|
||||
console.log(` Creating ${effectiveMax} issues (${realTasks} real tasks + ${gaps} placeholders)...`);
|
||||
|
||||
// 2. Create issues 1..maxNum in strict order
|
||||
for (let n = 1; n <= effectiveMax; n++) {
|
||||
const task = taskById.get(n);
|
||||
const taskId = `${projectKey}-${n}`;
|
||||
const expectedJiraKey = `${projectKey}-${n}`;
|
||||
|
||||
if (task) {
|
||||
// Real task
|
||||
const labels: string[] = ['migrated-from-task-mcp'];
|
||||
if (task.type === 'feature') labels.push('feature');
|
||||
if (task.type === 'debt') labels.push('tech-debt');
|
||||
if (task.type === 'investigation') labels.push('investigation');
|
||||
if (task.status === 'blocked') labels.push('blocked');
|
||||
if (task.status === 'abandoned') labels.push('abandoned');
|
||||
|
||||
const fields: Record<string, unknown> = {
|
||||
project: { key: projectKey },
|
||||
summary: task.title.substring(0, 255),
|
||||
issuetype: { name: TYPE_MAP[task.type] || 'Task' },
|
||||
priority: { name: PRIORITY_MAP[task.priority] || 'Medium' },
|
||||
labels,
|
||||
};
|
||||
|
||||
if (task.description) {
|
||||
fields.description = textToAdf(task.description);
|
||||
}
|
||||
|
||||
// Don't set parent here — epics don't exist yet. Queue for later.
|
||||
const jiraKey = await createJiraIssue(fields);
|
||||
if (!jiraKey) {
|
||||
console.error(` FATAL: Failed to create ${taskId}, key sequence broken!`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Verify key matches
|
||||
if (!DRY_RUN && jiraKey !== expectedJiraKey) {
|
||||
console.error(` FATAL: Key mismatch! Expected ${expectedJiraKey}, got ${jiraKey}. Aborting.`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
migrationMap.set(task.id, jiraKey);
|
||||
|
||||
// Transition
|
||||
const targetStatus = STATUS_MAP[task.status] || 'To Do';
|
||||
if (targetStatus !== 'To Do') {
|
||||
await transitionIssue(jiraKey, targetStatus);
|
||||
}
|
||||
|
||||
// Checklist
|
||||
const checklist = await pool.query(
|
||||
'SELECT item, checked FROM task_checklist WHERE task_id = $1 ORDER BY position',
|
||||
[task.id]
|
||||
);
|
||||
if (checklist.rows.length > 0) {
|
||||
await writeChecklist(jiraKey, checklist.rows);
|
||||
}
|
||||
|
||||
// Queue parent link for later
|
||||
if (task.epic_id) {
|
||||
pendingParentLinks.push({ taskJiraKey: jiraKey, epicOldId: task.epic_id });
|
||||
}
|
||||
} else {
|
||||
// Gap — create placeholder
|
||||
const fields: Record<string, unknown> = {
|
||||
project: { key: projectKey },
|
||||
summary: `[Placeholder] Deleted task ${taskId}`,
|
||||
issuetype: { name: 'Task' },
|
||||
labels: ['migration-placeholder', 'migrated-from-task-mcp'],
|
||||
};
|
||||
|
||||
const jiraKey = await createJiraIssue(fields);
|
||||
if (!jiraKey) {
|
||||
console.error(` FATAL: Failed to create placeholder for ${taskId}, key sequence broken!`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!DRY_RUN && jiraKey !== expectedJiraKey) {
|
||||
console.error(` FATAL: Key mismatch! Expected ${expectedJiraKey}, got ${jiraKey}. Aborting.`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Transition placeholder to Done
|
||||
await transitionIssue(jiraKey, 'Done');
|
||||
}
|
||||
|
||||
// Progress logging
|
||||
if (n % 10 === 0 || n === effectiveMax) {
|
||||
console.log(` [${n}/${effectiveMax}] ${task ? task.id : `gap → placeholder`} → ${projectKey}-${n}`);
|
||||
}
|
||||
|
||||
// Batch pause
|
||||
if (n > 0 && n % BATCH_SIZE === 0) {
|
||||
console.log(` [BATCH PAUSE] ${n}/${effectiveMax}, pausing ${BATCH_PAUSE_MS / 1000}s...`);
|
||||
await delay(BATCH_PAUSE_MS);
|
||||
}
|
||||
}
|
||||
|
||||
return epicMap;
|
||||
}
|
||||
|
||||
async function migrateEpicsAfterTasks(projectKey: string): Promise<void> {
|
||||
const epics = await pool.query(
|
||||
'SELECT id, title, description, status FROM epics WHERE project = $1 ORDER BY id',
|
||||
[projectKey]
|
||||
);
|
||||
|
||||
if (epics.rows.length === 0) return;
|
||||
|
||||
console.log(` Creating ${epics.rows.length} epics (after task range)...`);
|
||||
|
||||
for (let i = 0; i < epics.rows.length; i++) {
|
||||
const epic = epics.rows[i];
|
||||
const labels: string[] = ['migrated-from-task-mcp'];
|
||||
const fields: Record<string, unknown> = {
|
||||
project: { key: projectKey },
|
||||
summary: epic.title.substring(0, 255),
|
||||
description: epic.description ? textToAdf(epic.description) : undefined,
|
||||
issuetype: { name: 'Epic' },
|
||||
labels,
|
||||
};
|
||||
|
||||
const jiraKey = await createJiraIssue(fields);
|
||||
if (jiraKey) {
|
||||
epicJiraKeys.set(epic.id, jiraKey);
|
||||
console.log(` [${i + 1}/${epics.rows.length}] Epic ${epic.id} → ${jiraKey}: ${epic.title.substring(0, 50)}`);
|
||||
|
||||
if (epic.status === 'completed' || epic.status === 'done') {
|
||||
await transitionIssue(jiraKey, 'Done');
|
||||
} else if (epic.status === 'in_progress') {
|
||||
await transitionIssue(jiraKey, 'In Progress');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function migrateSessionPlansAfterTasks(projectKey: string): Promise<void> {
|
||||
const plans = await pool.query(
|
||||
`SELECT sp.id, sp.session_id, sp.plan_file_name, sp.plan_content, sp.status
|
||||
FROM session_plans sp
|
||||
JOIN sessions s ON sp.session_id = s.id
|
||||
WHERE s.project = $1`,
|
||||
[projectKey]
|
||||
);
|
||||
|
||||
if (plans.rows.length === 0) return;
|
||||
|
||||
console.log(` Creating ${plans.rows.length} session plans as Epics...`);
|
||||
|
||||
for (let i = 0; i < plans.rows.length; i++) {
|
||||
const plan = plans.rows[i];
|
||||
const labels: string[] = ['migrated-from-task-mcp', 'session-plan'];
|
||||
if (plan.plan_file_name) {
|
||||
labels.push(`plan:${plan.plan_file_name.replace(/[^a-zA-Z0-9._-]/g, '_').substring(0, 50)}`);
|
||||
}
|
||||
if (plan.status) {
|
||||
labels.push(`plan-status:${plan.status}`);
|
||||
}
|
||||
|
||||
const fields: Record<string, unknown> = {
|
||||
project: { key: projectKey },
|
||||
summary: `[Session Plan] ${plan.plan_file_name || `Plan from session ${plan.session_id}`}`.substring(0, 255),
|
||||
description: plan.plan_content ? textToAdf(plan.plan_content) : undefined,
|
||||
issuetype: { name: 'Epic' },
|
||||
labels,
|
||||
};
|
||||
|
||||
const jiraKey = await createJiraIssue(fields);
|
||||
if (jiraKey) {
|
||||
epicJiraKeys.set(`plan-${plan.id}`, jiraKey);
|
||||
console.log(` [${i + 1}/${plans.rows.length}] Plan ${plan.id} → ${jiraKey}`);
|
||||
|
||||
if (plan.status === 'executed' || plan.status === 'abandoned') {
|
||||
await transitionIssue(jiraKey, 'Done');
|
||||
} else if (plan.status === 'approved') {
|
||||
await transitionIssue(jiraKey, 'In Progress');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function linkTasksToEpics(): Promise<void> {
|
||||
if (pendingParentLinks.length === 0) return;
|
||||
|
||||
console.log(` Setting parent (epic) for ${pendingParentLinks.length} tasks...`);
|
||||
|
||||
let linked = 0;
|
||||
for (const { taskJiraKey, epicOldId } of pendingParentLinks) {
|
||||
const epicJiraKey = epicJiraKeys.get(epicOldId);
|
||||
if (!epicJiraKey) continue;
|
||||
|
||||
await setParent(taskJiraKey, epicJiraKey);
|
||||
linked++;
|
||||
|
||||
if (linked % 20 === 0) {
|
||||
console.log(` [${linked}/${pendingParentLinks.length}] parent links set`);
|
||||
}
|
||||
|
||||
if (linked % BATCH_SIZE === 0) {
|
||||
console.log(` [BATCH PAUSE] ${linked}/${pendingParentLinks.length}, pausing...`);
|
||||
await delay(BATCH_PAUSE_MS);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(` Linked ${linked} tasks to epics`);
|
||||
}
|
||||
|
||||
/**
 * Recreate task_links rows as Jira issue links for one project.
 * Links whose endpoints are not (yet) both in migrationMap — typically
 * cross-project links whose other side migrates later — are pushed onto
 * failedLinks and retried once at the end (retryFailedLinks).
 */
async function migrateLinks(projectKey: string): Promise<void> {
  const links = await pool.query(
    `SELECT tl.from_task_id, tl.to_task_id, tl.link_type
     FROM task_links tl
     JOIN tasks t1 ON tl.from_task_id = t1.id
     JOIN tasks t2 ON tl.to_task_id = t2.id
     WHERE t1.project = $1 OR t2.project = $1`,
    [projectKey]
  );

  if (links.rows.length === 0) return;

  console.log(`  Migrating ${links.rows.length} links...`);

  let created = 0;
  let skipped = 0;
  for (const link of links.rows) {
    const fromKey = migrationMap.get(link.from_task_id);
    const toKey = migrationMap.get(link.to_task_id);

    // Defer links whose endpoints aren't both migrated yet.
    if (!fromKey || !toKey) {
      failedLinks.push({ from: link.from_task_id, to: link.to_task_id, type: link.link_type });
      skipped++;
      continue;
    }

    const jiraLinkType = LINK_TYPE_MAP[link.link_type] || 'Relates';
    let success: boolean;
    // depends_on/needs map to 'Blocks' with reversed direction:
    // "A depends_on B" becomes "B blocks A".
    if (link.link_type === 'depends_on' || link.link_type === 'needs') {
      success = await createIssueLink(toKey, fromKey, jiraLinkType);
    } else {
      success = await createIssueLink(fromKey, toKey, jiraLinkType);
    }
    if (success) created++;
  }

  console.log(`  Created ${created} links, ${skipped} deferred for cross-project retry`);
}
|
||||
|
||||
async function retryFailedLinks(): Promise<void> {
|
||||
if (failedLinks.length === 0) return;
|
||||
|
||||
console.log(`\nRetrying ${failedLinks.length} deferred cross-project links...`);
|
||||
|
||||
let created = 0;
|
||||
let failed = 0;
|
||||
for (const link of failedLinks) {
|
||||
const fromKey = migrationMap.get(link.from);
|
||||
const toKey = migrationMap.get(link.to);
|
||||
if (!fromKey || !toKey) { failed++; continue; }
|
||||
|
||||
const jiraLinkType = LINK_TYPE_MAP[link.type] || 'Relates';
|
||||
let success: boolean;
|
||||
if (link.type === 'depends_on' || link.type === 'needs') {
|
||||
success = await createIssueLink(toKey, fromKey, jiraLinkType);
|
||||
} else {
|
||||
success = await createIssueLink(fromKey, toKey, jiraLinkType);
|
||||
}
|
||||
if (success) created++;
|
||||
else failed++;
|
||||
}
|
||||
|
||||
console.log(` Retry results: ${created} created, ${failed} failed`);
|
||||
}
|
||||
|
||||
// --- Post-migration ---
|
||||
|
||||
async function updateSessionMappings(): Promise<void> {
|
||||
console.log('\nStoring migration mappings...');
|
||||
|
||||
await pool.query(`
|
||||
CREATE TABLE IF NOT EXISTS task_migration_map (
|
||||
old_task_id TEXT PRIMARY KEY,
|
||||
jira_issue_key TEXT NOT NULL,
|
||||
migrated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)
|
||||
`);
|
||||
|
||||
let count = 0;
|
||||
for (const [oldId, jiraKey] of migrationMap) {
|
||||
if (!DRY_RUN) {
|
||||
await pool.query(
|
||||
`INSERT INTO task_migration_map (old_task_id, jira_issue_key)
|
||||
VALUES ($1, $2)
|
||||
ON CONFLICT (old_task_id) DO UPDATE SET jira_issue_key = $2, migrated_at = NOW()`,
|
||||
[oldId, jiraKey]
|
||||
);
|
||||
}
|
||||
count++;
|
||||
}
|
||||
|
||||
console.log(` Stored ${count} mappings`);
|
||||
}
|
||||
|
||||
/**
 * Backfill jira_issue_key columns on tables that reference old task ids.
 * Adds the column where missing (idempotent), then joins each table against
 * task_migration_map. Table/column names are interpolated into SQL, but
 * they come from the hardcoded lists below — not from user input.
 */
async function updateForeignKeyReferences(): Promise<void> {
  console.log('\nUpdating FK references with Jira issue keys...');

  if (DRY_RUN) {
    console.log('  [DRY] Skipping FK reference updates');
    return;
  }

  const alterStatements = [
    'ALTER TABLE memories ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
    'ALTER TABLE session_context ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
    'ALTER TABLE deployments ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
    'ALTER TABLE task_commits ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
  ];

  // Tolerate tables that don't exist in this database ("does not exist").
  for (const sql of alterStatements) {
    try { await pool.query(sql); }
    catch (e: unknown) {
      const msg = e instanceof Error ? e.message : String(e);
      if (!msg.includes('does not exist')) console.warn(`  [WARN] ${sql}: ${msg}`);
    }
  }

  const updates = [
    { table: 'memories', fk: 'task_id', desc: 'memories' },
    { table: 'session_context', fk: 'current_task_id', desc: 'session_context' },
    { table: 'deployments', fk: 'task_id', desc: 'deployments' },
    { table: 'task_commits', fk: 'task_id', desc: 'task_commits' },
  ];

  // Only fills rows where jira_issue_key is still NULL, so re-runs are safe.
  for (const { table, fk, desc } of updates) {
    try {
      const result = await pool.query(
        `UPDATE ${table} SET jira_issue_key = m.jira_issue_key
         FROM task_migration_map m
         WHERE ${table}.${fk} = m.old_task_id
         AND ${table}.jira_issue_key IS NULL`
      );
      console.log(`  ${desc}: ${result.rowCount} rows updated`);
    } catch (e: unknown) {
      const msg = e instanceof Error ? e.message : String(e);
      console.warn(`  [WARN] ${desc}: ${msg}`);
    }
  }

  // sessions has no direct task FK; resolve via session_context.current_task_id.
  try {
    const result = await pool.query(
      `UPDATE sessions SET jira_issue_key = m.jira_issue_key
       FROM task_migration_map m, session_context sc
       WHERE sc.session_id = sessions.id
       AND sc.current_task_id = m.old_task_id
       AND sessions.jira_issue_key IS NULL`
    );
    console.log(`  sessions: ${result.rowCount} rows updated`);
  } catch (e: unknown) {
    const msg = e instanceof Error ? e.message : String(e);
    console.warn(`  [WARN] sessions: ${msg}`);
  }
}
|
||||
|
||||
// --- Main ---
|
||||
|
||||
// --- Main ---

/**
 * Orchestrates the full migration. Per project: tasks (exact keys) → epics
 * → session plans → parent links → issue links. Then globally:
 * cross-project link retry, mapping table, FK backfill.
 * Projects are skipped when they don't exist in Jira or already contain
 * issues (i.e. were migrated in a previous run).
 */
async function main() {
  console.log('=== task-mcp → Jira Cloud Migration (EXACT KEY MATCHING) ===');
  console.log(`Jira: ${JIRA_URL}`);
  console.log(`User: ${JIRA_USER}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  console.log(`Filter: ${PROJECT_FILTER || 'all valid projects'}`);
  console.log(`Scope: ${OPEN_ONLY ? 'open tasks only' : 'all tasks'}`);
  console.log(`Limit: ${LIMIT || 'none'}`);
  console.log(`Batch: ${BATCH_SIZE} (${BATCH_PAUSE_MS / 1000}s pause)`);
  console.log(`Rate: ${DELAY_MS}ms delay, ${MAX_RETRIES} retries`);
  console.log('');

  if (!JIRA_USER || !JIRA_TOKEN) {
    console.error('Missing JIRA_USERNAME or JIRA_API_TOKEN');
    process.exit(1);
  }

  const existingProjects = await getJiraProjects();
  existingProjects.forEach(p => jiraProjects.add(p));
  console.log(`Existing Jira projects: ${existingProjects.join(', ')}`);

  // Only DB projects whose key looks like a Jira key (2-5 uppercase letters).
  const dbProjects = await pool.query(
    'SELECT key, name FROM projects WHERE key ~ $1 ORDER BY key',
    ['^[A-Z]{2,5}$']
  );

  const projectsToMigrate = dbProjects.rows.filter(p => {
    if (PROJECT_FILTER && p.key !== PROJECT_FILTER) return false;
    if (!VALID_PROJECT_KEY.test(p.key)) return false;
    return true;
  });

  console.log(`Projects to migrate: ${projectsToMigrate.map(p => p.key).join(', ')}`);

  const missing = projectsToMigrate.filter(p => !jiraProjects.has(p.key));
  if (missing.length > 0) {
    console.log(`\nWARNING: These projects don't exist in Jira yet (will be skipped):`);
    missing.forEach(p => console.log(`  ${p.key} - ${p.name}`));
    console.log('Create them in Jira first, then re-run migration.\n');
  }

  // Migrate each project
  for (const project of projectsToMigrate) {
    if (!jiraProjects.has(project.key)) {
      console.log(`\nSkipping ${project.key} (not in Jira)`);
      continue;
    }

    console.log(`\n--- Migrating project: ${project.key} (${project.name}) ---`);

    // Check if project already has issues (already migrated)
    const existingCount = await countJiraIssues(project.key);
    if (existingCount > 0) {
      console.log(`  Skipping: already has ${existingCount} issues in Jira`);
      continue;
    }

    // Clear per-project state
    // NOTE(review): epicJiraKeys is NOT cleared here, so the per-project
    // "epics" count in the summary below is cumulative across projects —
    // confirm that is intended.
    pendingParentLinks.length = 0;

    // 1. Tasks in exact numeric order (with gap placeholders)
    await migrateTasksExactKeys(project.key);

    // 2. Epics (after tasks, so they get keys after maxTaskId)
    await migrateEpicsAfterTasks(project.key);

    // 3. Session plans as epics
    await migrateSessionPlansAfterTasks(project.key);

    // 4. Link tasks to their parent epics (now that epics exist)
    await linkTasksToEpics();

    // 5. Issue links
    await migrateLinks(project.key);

    // Summary
    const taskCount = Array.from(migrationMap.values()).filter(v => v.startsWith(`${project.key}-`)).length;
    console.log(`  Done: ${epicJiraKeys.size} epics, ${taskCount} tasks migrated`);
  }

  // 6. Retry cross-project links
  await retryFailedLinks();

  // 7. Store mapping
  await updateSessionMappings();

  // 8. Update FK references
  await updateForeignKeyReferences();

  // Final summary
  console.log('\n=== Migration Summary ===');
  console.log(`Total issues migrated: ${migrationMap.size}`);
  console.log(`Epics created: ${epicJiraKeys.size}`);
  console.log(`Failed links: ${failedLinks.filter(l => !migrationMap.has(l.from) || !migrationMap.has(l.to)).length}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN (no changes made)' : 'LIVE'}`);

  await pool.end();
}
|
||||
|
||||
main().catch(err => {
|
||||
console.error('Migration failed:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
221
scripts/prepare-all-projects.ts
Normal file
221
scripts/prepare-all-projects.ts
Normal file
@@ -0,0 +1,221 @@
|
||||
#!/usr/bin/env npx tsx
|
||||
/**
|
||||
* Prepare all projects for exact-key migration (CF-762)
|
||||
* For each project: delete → recreate → assign shared issue type scheme
|
||||
* Then the migration script can run for all projects at once.
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx scripts/prepare-all-projects.ts [--dry-run] [--exclude CF]
|
||||
*/
|
||||
|
||||
import pg from 'pg';
import dotenv from 'dotenv';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';

// ESM has no __dirname; reconstruct it so .env resolves relative to the repo root.
const __dirname = dirname(fileURLToPath(import.meta.url));
// override: true → values from .env win over already-set environment variables.
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });
|
||||
|
||||
const { Pool } = pg;
|
||||
|
||||
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
|
||||
const JIRA_USER = process.env.JIRA_USERNAME || process.env.JIRA_EMAIL || '';
|
||||
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
|
||||
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');
|
||||
const SHARED_SCHEME_ID = '10329'; // Agiliton Software Issue Type Scheme
|
||||
|
||||
const pool = new Pool({
|
||||
host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
|
||||
port: 5432, database: 'agiliton', user: 'agiliton',
|
||||
password: 'QtqiwCOAUpQNF6pjzOMAREzUny2bY8V1', max: 3,
|
||||
});
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
const DRY_RUN = args.includes('--dry-run');
|
||||
const excludeIdx = args.indexOf('--exclude');
|
||||
const EXCLUDE = excludeIdx >= 0 ? args[excludeIdx + 1]?.split(',') || [] : [];
|
||||
|
||||
function delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
|
||||
return fetch(`${JIRA_URL}/rest/api/3${path}`, {
|
||||
...options,
|
||||
headers: {
|
||||
Authorization: `Basic ${JIRA_AUTH}`,
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async function getJiraProjects(): Promise<Array<{ key: string; name: string; id: string }>> {
|
||||
const res = await jiraFetch('/project');
|
||||
if (!res.ok) return [];
|
||||
return res.json() as Promise<Array<{ key: string; name: string; id: string }>>;
|
||||
}
|
||||
|
||||
async function deleteProject(key: string): Promise<boolean> {
|
||||
const res = await jiraFetch(`/project/${key}?enableUndo=false`, { method: 'DELETE' });
|
||||
return res.status === 204;
|
||||
}
|
||||
|
||||
async function createProject(key: string, name: string, leadAccountId: string): Promise<string | null> {
|
||||
const res = await jiraFetch('/project', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
key,
|
||||
name,
|
||||
projectTypeKey: 'business',
|
||||
leadAccountId,
|
||||
assigneeType: 'UNASSIGNED',
|
||||
}),
|
||||
});
|
||||
if (res.ok || res.status === 201) {
|
||||
const data = await res.json() as { id: string };
|
||||
return data.id;
|
||||
}
|
||||
console.error(` FAIL create ${key}: ${res.status} ${await res.text()}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
async function assignScheme(projectId: string): Promise<boolean> {
|
||||
const res = await jiraFetch('/issuetypescheme/project', {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({
|
||||
issueTypeSchemeId: SHARED_SCHEME_ID,
|
||||
projectId,
|
||||
}),
|
||||
});
|
||||
return res.ok || res.status === 204;
|
||||
}
|
||||
|
||||
async function verifyScheme(key: string): Promise<boolean> {
|
||||
const res = await jiraFetch(`/project/${key}/statuses`);
|
||||
if (!res.ok) return false;
|
||||
const statuses = await res.json() as Array<{ name: string }>;
|
||||
const names = statuses.map(s => s.name);
|
||||
return names.includes('Epic') && names.includes('Task') && names.includes('Bug');
|
||||
}
|
||||
|
||||
/**
 * Prepare every migratable project for the CF-762 task migration.
 *
 * DESTRUCTIVE: for each DB project that also exists in Jira (and is not
 * excluded), the Jira project is DELETED (no undo) and recreated empty with
 * the shared issue type scheme. Run with --dry-run first.
 *
 * Flow per project: delete -> wait -> recreate -> assign scheme -> verify.
 * Any step failing logs the project as failed and moves on to the next.
 */
async function main() {
  console.log('=== Prepare Projects for Migration ===');
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  console.log(`Exclude: ${EXCLUDE.length > 0 ? EXCLUDE.join(', ') : 'none'}`);
  console.log('');

  // Get current user for project lead (recreated projects need a lead).
  const meRes = await jiraFetch('/myself');
  const me = await meRes.json() as { accountId: string };

  // Get existing Jira projects, keyed for quick membership checks.
  const jiraProjects = await getJiraProjects();
  const jiraProjectMap = new Map(jiraProjects.map(p => [p.key, p]));
  console.log(`Jira projects: ${jiraProjects.length}`);

  // Get DB projects with tasks. max_id strips the "KEY-" prefix from task ids
  // to find the highest numeric suffix; the gap between max_id and task_count
  // is the number of placeholder issues the migration will need.
  const dbProjects = await pool.query(
    `SELECT p.key, p.name, COUNT(t.id) as task_count,
            MAX(CAST(REGEXP_REPLACE(t.id, '^' || p.key || '-', '') AS INTEGER)) as max_id
     FROM projects p
     JOIN tasks t ON t.project = p.key
     WHERE p.key ~ '^[A-Z]{2,5}$'
     GROUP BY p.key, p.name
     ORDER BY p.key`
  );

  console.log(`DB projects with tasks: ${dbProjects.rows.length}`);
  console.log('');

  // Filter: must exist in Jira, not excluded via --exclude.
  const toProcess = dbProjects.rows.filter((p: any) => {
    if (EXCLUDE.includes(p.key)) return false;
    if (!jiraProjectMap.has(p.key)) return false;
    return true;
  });

  console.log(`Projects to prepare: ${toProcess.length}`);
  console.log('');

  // Summary table
  console.log('Project | Tasks | Max ID | Placeholders | Status');
  console.log('--------|-------|--------|-------------|-------');
  let totalTasks = 0;
  let totalPlaceholders = 0;
  for (const p of toProcess) {
    // NOTE(review): pg returns COUNT() as a string; `p.max_id - p.task_count`
    // relies on JS numeric coercion of that string — confirm intentional.
    const placeholders = p.max_id - p.task_count;
    totalTasks += parseInt(p.task_count);
    totalPlaceholders += placeholders;
    console.log(`${p.key.padEnd(7)} | ${String(p.task_count).padStart(5)} | ${String(p.max_id).padStart(6)} | ${String(placeholders).padStart(11)} | pending`);
  }
  console.log(`TOTAL | ${String(totalTasks).padStart(5)} | ${String(totalTasks + totalPlaceholders).padStart(6)} | ${String(totalPlaceholders).padStart(11)} |`);
  console.log('');

  // Dry run stops after printing the plan — nothing is deleted or created.
  if (DRY_RUN) {
    console.log('[DRY RUN] Would process above projects. Run without --dry-run to execute.');
    await pool.end();
    return;
  }

  // Process each project
  let success = 0;
  let failed = 0;
  for (let i = 0; i < toProcess.length; i++) {
    const p = toProcess[i];
    const jiraProject = jiraProjectMap.get(p.key)!;
    console.log(`[${i + 1}/${toProcess.length}] ${p.key} (${p.name})...`);

    // 1. Delete (delay first to stay under Jira rate limits)
    await delay(1000);
    const deleted = await deleteProject(p.key);
    if (!deleted) {
      console.error(` FAIL delete ${p.key}`);
      failed++;
      continue;
    }
    console.log(` Deleted`);

    // 2. Wait a bit for Jira to process the deletion before reusing the key
    await delay(2000);

    // 3. Recreate under the same key, preferring the original Jira name
    const newId = await createProject(p.key, jiraProject.name || p.name, me.accountId);
    if (!newId) {
      console.error(` FAIL recreate ${p.key}`);
      failed++;
      continue;
    }
    console.log(` Recreated (id=${newId})`);

    // 4. Assign shared scheme so Epic/Task/Bug are available
    await delay(1000);
    const schemeOk = await assignScheme(newId);
    if (!schemeOk) {
      console.error(` FAIL assign scheme for ${p.key}`);
      failed++;
      continue;
    }

    // 5. Verify the scheme actually took effect
    const verified = await verifyScheme(p.key);
    if (!verified) {
      console.error(` FAIL verify scheme for ${p.key} (missing Epic/Task/Bug)`);
      failed++;
      continue;
    }

    console.log(` Scheme OK (Epic/Task/Bug)`);
    success++;
  }

  console.log(`\n=== Preparation Summary ===`);
  console.log(`Success: ${success}`);
  console.log(`Failed: ${failed}`);
  console.log(`\nRun migration: npx tsx scripts/migrate-tasks-to-jira.ts --skip-preflight`);

  await pool.end();
}
|
||||
|
||||
// Entry point: surface any uncaught failure and exit non-zero.
main().catch(err => {
  console.error(err);
  process.exit(1);
});
|
||||
232
scripts/validate-migration.ts
Normal file
232
scripts/validate-migration.ts
Normal file
@@ -0,0 +1,232 @@
|
||||
#!/usr/bin/env npx tsx
|
||||
/**
|
||||
* Validate CF-762 migration integrity.
|
||||
* Checks: Jira issue counts vs DB, statuses, checklists, epic links, FK references.
|
||||
*
|
||||
* Usage: npx tsx scripts/validate-migration.ts [--project CF] [--verbose]
|
||||
*/
|
||||
|
||||
import pg from 'pg';
|
||||
import dotenv from 'dotenv';
|
||||
import { dirname, join } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });
|
||||
|
||||
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
|
||||
const JIRA_USER = process.env.JIRA_USERNAME || '';
|
||||
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
|
||||
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');
|
||||
|
||||
const pool = new pg.Pool({
|
||||
host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
|
||||
port: parseInt(process.env.POSTGRES_PORT || '5432'),
|
||||
database: 'agiliton', user: 'agiliton',
|
||||
password: 'QtqiwCOAUpQNF6pjzOMAREzUny2bY8V1', max: 3,
|
||||
});
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
const PROJECT_FILTER = args.find((_, i) => args[i - 1] === '--project') || '';
|
||||
const VERBOSE = args.includes('--verbose');
|
||||
const DELAY_MS = 700;
|
||||
|
||||
function delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function jiraFetch(path: string): Promise<Response> {
|
||||
await delay(DELAY_MS);
|
||||
return fetch(`${JIRA_URL}/rest/api/3${path}`, {
|
||||
headers: {
|
||||
'Authorization': `Basic ${JIRA_AUTH}`,
|
||||
'Accept': 'application/json',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// v3 search/jql uses cursor pagination, no total. Count by paging through.
|
||||
async function jiraIssueCount(projectKey: string): Promise<number> {
|
||||
let count = 0;
|
||||
let nextPageToken: string | undefined;
|
||||
while (true) {
|
||||
const jql = encodeURIComponent(`project="${projectKey}"`);
|
||||
let url = `/search/jql?jql=${jql}&maxResults=100&fields=summary`;
|
||||
if (nextPageToken) url += `&nextPageToken=${encodeURIComponent(nextPageToken)}`;
|
||||
const res = await jiraFetch(url);
|
||||
if (!res.ok) return -1;
|
||||
const data = await res.json() as { issues: unknown[]; nextPageToken?: string; isLast?: boolean };
|
||||
count += data.issues.length;
|
||||
if (data.isLast || !data.nextPageToken || data.issues.length === 0) break;
|
||||
nextPageToken = data.nextPageToken;
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
async function jiraPlaceholderCount(): Promise<number> {
|
||||
const jql = encodeURIComponent(`labels = "migration-placeholder"`);
|
||||
const res = await jiraFetch(`/search/jql?jql=${jql}&maxResults=0`);
|
||||
if (!res.ok) return -1;
|
||||
const data = await res.json() as { total?: number };
|
||||
return data.total ?? -1;
|
||||
}
|
||||
|
||||
async function spotCheckChecklists(projectKey: string): Promise<{ total: number; withChecklist: number }> {
|
||||
const jql = encodeURIComponent(`project="${projectKey}" AND labels = "migrated-from-task-mcp" ORDER BY key ASC`);
|
||||
const res = await jiraFetch(`/search/jql?jql=${jql}&maxResults=3&fields=summary,customfield_10091`);
|
||||
if (!res.ok) return { total: 0, withChecklist: 0 };
|
||||
const data = await res.json() as { issues: Array<{ key: string; fields: Record<string, unknown> }> };
|
||||
let withChecklist = 0;
|
||||
for (const issue of data.issues) {
|
||||
if (issue.fields.customfield_10091) withChecklist++;
|
||||
}
|
||||
return { total: data.issues.length, withChecklist };
|
||||
}
|
||||
|
||||
async function spotCheckStatuses(projectKey: string): Promise<Record<string, number>> {
|
||||
const counts: Record<string, number> = {};
|
||||
const jql = encodeURIComponent(`project="${projectKey}" AND labels = "migrated-from-task-mcp"`);
|
||||
const res = await jiraFetch(`/search/jql?jql=${jql}&maxResults=100&fields=status`);
|
||||
if (!res.ok) return counts;
|
||||
const data = await res.json() as { issues: Array<{ fields: { status: { name: string } } }> };
|
||||
for (const issue of data.issues) {
|
||||
const status = issue.fields.status.name;
|
||||
counts[status] = (counts[status] || 0) + 1;
|
||||
}
|
||||
return counts;
|
||||
}
|
||||
|
||||
async function spotCheckEpicLinks(projectKey: string): Promise<{ total: number; withParent: number }> {
|
||||
const jql = encodeURIComponent(`project="${projectKey}" AND issuetype != Epic AND labels = "migrated-from-task-mcp" ORDER BY key ASC`);
|
||||
const res = await jiraFetch(`/search/jql?jql=${jql}&maxResults=5&fields=parent`);
|
||||
if (!res.ok) return { total: 0, withParent: 0 };
|
||||
const data = await res.json() as { issues: Array<{ key: string; fields: Record<string, unknown> }> };
|
||||
let withParent = 0;
|
||||
for (const issue of data.issues) {
|
||||
if (issue.fields?.parent) withParent++;
|
||||
}
|
||||
return { total: data.issues.length, withParent };
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log('=== CF-762 Migration Validation ===\n');
|
||||
|
||||
// 1. Per-project Jira vs DB counts
|
||||
console.log('1. Per-project issue counts (Jira vs DB):');
|
||||
console.log(' Project | Jira | DB Tasks | DB Migration Map | Match');
|
||||
console.log(' --------|------|----------|-----------------|------');
|
||||
|
||||
const dbProjects = await pool.query(
|
||||
`SELECT p.key, COUNT(DISTINCT t.id) as task_count, COUNT(DISTINCT m.old_task_id) as map_count
|
||||
FROM projects p
|
||||
LEFT JOIN tasks t ON t.project = p.key
|
||||
LEFT JOIN task_migration_map m ON m.old_task_id = t.id
|
||||
WHERE p.key ~ '^[A-Z]{2,5}$'
|
||||
${PROJECT_FILTER ? `AND p.key = '${PROJECT_FILTER}'` : ''}
|
||||
GROUP BY p.key
|
||||
HAVING COUNT(t.id) > 0
|
||||
ORDER BY p.key`
|
||||
);
|
||||
|
||||
let mismatches = 0;
|
||||
for (const row of dbProjects.rows) {
|
||||
const jiraCount = await jiraIssueCount(row.key);
|
||||
const match = jiraCount >= parseInt(row.task_count) ? 'OK' : 'MISMATCH';
|
||||
if (match !== 'OK') mismatches++;
|
||||
console.log(` ${row.key.padEnd(7)} | ${String(jiraCount).padStart(4)} | ${String(row.task_count).padStart(8)} | ${String(row.map_count).padStart(15)} | ${match}`);
|
||||
}
|
||||
console.log(`\n Mismatches: ${mismatches}\n`);
|
||||
|
||||
// 2. Spot-check checklists (3 projects)
|
||||
console.log('2. Checklist spot-check:');
|
||||
const checkProjects = PROJECT_FILTER ? [PROJECT_FILTER] : ['CF', 'OWUI', 'WHMCS'];
|
||||
for (const pk of checkProjects) {
|
||||
const result = await spotCheckChecklists(pk);
|
||||
console.log(` ${pk}: ${result.withChecklist}/${result.total} issues have checklists`);
|
||||
}
|
||||
console.log('');
|
||||
|
||||
// 3. Status distribution spot-check
|
||||
console.log('3. Status distribution spot-check:');
|
||||
const statusProjects = PROJECT_FILTER ? [PROJECT_FILTER] : ['CF', 'GB', 'RUB'];
|
||||
for (const pk of statusProjects) {
|
||||
const statuses = await spotCheckStatuses(pk);
|
||||
console.log(` ${pk}: ${Object.entries(statuses).map(([s, c]) => `${s}=${c}`).join(', ')}`);
|
||||
}
|
||||
console.log('');
|
||||
|
||||
// 4. Epic→Task parent links
|
||||
console.log('4. Epic→Task parent links spot-check:');
|
||||
const epicProjects = PROJECT_FILTER ? [PROJECT_FILTER] : ['CF', 'RUB', 'OWUI'];
|
||||
for (const pk of epicProjects) {
|
||||
const result = await spotCheckEpicLinks(pk);
|
||||
console.log(` ${pk}: ${result.withParent}/${result.total} tasks have parent epic`);
|
||||
}
|
||||
console.log('');
|
||||
|
||||
// 5. NULL FK references
|
||||
console.log('5. NULL FK references (should be from unmigrated/deleted projects):');
|
||||
const nullChecks = [
|
||||
{ table: 'memories', col: 'jira_issue_key', fk: 'task_id' },
|
||||
{ table: 'session_context', col: 'jira_issue_key', fk: 'current_task_id' },
|
||||
{ table: 'task_commits', col: 'jira_issue_key', fk: 'task_id' },
|
||||
];
|
||||
for (const { table, col, fk } of nullChecks) {
|
||||
try {
|
||||
const res = await pool.query(
|
||||
`SELECT COUNT(*) as cnt FROM ${table} WHERE ${fk} IS NOT NULL AND ${col} IS NULL`
|
||||
);
|
||||
const count = parseInt(res.rows[0].cnt);
|
||||
if (count > 0) {
|
||||
console.log(` ${table}: ${count} rows with task_id but no jira_issue_key`);
|
||||
if (VERBOSE) {
|
||||
const details = await pool.query(
|
||||
`SELECT ${fk} FROM ${table} WHERE ${fk} IS NOT NULL AND ${col} IS NULL LIMIT 5`
|
||||
);
|
||||
for (const d of details.rows) {
|
||||
console.log(` - ${d[fk]}`);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
console.log(` ${table}: OK (0 NULL refs)`);
|
||||
}
|
||||
} catch (e: any) {
|
||||
console.log(` ${table}: ${e.message}`);
|
||||
}
|
||||
}
|
||||
console.log('');
|
||||
|
||||
// 6. Migration map total
|
||||
const mapTotal = await pool.query('SELECT COUNT(*) as cnt FROM task_migration_map');
|
||||
console.log(`6. Total migration mappings: ${mapTotal.rows[0].cnt}`);
|
||||
|
||||
// 7. Placeholder count in Jira
|
||||
const placeholders = await jiraPlaceholderCount();
|
||||
console.log(`7. Placeholder issues in Jira (label=migration-placeholder): ${placeholders}`);
|
||||
|
||||
// 8. Consolidated projects check — should no longer exist
|
||||
console.log('\n8. Deleted source projects (should be gone from Jira):');
|
||||
const deletedProjects = ['LIT', 'CARD', 'TES', 'DA', 'AF', 'RUBI', 'ET', 'ZORK', 'IS', 'CLN', 'TOOLS'];
|
||||
for (const pk of deletedProjects) {
|
||||
const res = await jiraFetch(`/project/${pk}`);
|
||||
const status = res.ok ? 'STILL EXISTS' : 'Gone';
|
||||
console.log(` ${pk}: ${status}`);
|
||||
}
|
||||
|
||||
// 9. Remaining projects
|
||||
console.log('\n9. Current Jira projects:');
|
||||
const projRes = await jiraFetch('/project');
|
||||
if (projRes.ok) {
|
||||
const projects = await projRes.json() as Array<{ key: string; name: string }>;
|
||||
console.log(` Total: ${projects.length}`);
|
||||
for (const p of projects.sort((a, b) => a.key.localeCompare(b.key))) {
|
||||
const count = await jiraIssueCount(p.key);
|
||||
console.log(` ${p.key.padEnd(8)} ${String(count).padStart(4)} issues - ${p.name}`);
|
||||
}
|
||||
}
|
||||
|
||||
await pool.end();
|
||||
console.log('\n=== Validation Complete ===');
|
||||
}
|
||||
|
||||
// Entry point: surface any uncaught failure and exit non-zero.
main().catch(err => {
  console.error(err);
  process.exit(1);
});
|
||||
Reference in New Issue
Block a user