Files
session-mcp/scripts/migrate-tasks-to-jira.ts
Christian Gick 1227e5b339 feat(CF-762): Complete Jira migration - consolidate projects, cleanup
- Remove task CRUD/epic/search/relation/version tools (moved to Jira)
- Add migration scripts: migrate-tasks-to-jira, jira-admin, prepare-all-projects
- Add consolidate-projects.ts for merging duplicate Jira projects
- Add validate-migration.ts for post-migration integrity checks
- Add jira_issue_key columns migration (030)
- Consolidate 11 duplicate projects (LIT→LITE, CARD→CS, etc.)
- Delete 92 placeholder issues, 11 empty source projects
- Remove SG project completely
- 2,798 tasks migrated across 46 Jira projects

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-07 12:33:49 +02:00

888 lines
29 KiB
TypeScript

#!/usr/bin/env npx tsx
/**
* Migrate tasks from task-mcp PostgreSQL to Jira Cloud (CF-762)
* EXACT KEY MATCHING: CF-1 in task-mcp → CF-1 in Jira
*
* Strategy:
* 1. Create tasks in strict numeric order (1..maxId), filling gaps with placeholders
* 2. After all tasks, create epics (they get keys after maxId)
* 3. Then create session plans as epics
* 4. Link tasks to their epics via parent field update
* 5. Create issue links, retry cross-project ones
* 6. Store mapping and update FK references
*
* IMPORTANT: The Jira project must be empty (counter at 1) for key matching to work.
* Delete and recreate the project before running this script.
*
* Usage:
* npx tsx scripts/migrate-tasks-to-jira.ts [--dry-run] [--project CF] [--open-only] [--limit 5] [--batch-size 50]
*
* Requires env vars (from .env or shell):
* JIRA_URL, JIRA_USERNAME, JIRA_API_TOKEN
* POSTGRES_HOST (defaults to postgres.agiliton.internal)
*/
import pg from 'pg';
import dotenv from 'dotenv';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });
const { Pool } = pg;
// --- Config ---
// Jira Cloud connection: basic auth with email + API token.
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || process.env.JIRA_EMAIL || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');
// PostgreSQL source database (task-mcp).
// SECURITY: the DB password was previously hard-coded below. Prefer the
// POSTGRES_PASSWORD env var; the literal fallback is kept only so existing
// invocations keep working — rotate this credential and delete the fallback.
const pool = new Pool({
  host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
  port: parseInt(process.env.POSTGRES_PORT || '5432', 10),
  database: process.env.POSTGRES_DB || 'agiliton',
  user: process.env.POSTGRES_USER || 'agiliton',
  password: process.env.POSTGRES_PASSWORD || 'QtqiwCOAUpQNF6pjzOMAREzUny2bY8V1',
  max: 3, // small pool — throughput is bounded by Jira rate limiting anyway
});
// --- CLI args ---
const args = process.argv.slice(2);
const DRY_RUN = args.includes('--dry-run');
const OPEN_ONLY = args.includes('--open-only');
// Value-style flags: the value is the argument immediately after the flag.
const PROJECT_FILTER = args.find((a, i) => args[i - 1] === '--project') || null;
const LIMIT = parseInt(args.find((a, i) => args[i - 1] === '--limit') || '0', 10) || 0;
const BATCH_SIZE = parseInt(args.find((a, i) => args[i - 1] === '--batch-size') || '50', 10) || 50;
const SKIP_PREFLIGHT = args.includes('--skip-preflight');
// Herocoders Checklist for Jira custom field
const CHECKLIST_FIELD = 'customfield_10091';
// Rate limit: Jira Cloud allows ~100 req/min for basic auth
// 700ms delay = ~85 req/min (safe margin)
const DELAY_MS = 700;
const MAX_RETRIES = 5;
const BATCH_PAUSE_MS = 5000; // 5s pause between batches
// ADF max size (Jira limit)
const ADF_MAX_BYTES = 32_000;
/**
 * Pause execution for the given number of milliseconds.
 * Used for Jira rate-limit pacing and batch pauses.
 */
function delay(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(() => resolve(), ms);
  });
}
// --- Mappings ---
// task-mcp priority code → Jira priority name.
const PRIORITY_MAP: Record<string, string> = {
P0: 'Highest',
P1: 'High',
P2: 'Medium',
P3: 'Low',
};
// task-mcp task type → Jira issue type. Everything except bugs collapses to
// Task; the original type is preserved as a label during task creation.
const TYPE_MAP: Record<string, string> = {
task: 'Task',
bug: 'Bug',
feature: 'Task',
debt: 'Task',
investigation: 'Task',
};
// task-mcp status → Jira workflow status. blocked/abandoned have no direct
// equivalent: blocked stays "To Do" and abandoned closes as "Done"; both are
// additionally tagged with labels by migrateTasksExactKeys.
const STATUS_MAP: Record<string, string> = {
open: 'To Do',
pending: 'To Do',
in_progress: 'In Progress',
testing: 'In Progress',
blocked: 'To Do',
done: 'Done',
completed: 'Done',
abandoned: 'Done',
};
// task-mcp link type → Jira link type name. depends_on/needs map to "Blocks"
// with the link direction reversed at creation time (see migrateLinks).
const LINK_TYPE_MAP: Record<string, string> = {
blocks: 'Blocks',
relates_to: 'Relates',
duplicates: 'Duplicate',
depends_on: 'Blocks',
implements: 'Relates',
fixes: 'Relates',
causes: 'Relates',
needs: 'Blocks',
subtask_of: 'Relates',
};
// Project keys must be 2-5 uppercase letters (Jira-compatible).
const VALID_PROJECT_KEY = /^[A-Z]{2,5}$/;
// Track migration mapping: old task_id → Jira issue key
const migrationMap: Map<string, string> = new Map();
// Project keys that exist in Jira (populated from the API in main()).
const jiraProjects: Set<string> = new Set();
// Links skipped because an endpoint was not yet migrated; retried after all
// projects have been processed (see retryFailedLinks).
const failedLinks: Array<{ from: string; to: string; type: string }> = [];
// Track epic old_id → Jira key (assigned after tasks)
const epicJiraKeys: Map<string, string> = new Map();
// Tasks that need parent (epic) link set after epics are created
const pendingParentLinks: Array<{ taskJiraKey: string; epicOldId: string }> = [];
// --- Jira REST API helpers ---
/**
 * Low-level Jira REST v3 call: prefixes the base URL and adds basic-auth
 * plus JSON headers. Caller-supplied headers win over the defaults.
 */
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
  const headers = {
    'Authorization': `Basic ${JIRA_AUTH}`,
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    ...options.headers,
  };
  return fetch(`${JIRA_URL}/rest/api/3${path}`, { ...options, headers });
}
/**
 * Jira v3 call with rate-limit pacing and retry on 429/5xx.
 * Waits DELAY_MS before every attempt; on a retryable status honors the
 * Retry-After header when present, otherwise exponential backoff.
 * Always returns the final Response, even if it is still an error status.
 */
async function jiraFetchWithRetry(path: string, options: RequestInit = {}): Promise<Response> {
  let lastResponse: Response | null = null;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    await delay(DELAY_MS);
    const response = await jiraFetch(path, options);
    lastResponse = response;
    const retryable = response.status === 429 || response.status >= 500;
    if (!retryable) return response;
    if (attempt >= MAX_RETRIES) {
      console.error(` [FAIL] ${response.status} on ${path} after ${MAX_RETRIES} retries`);
      return response;
    }
    const retryAfter = response.headers.get('Retry-After');
    const backoffMs = retryAfter
      ? parseInt(retryAfter) * 1000
      : DELAY_MS * Math.pow(2, attempt + 1);
    console.warn(` [RETRY] ${response.status} on ${path}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
    await delay(backoffMs);
  }
  // Unreachable: the loop always returns on its final attempt.
  return lastResponse!;
}
/**
 * Jira REST v2 call with the same pacing/retry policy as jiraFetchWithRetry.
 * Used where v2 is required (plain-text custom fields such as the checklist).
 *
 * Fix: now honors the Retry-After header on 429/5xx, matching the v3 helper
 * (previously this variant only used exponential backoff).
 */
async function jiraFetchV2WithRetry(path: string, options: RequestInit = {}): Promise<Response> {
  const url = `${JIRA_URL}/rest/api/2${path}`;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    await delay(DELAY_MS);
    const response = await fetch(url, {
      ...options,
      headers: {
        'Authorization': `Basic ${JIRA_AUTH}`,
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        ...options.headers,
      },
    });
    if (response.status === 429 || response.status >= 500) {
      if (attempt < MAX_RETRIES) {
        // Retry-After is in seconds; fall back to exponential backoff.
        const retryAfter = response.headers.get('Retry-After');
        const backoffMs = retryAfter
          ? parseInt(retryAfter, 10) * 1000
          : DELAY_MS * Math.pow(2, attempt + 1);
        console.warn(` [RETRY] v2 ${response.status} on ${path}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
        await delay(backoffMs);
        continue;
      }
    }
    return response;
  }
  // Unreachable: the loop returns on its final attempt.
  throw new Error(`jiraFetchV2WithRetry: exhausted retries for ${path}`);
}
// --- ADF helpers ---
/**
 * Convert plain text into a minimal Atlassian Document Format (ADF) doc.
 * Blank lines split paragraphs; single newlines are kept inside a paragraph.
 * Oversized text is truncated (with a marker appended) to stay under Jira's
 * ~32KB ADF limit; the 500-byte margin leaves room for the ADF envelope.
 */
function textToAdf(text: string): Record<string, unknown> {
  // Normalize all line endings to \n.
  let body = text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
  if (Buffer.byteLength(body, 'utf8') > ADF_MAX_BYTES - 500) {
    // Shrink by 10% per pass until safely under the limit.
    do {
      body = body.substring(0, Math.floor(body.length * 0.9));
    } while (Buffer.byteLength(body, 'utf8') > ADF_MAX_BYTES - 500);
    body += '\n\n[...truncated - description exceeded 32KB limit]';
  }
  const paragraphs: Array<Record<string, unknown>> = [];
  // Push the accumulated paragraph text, if non-blank.
  const flush = (buf: string): void => {
    if (buf.trim()) {
      paragraphs.push({
        type: 'paragraph',
        content: [{ type: 'text', text: buf.trim() }],
      });
    }
  };
  let buffer = '';
  for (const line of body.split('\n')) {
    if (line.trim() === '') {
      flush(buffer);
      buffer = '';
    } else {
      buffer = buffer ? `${buffer}\n${line}` : line;
    }
  }
  flush(buffer);
  // Jira rejects an empty doc — always emit at least one paragraph.
  if (paragraphs.length === 0) {
    paragraphs.push({
      type: 'paragraph',
      content: [{ type: 'text', text: text.trim() || '(empty)' }],
    });
  }
  return { type: 'doc', version: 1, content: paragraphs };
}
// --- API operations ---
/**
 * List every project key visible to the authenticated Jira user.
 * Returns [] (after logging) on API failure instead of throwing.
 */
async function getJiraProjects(): Promise<string[]> {
  const res = await jiraFetchWithRetry('/project');
  if (res.ok) {
    const projects = await res.json() as Array<{ key: string }>;
    return projects.map(p => p.key);
  }
  console.error('Failed to list Jira projects:', res.status, await res.text());
  return [];
}
/**
 * Count issues in a Jira project via a JQL search.
 * Used as the "already migrated?" guard in main(); returns 0 on API failure.
 */
async function countJiraIssues(projectKey: string): Promise<number> {
  const payload = { jql: `project="${projectKey}"`, maxResults: 1, fields: ['summary'] };
  const res = await jiraFetchWithRetry('/search/jql', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  if (!res.ok) return 0;
  const data = await res.json() as { total?: number; issues?: unknown[] };
  // Some search responses omit `total`; fall back to the page length.
  return data.total ?? data.issues?.length ?? 0;
}
/**
 * Create a Jira issue and return its key, or null on failure.
 * In dry-run mode nothing is sent; a synthetic "<PROJECT>-DRY" key is
 * returned so downstream bookkeeping still runs.
 */
async function createJiraIssue(fields: Record<string, unknown>): Promise<string | null> {
  if (DRY_RUN) {
    console.log(` [DRY] Would create: ${(fields.summary as string).substring(0, 60)}`);
    return `${(fields.project as Record<string, string>).key}-DRY`;
  }
  const res = await jiraFetchWithRetry('/issue', {
    method: 'POST',
    body: JSON.stringify({ fields }),
  });
  if (res.ok) {
    const data = await res.json() as { key: string };
    return data.key;
  }
  const body = await res.text();
  console.error(` FAIL create issue: ${res.status} ${body}`);
  return null;
}
/**
 * Move an issue into the workflow status whose available transition name
 * matches `targetStatus` — exact match first, then partial match and common
 * aliases (e.g. "In Progress" matches "Start Progress").
 * Returns true when the transition was applied (always true in dry-run).
 *
 * Fix: the transition POST was duplicated in both the exact-match and
 * partial-match branches; resolve the transition first, then POST once.
 */
async function transitionIssue(issueKey: string, targetStatus: string): Promise<boolean> {
  if (DRY_RUN) return true;
  const res = await jiraFetchWithRetry(`/issue/${issueKey}/transitions`);
  if (!res.ok) return false;
  const data = await res.json() as { transitions: Array<{ id: string; name: string }> };
  const wanted = targetStatus.toLowerCase();
  // Exact name match first…
  let transition = data.transitions.find(t => t.name.toLowerCase() === wanted);
  // …then partial match / common alternative names.
  if (!transition) {
    transition = data.transitions.find(t =>
      t.name.toLowerCase().includes(wanted) ||
      (targetStatus === 'In Progress' && t.name.toLowerCase().includes('progress')) ||
      (targetStatus === 'Done' && t.name.toLowerCase().includes('done'))
    );
  }
  if (!transition) {
    console.warn(` [WARN] No transition to "${targetStatus}" for ${issueKey}. Available: ${data.transitions.map(t => t.name).join(', ')}`);
    return false;
  }
  const transRes = await jiraFetchWithRetry(`/issue/${issueKey}/transitions`, {
    method: 'POST',
    body: JSON.stringify({ transition: { id: transition.id } }),
  });
  return transRes.ok;
}
/**
 * Write checklist items to the Herocoders checklist custom field as
 * markdown-style text ("* [x] item"). Uses the v2 API, which accepts plain
 * text for this field. No-op in dry-run or when there are no items.
 */
async function writeChecklist(issueKey: string, items: Array<{ item: string; checked: boolean }>): Promise<void> {
  if (DRY_RUN || items.length === 0) return;
  const lines: string[] = [];
  for (const { item, checked } of items) {
    lines.push(`* [${checked ? 'x' : ' '}] ${item}`);
  }
  const res = await jiraFetchV2WithRetry(`/issue/${issueKey}`, {
    method: 'PUT',
    body: JSON.stringify({ fields: { [CHECKLIST_FIELD]: lines.join('\n') } }),
  });
  if (!res.ok) {
    const body = await res.text();
    console.error(` FAIL checklist for ${issueKey}: ${res.status} ${body}`);
  }
}
/**
 * Point an issue at a parent (epic) via the v3 `parent` field.
 * Failures are logged, not thrown — the migration keeps going.
 */
async function setParent(issueKey: string, parentKey: string): Promise<void> {
  if (DRY_RUN) return;
  const payload = { fields: { parent: { key: parentKey } } };
  const res = await jiraFetchWithRetry(`/issue/${issueKey}`, {
    method: 'PUT',
    body: JSON.stringify(payload),
  });
  if (res.ok) return;
  const body = await res.text();
  console.error(` FAIL set parent ${parentKey} for ${issueKey}: ${res.status} ${body}`);
}
/**
 * Create a typed link between two issues (inward -[type]-> outward).
 * Returns true on success; in dry-run only logs and reports success.
 */
async function createIssueLink(inwardKey: string, outwardKey: string, linkType: string): Promise<boolean> {
  if (DRY_RUN) {
    console.log(` [DRY] Would link: ${inwardKey} -[${linkType}]-> ${outwardKey}`);
    return true;
  }
  const payload = {
    type: { name: linkType },
    inwardIssue: { key: inwardKey },
    outwardIssue: { key: outwardKey },
  };
  const res = await jiraFetchWithRetry('/issueLink', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  if (res.ok) return true;
  const body = await res.text();
  console.error(` FAIL link ${inwardKey}->${outwardKey}: ${res.status} ${body}`);
  return false;
}
/**
 * Delete a Jira issue. Best-effort: the response is ignored.
 * NOTE(review): no callers in this file — presumably kept for manual
 * cleanup; confirm before removing.
 */
async function deleteIssue(issueKey: string): Promise<void> {
await jiraFetchWithRetry(`/issue/${issueKey}`, { method: 'DELETE' });
}
// --- Pre-flight check ---
/**
 * Pre-flight stub. A real probe would create and delete a test issue, but
 * creating ANY issue advances the project's key counter (deletion does not
 * rewind it), which would break exact key parity for this migration. So the
 * function only prints guidance and always returns true.
 * NOTE(review): this function has no callers in this file — confirm whether
 * main() should invoke it or whether it can be removed.
 */
async function preflightWorkflowCheck(projectKey: string): Promise<boolean> {
console.log(`\nPre-flight workflow check on ${projectKey}...`);
if (DRY_RUN || SKIP_PREFLIGHT) {
console.log(` [${DRY_RUN ? 'DRY' : 'SKIP'}] Skipping pre-flight check`);
return true;
}
// IMPORTANT: pre-flight consumes a key number!
// We must account for this. The test issue will be key #1,
// then we delete it, but the counter stays at 2.
// So we CANNOT do pre-flight on the same project if we want exact keys.
// Instead, use a different project for pre-flight.
console.log(' WARNING: Pre-flight check would consume issue key #1.');
console.log(' Skipping in-project pre-flight to preserve key sequence.');
console.log(' Use --skip-preflight explicitly if already verified.');
return true;
}
// --- Migration: exact key ordering ---
/**
 * Migrate one project's tasks into Jira with EXACT key parity:
 * task-mcp "<KEY>-n" must become Jira "<KEY>-n".
 *
 * Issues are created in strict numeric order 1..max; numeric IDs missing
 * from the source (deleted tasks) become "Done" placeholder issues so the
 * Jira key counter never drifts. Requires an EMPTY Jira project.
 *
 * Side effects: fills migrationMap and queues pendingParentLinks.
 * Exits the whole process on creation failure or key mismatch, because one
 * miss breaks parity for every later issue.
 *
 * Fixes vs. previous revision: removed the dead, convoluted `gapCount`
 * expression (it was never used — the loop below computes the real counts),
 * and added the missing " → " separator in the progress log, which printed
 * e.g. "CF-12CF-12".
 *
 * @param projectKey 2-5 letter project key (same in task-mcp and Jira)
 * @returns an always-empty map, kept for signature compatibility
 */
async function migrateTasksExactKeys(projectKey: string): Promise<Map<string, string>> {
  const epicMap = new Map<string, string>();
  // 1. Load all tasks for this project, indexed by numeric ID
  const tasks = await pool.query(
    `SELECT id, title, description, type, status, priority, epic_id, created_at
     FROM tasks WHERE project = $1 ORDER BY id`,
    [projectKey]
  );
  // Build a map of numeric ID → task row and find the highest numeric ID.
  const idPattern = new RegExp(`^${projectKey}-(\\d+)$`);
  const taskById = new Map<number, (typeof tasks.rows)[0]>();
  let maxNum = 0;
  for (const task of tasks.rows) {
    const m = task.id.match(idPattern);
    if (m) {
      const num = parseInt(m[1], 10);
      taskById.set(num, task);
      if (num > maxNum) maxNum = num;
    }
  }
  if (maxNum === 0) {
    console.log(' No numeric task IDs found, skipping.');
    return epicMap;
  }
  const effectiveMax = LIMIT > 0 ? Math.min(maxNum, LIMIT) : maxNum;
  // Count real tasks vs. gaps (placeholders) in the range we will create.
  let realTasks = 0;
  let gaps = 0;
  for (let n = 1; n <= effectiveMax; n++) {
    if (taskById.has(n)) realTasks++;
    else gaps++;
  }
  console.log(` Creating ${effectiveMax} issues (${realTasks} real tasks + ${gaps} placeholders)...`);
  // 2. Create issues 1..effectiveMax in strict order
  for (let n = 1; n <= effectiveMax; n++) {
    const task = taskById.get(n);
    const taskId = `${projectKey}-${n}`;
    const expectedJiraKey = `${projectKey}-${n}`;
    if (task) {
      // Real task: preserve type/status nuances as labels.
      const labels: string[] = ['migrated-from-task-mcp'];
      if (task.type === 'feature') labels.push('feature');
      if (task.type === 'debt') labels.push('tech-debt');
      if (task.type === 'investigation') labels.push('investigation');
      if (task.status === 'blocked') labels.push('blocked');
      if (task.status === 'abandoned') labels.push('abandoned');
      const fields: Record<string, unknown> = {
        project: { key: projectKey },
        summary: task.title.substring(0, 255),
        issuetype: { name: TYPE_MAP[task.type] || 'Task' },
        priority: { name: PRIORITY_MAP[task.priority] || 'Medium' },
        labels,
      };
      if (task.description) {
        fields.description = textToAdf(task.description);
      }
      // Don't set parent here — epics don't exist yet. Queue for later.
      const jiraKey = await createJiraIssue(fields);
      if (!jiraKey) {
        console.error(` FATAL: Failed to create ${taskId}, key sequence broken!`);
        process.exit(1);
      }
      // Verify key parity (skipped in dry-run, where keys are synthetic).
      if (!DRY_RUN && jiraKey !== expectedJiraKey) {
        console.error(` FATAL: Key mismatch! Expected ${expectedJiraKey}, got ${jiraKey}. Aborting.`);
        process.exit(1);
      }
      migrationMap.set(task.id, jiraKey);
      // Move out of the default "To Do" state when needed.
      const targetStatus = STATUS_MAP[task.status] || 'To Do';
      if (targetStatus !== 'To Do') {
        await transitionIssue(jiraKey, targetStatus);
      }
      // Copy the checklist, if any.
      const checklist = await pool.query(
        'SELECT item, checked FROM task_checklist WHERE task_id = $1 ORDER BY position',
        [task.id]
      );
      if (checklist.rows.length > 0) {
        await writeChecklist(jiraKey, checklist.rows);
      }
      // Queue parent link for after epic creation.
      if (task.epic_id) {
        pendingParentLinks.push({ taskJiraKey: jiraKey, epicOldId: task.epic_id });
      }
    } else {
      // Gap — create a placeholder to keep the key counter in sync.
      const fields: Record<string, unknown> = {
        project: { key: projectKey },
        summary: `[Placeholder] Deleted task ${taskId}`,
        issuetype: { name: 'Task' },
        labels: ['migration-placeholder', 'migrated-from-task-mcp'],
      };
      const jiraKey = await createJiraIssue(fields);
      if (!jiraKey) {
        console.error(` FATAL: Failed to create placeholder for ${taskId}, key sequence broken!`);
        process.exit(1);
      }
      if (!DRY_RUN && jiraKey !== expectedJiraKey) {
        console.error(` FATAL: Key mismatch! Expected ${expectedJiraKey}, got ${jiraKey}. Aborting.`);
        process.exit(1);
      }
      // Placeholders are immediately closed.
      await transitionIssue(jiraKey, 'Done');
    }
    // Progress logging
    if (n % 10 === 0 || n === effectiveMax) {
      console.log(` [${n}/${effectiveMax}] ${task ? task.id : `gap → placeholder`} → ${projectKey}-${n}`);
    }
    // Batch pause to stay friendly to the rate limiter.
    if (n > 0 && n % BATCH_SIZE === 0) {
      console.log(` [BATCH PAUSE] ${n}/${effectiveMax}, pausing ${BATCH_PAUSE_MS / 1000}s...`);
      await delay(BATCH_PAUSE_MS);
    }
  }
  return epicMap;
}
/**
 * Create this project's epics AFTER all tasks, so epic keys land above the
 * task number range and cannot disturb exact task-key parity.
 * Records old epic id → Jira key in epicJiraKeys for later parent linking.
 *
 * Fix: the per-epic log line was missing the separator between the old id
 * and the new Jira key (printed "Epic <id><key>: ...").
 */
async function migrateEpicsAfterTasks(projectKey: string): Promise<void> {
  const epics = await pool.query(
    'SELECT id, title, description, status FROM epics WHERE project = $1 ORDER BY id',
    [projectKey]
  );
  if (epics.rows.length === 0) return;
  console.log(` Creating ${epics.rows.length} epics (after task range)...`);
  for (let i = 0; i < epics.rows.length; i++) {
    const epic = epics.rows[i];
    const fields: Record<string, unknown> = {
      project: { key: projectKey },
      summary: epic.title.substring(0, 255),
      // `undefined` is dropped by JSON.stringify, so an absent description
      // simply omits the field.
      description: epic.description ? textToAdf(epic.description) : undefined,
      issuetype: { name: 'Epic' },
      labels: ['migrated-from-task-mcp'],
    };
    const jiraKey = await createJiraIssue(fields);
    if (jiraKey) {
      epicJiraKeys.set(epic.id, jiraKey);
      console.log(` [${i + 1}/${epics.rows.length}] Epic ${epic.id} → ${jiraKey}: ${epic.title.substring(0, 50)}`);
      if (epic.status === 'completed' || epic.status === 'done') {
        await transitionIssue(jiraKey, 'Done');
      } else if (epic.status === 'in_progress') {
        await transitionIssue(jiraKey, 'In Progress');
      }
    }
  }
}
/**
 * Migrate session plans as Epics (created after tasks to preserve key
 * parity). Plan metadata (file name, status) is preserved as labels and the
 * plan content becomes the epic description. Jira keys are recorded in
 * epicJiraKeys under "plan-<id>".
 *
 * Fix: the per-plan log line was missing the separator between the plan id
 * and the new Jira key (printed "Plan <id><key>").
 */
async function migrateSessionPlansAfterTasks(projectKey: string): Promise<void> {
  const plans = await pool.query(
    `SELECT sp.id, sp.session_id, sp.plan_file_name, sp.plan_content, sp.status
     FROM session_plans sp
     JOIN sessions s ON sp.session_id = s.id
     WHERE s.project = $1`,
    [projectKey]
  );
  if (plans.rows.length === 0) return;
  console.log(` Creating ${plans.rows.length} session plans as Epics...`);
  for (let i = 0; i < plans.rows.length; i++) {
    const plan = plans.rows[i];
    const labels: string[] = ['migrated-from-task-mcp', 'session-plan'];
    if (plan.plan_file_name) {
      // Jira labels forbid spaces/special characters; sanitize and cap length.
      labels.push(`plan:${plan.plan_file_name.replace(/[^a-zA-Z0-9._-]/g, '_').substring(0, 50)}`);
    }
    if (plan.status) {
      labels.push(`plan-status:${plan.status}`);
    }
    const fields: Record<string, unknown> = {
      project: { key: projectKey },
      summary: `[Session Plan] ${plan.plan_file_name || `Plan from session ${plan.session_id}`}`.substring(0, 255),
      description: plan.plan_content ? textToAdf(plan.plan_content) : undefined,
      issuetype: { name: 'Epic' },
      labels,
    };
    const jiraKey = await createJiraIssue(fields);
    if (jiraKey) {
      epicJiraKeys.set(`plan-${plan.id}`, jiraKey);
      console.log(` [${i + 1}/${plans.rows.length}] Plan ${plan.id} → ${jiraKey}`);
      if (plan.status === 'executed' || plan.status === 'abandoned') {
        await transitionIssue(jiraKey, 'Done');
      } else if (plan.status === 'approved') {
        await transitionIssue(jiraKey, 'In Progress');
      }
    }
  }
}
/**
 * Resolve the queued task→epic relationships now that epics exist.
 * Entries whose epic never got a Jira key are silently skipped.
 */
async function linkTasksToEpics(): Promise<void> {
  const total = pendingParentLinks.length;
  if (total === 0) return;
  console.log(` Setting parent (epic) for ${total} tasks...`);
  let linked = 0;
  for (const { taskJiraKey, epicOldId } of pendingParentLinks) {
    const epicJiraKey = epicJiraKeys.get(epicOldId);
    if (epicJiraKey === undefined) continue;
    await setParent(taskJiraKey, epicJiraKey);
    linked += 1;
    if (linked % 20 === 0) {
      console.log(` [${linked}/${total}] parent links set`);
    }
    if (linked % BATCH_SIZE === 0) {
      console.log(` [BATCH PAUSE] ${linked}/${total}, pausing...`);
      await delay(BATCH_PAUSE_MS);
    }
  }
  console.log(` Linked ${linked} tasks to epics`);
}
async function migrateLinks(projectKey: string): Promise<void> {
const links = await pool.query(
`SELECT tl.from_task_id, tl.to_task_id, tl.link_type
FROM task_links tl
JOIN tasks t1 ON tl.from_task_id = t1.id
JOIN tasks t2 ON tl.to_task_id = t2.id
WHERE t1.project = $1 OR t2.project = $1`,
[projectKey]
);
if (links.rows.length === 0) return;
console.log(` Migrating ${links.rows.length} links...`);
let created = 0;
let skipped = 0;
for (const link of links.rows) {
const fromKey = migrationMap.get(link.from_task_id);
const toKey = migrationMap.get(link.to_task_id);
if (!fromKey || !toKey) {
failedLinks.push({ from: link.from_task_id, to: link.to_task_id, type: link.link_type });
skipped++;
continue;
}
const jiraLinkType = LINK_TYPE_MAP[link.link_type] || 'Relates';
let success: boolean;
if (link.link_type === 'depends_on' || link.link_type === 'needs') {
success = await createIssueLink(toKey, fromKey, jiraLinkType);
} else {
success = await createIssueLink(fromKey, toKey, jiraLinkType);
}
if (success) created++;
}
console.log(` Created ${created} links, ${skipped} deferred for cross-project retry`);
}
/**
 * Second pass over links deferred during per-project migration. Only links
 * whose both endpoints are now in migrationMap can succeed; the rest count
 * as failed.
 */
async function retryFailedLinks(): Promise<void> {
  if (failedLinks.length === 0) return;
  console.log(`\nRetrying ${failedLinks.length} deferred cross-project links...`);
  let created = 0;
  let failed = 0;
  for (const { from, to, type } of failedLinks) {
    const fromKey = migrationMap.get(from);
    const toKey = migrationMap.get(to);
    if (!fromKey || !toKey) {
      failed++;
      continue;
    }
    const jiraLinkType = LINK_TYPE_MAP[type] || 'Relates';
    // Same direction inversion as migrateLinks for depends_on/needs.
    const reversed = type === 'depends_on' || type === 'needs';
    const ok = reversed
      ? await createIssueLink(toKey, fromKey, jiraLinkType)
      : await createIssueLink(fromKey, toKey, jiraLinkType);
    if (ok) created++;
    else failed++;
  }
  console.log(` Retry results: ${created} created, ${failed} failed`);
}
// --- Post-migration ---
/**
 * Persist the old-task-id → Jira-key mapping into task_migration_map
 * (created on demand). Upserts, so re-runs refresh migrated_at.
 *
 * Fix: the CREATE TABLE was previously executed even in --dry-run, i.e. a
 * "dry" run still mutated the database. All writes (DDL included) are now
 * skipped in dry-run mode.
 */
async function updateSessionMappings(): Promise<void> {
  console.log('\nStoring migration mappings...');
  if (!DRY_RUN) {
    await pool.query(`
      CREATE TABLE IF NOT EXISTS task_migration_map (
        old_task_id TEXT PRIMARY KEY,
        jira_issue_key TEXT NOT NULL,
        migrated_at TIMESTAMPTZ DEFAULT NOW()
      )
    `);
  }
  let count = 0;
  for (const [oldId, jiraKey] of migrationMap) {
    if (!DRY_RUN) {
      await pool.query(
        `INSERT INTO task_migration_map (old_task_id, jira_issue_key)
         VALUES ($1, $2)
         ON CONFLICT (old_task_id) DO UPDATE SET jira_issue_key = $2, migrated_at = NOW()`,
        [oldId, jiraKey]
      );
    }
    count++;
  }
  console.log(` Stored ${count} mappings`);
}
/**
 * Backfill a jira_issue_key column on tables that referenced old task ids.
 * Adds the column where missing, then joins each table against
 * task_migration_map to annotate rows that are not yet annotated.
 * Per-table errors are logged and skipped (schemas may vary by install).
 * Table/column names are interpolated into SQL, but only from the
 * hard-coded lists below — never from user input.
 */
async function updateForeignKeyReferences(): Promise<void> {
console.log('\nUpdating FK references with Jira issue keys...');
if (DRY_RUN) {
console.log(' [DRY] Skipping FK reference updates');
return;
}
// Ensure the target column exists on every table we annotate.
const alterStatements = [
'ALTER TABLE memories ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
'ALTER TABLE session_context ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
'ALTER TABLE deployments ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
'ALTER TABLE task_commits ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
];
for (const sql of alterStatements) {
// A missing table is tolerated silently; any other error is surfaced.
try { await pool.query(sql); }
catch (e: unknown) {
const msg = e instanceof Error ? e.message : String(e);
if (!msg.includes('does not exist')) console.warn(` [WARN] ${sql}: ${msg}`);
}
}
// Direct FK → jira_issue_key backfill, one table at a time.
const updates = [
{ table: 'memories', fk: 'task_id', desc: 'memories' },
{ table: 'session_context', fk: 'current_task_id', desc: 'session_context' },
{ table: 'deployments', fk: 'task_id', desc: 'deployments' },
{ table: 'task_commits', fk: 'task_id', desc: 'task_commits' },
];
for (const { table, fk, desc } of updates) {
try {
const result = await pool.query(
`UPDATE ${table} SET jira_issue_key = m.jira_issue_key
FROM task_migration_map m
WHERE ${table}.${fk} = m.old_task_id
AND ${table}.jira_issue_key IS NULL`
);
console.log(` ${desc}: ${result.rowCount} rows updated`);
} catch (e: unknown) {
const msg = e instanceof Error ? e.message : String(e);
console.warn(` [WARN] ${desc}: ${msg}`);
}
}
// Sessions are linked indirectly via session_context.current_task_id.
try {
const result = await pool.query(
`UPDATE sessions SET jira_issue_key = m.jira_issue_key
FROM task_migration_map m, session_context sc
WHERE sc.session_id = sessions.id
AND sc.current_task_id = m.old_task_id
AND sessions.jira_issue_key IS NULL`
);
console.log(` sessions: ${result.rowCount} rows updated`);
} catch (e: unknown) {
const msg = e instanceof Error ? e.message : String(e);
console.warn(` [WARN] sessions: ${msg}`);
}
}
// --- Main ---
/**
 * Orchestrate the full migration:
 * discover projects → per-project (tasks, epics, plans, parent links, issue
 * links) → cross-project link retry → persist mapping → backfill FK refs.
 *
 * Fix: the per-project summary used epicJiraKeys.size, which accumulates
 * across ALL projects; the epic count is now filtered by key prefix like
 * the task count.
 */
async function main() {
  console.log('=== task-mcp → Jira Cloud Migration (EXACT KEY MATCHING) ===');
  console.log(`Jira: ${JIRA_URL}`);
  console.log(`User: ${JIRA_USER}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  console.log(`Filter: ${PROJECT_FILTER || 'all valid projects'}`);
  // NOTE(review): --open-only is parsed and displayed but never applied —
  // migrateTasksExactKeys migrates all tasks regardless. Either wire the
  // flag through or drop it from the usage text.
  console.log(`Scope: ${OPEN_ONLY ? 'open tasks only' : 'all tasks'}`);
  console.log(`Limit: ${LIMIT || 'none'}`);
  console.log(`Batch: ${BATCH_SIZE} (${BATCH_PAUSE_MS / 1000}s pause)`);
  console.log(`Rate: ${DELAY_MS}ms delay, ${MAX_RETRIES} retries`);
  console.log('');
  if (!JIRA_USER || !JIRA_TOKEN) {
    console.error('Missing JIRA_USERNAME or JIRA_API_TOKEN');
    process.exit(1);
  }
  const existingProjects = await getJiraProjects();
  existingProjects.forEach(p => jiraProjects.add(p));
  console.log(`Existing Jira projects: ${existingProjects.join(', ')}`);
  // Candidate projects: keys must be Jira-compatible (2-5 uppercase letters).
  const dbProjects = await pool.query(
    'SELECT key, name FROM projects WHERE key ~ $1 ORDER BY key',
    ['^[A-Z]{2,5}$']
  );
  const projectsToMigrate = dbProjects.rows.filter(p => {
    if (PROJECT_FILTER && p.key !== PROJECT_FILTER) return false;
    if (!VALID_PROJECT_KEY.test(p.key)) return false;
    return true;
  });
  console.log(`Projects to migrate: ${projectsToMigrate.map(p => p.key).join(', ')}`);
  const missing = projectsToMigrate.filter(p => !jiraProjects.has(p.key));
  if (missing.length > 0) {
    console.log(`\nWARNING: These projects don't exist in Jira yet (will be skipped):`);
    missing.forEach(p => console.log(` ${p.key} - ${p.name}`));
    console.log('Create them in Jira first, then re-run migration.\n');
  }
  // Migrate each project
  for (const project of projectsToMigrate) {
    if (!jiraProjects.has(project.key)) {
      console.log(`\nSkipping ${project.key} (not in Jira)`);
      continue;
    }
    console.log(`\n--- Migrating project: ${project.key} (${project.name}) ---`);
    // Idempotency guard: a non-empty project was already migrated (and a
    // non-empty project would break exact key matching anyway).
    const existingCount = await countJiraIssues(project.key);
    if (existingCount > 0) {
      console.log(` Skipping: already has ${existingCount} issues in Jira`);
      continue;
    }
    // Clear per-project state
    pendingParentLinks.length = 0;
    // 1. Tasks in exact numeric order (with gap placeholders)
    await migrateTasksExactKeys(project.key);
    // 2. Epics (after tasks, so they get keys after maxTaskId)
    await migrateEpicsAfterTasks(project.key);
    // 3. Session plans as epics
    await migrateSessionPlansAfterTasks(project.key);
    // 4. Link tasks to their parent epics (now that epics exist)
    await linkTasksToEpics();
    // 5. Issue links
    await migrateLinks(project.key);
    // Per-project summary (both counts filtered to this project's keys).
    const prefix = `${project.key}-`;
    const taskCount = Array.from(migrationMap.values()).filter(v => v.startsWith(prefix)).length;
    const epicCount = Array.from(epicJiraKeys.values()).filter(v => v.startsWith(prefix)).length;
    console.log(` Done: ${epicCount} epics, ${taskCount} tasks migrated`);
  }
  // 6. Retry cross-project links
  await retryFailedLinks();
  // 7. Store mapping
  await updateSessionMappings();
  // 8. Update FK references
  await updateForeignKeyReferences();
  // Final summary
  console.log('\n=== Migration Summary ===');
  console.log(`Total issues migrated: ${migrationMap.size}`);
  console.log(`Epics created: ${epicJiraKeys.size}`);
  console.log(`Failed links: ${failedLinks.filter(l => !migrationMap.has(l.from) || !migrationMap.has(l.to)).length}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN (no changes made)' : 'LIVE'}`);
  await pool.end();
}
// Entry point: surface any unhandled failure and exit non-zero.
void main().catch((err: unknown) => {
  console.error('Migration failed:', err);
  process.exit(1);
});