Compare commits

...

10 Commits

Author SHA1 Message Date
Christian Gick
c0c6918e2c chore: Use per-product Sentry DSN (CF-835)
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-08 18:46:06 +02:00
Christian Gick
507e98ef8e fix: Use actual project key for session Jira issues instead of hardcoded CF
The project parameter was passed to createSessionIssue() but ignored -
all sessions were created in the CF Jira project regardless of the
actual session project (ST, LLB, GB, etc.).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-08 17:09:34 +02:00
Christian Gick
9042bf0878 fix(CF-816): Self-healing session number sequence to prevent drift
session_sequences table fell behind when sessions were inserted with
explicit session_number (e.g., retro imports), causing duplicate key
violations on next auto-assigned number. Function now syncs forward
by checking MAX(session_number) before assigning.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-08 07:48:54 +02:00
Christian Gick
3ca40d9100 revert: Keep CF project for session-tracking Jira issues
CU is a separate project. Session tracking stays in CF.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-08 07:32:21 +02:00
Christian Gick
c57f9c6a75 fix(CF-762): Use CU project for session-tracking Jira issues
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-08 07:31:59 +02:00
Christian Gick
63cba97b56 feat(CF-762): Add Jira integration for session tracking
Sessions now auto-create CF Jira issues on start and post full session
output as comments on end, transitioning the issue to Done.

- Add src/services/jira.ts with createSessionIssue, addComment, transitionToDone
- Update session_start to create CF Jira issue and store key in sessions table
- Update session_end to post session output and close Jira issue
- Add migration 031 to archive local task tables (moved to Jira Cloud)
- Update .env.example with Jira Cloud env vars

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-08 07:23:18 +02:00
Christian Gick
1227e5b339 feat(CF-762): Complete Jira migration - consolidate projects, cleanup
- Remove task CRUD/epic/search/relation/version tools (moved to Jira)
- Add migration scripts: migrate-tasks-to-jira, jira-admin, prepare-all-projects
- Add consolidate-projects.ts for merging duplicate Jira projects
- Add validate-migration.ts for post-migration integrity checks
- Add jira_issue_key columns migration (030)
- Consolidate 11 duplicate projects (LIT→LITE, CARD→CS, etc.)
- Delete 92 placeholder issues, 11 empty source projects
- Remove SG project completely
- 2,798 tasks migrated across 46 Jira projects

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-07 12:33:49 +02:00
Christian Gick
bd5d95beff fix: Fallback to cached session ID in session_note_add
When session_id is not provided, falls back to getSessionId() which
reads from CLAUDE_SESSION_ID env or ~/.cache/session-memory/current_session.
Fixes NOT NULL constraint violation on session_notes.session_id.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-04 21:02:12 +02:00
Christian Gick
6cbb5ce6cb feat(CF-314): Add planning_mode_required field and smart planning mode support
Adds DB column, TypeScript types, MCP tool schemas, and CRUD handlers
for planning_mode_required (NULL=auto-detect, true=always, false=never).

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-04 20:30:36 +02:00
Christian Gick
baec42810c fix(CF-635): Fix task_list returning empty for non-completed tasks
- Remove invalid 'pending' status from task_list and task_update enums
- Default to excluding completed tasks when no status filter provided
- Previously, task_list(status=open) missed in_progress/blocked/testing tasks

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-02 07:00:39 +02:00
30 changed files with 2736 additions and 2246 deletions

34
.env Normal file
View File

@@ -0,0 +1,34 @@
# ===========================================
# SENTRY ERROR TRACKING (CF-536)
# ===========================================
# Get DSN from vault: vault get sentry.dsn.mcp-servers
# Phase 2: Node.js MCP Servers Integration
#
# Features:
# - Automatic error capture with PII scrubbing
# - Performance tracing and profiling
# - MCP protocol-aware filtering
# - PostgreSQL integration for database error tracking
#
# Created: 2026-01-29
# NOTE(review): dotenv-style parsers do NOT perform command substitution — this
# value is read literally as "$(vault get ...)". Inject the resolved DSN at
# deploy time instead of relying on shell expansion.
SENTRY_DSN=$(vault get sentry.dsn.mcp-servers)
SENTRY_ENVIRONMENT=production
SENTRY_TRACE_SAMPLE_RATE=0.1
SENTRY_PROFILE_SAMPLE_RATE=0.01
APP_VERSION=1.0.0
# ===========================================
# Task MCP Environment Variables
# ===========================================
# PostgreSQL connection via pgbouncer
POSTGRES_HOST=postgres.agiliton.internal
POSTGRES_PORT=6432
# Embedding service configuration
LLM_API_URL=https://api.agiliton.cloud/llm
# SECURITY: a live API key was committed on this line — rotate it immediately
# and keep only a placeholder in version control.
LLM_API_KEY=your_llm_api_key_here
# Jira Cloud (CF-762 migration)
JIRA_URL=https://agiliton.atlassian.net
JIRA_USERNAME=christian.gick@agiliton.eu
# SECURITY: a live Jira API token was committed on this line — revoke/rotate it
# immediately and keep only a placeholder in version control.
JIRA_API_TOKEN=your_jira_api_token

View File

@@ -1,9 +1,14 @@
# Task MCP Environment Variables
# Session MCP Environment Variables (forked from task-mcp, CF-762)
# PostgreSQL connection via pgbouncer
POSTGRES_HOST=infra.agiliton.internal
POSTGRES_HOST=postgres.agiliton.internal
POSTGRES_PORT=6432
# Embedding service configuration
LLM_API_URL=https://api.agiliton.cloud/llm
LLM_API_KEY=your_llm_api_key_here
# Jira Cloud (session tracking)
JIRA_URL=https://agiliton.atlassian.net
JIRA_USERNAME=your_email@agiliton.eu
JIRA_API_TOKEN=your_jira_api_token

View File

@@ -0,0 +1,7 @@
-- Migration 028: Add unique index for session checkpoint upserts (CF-572)
-- Ensures at most ONE checkpoint row per session+note_type.
-- Normal session_note_add calls (recovered_from IS NULL) are unaffected.
-- Partial unique index: only rows with recovered_from = 'checkpoint'
-- participate, so ON CONFLICT upserts can target (session_id, note_type)
-- for checkpoints without constraining ordinary notes.
-- IF NOT EXISTS keeps the migration idempotent on re-runs.
CREATE UNIQUE INDEX IF NOT EXISTS uq_session_checkpoint
ON session_notes (session_id, note_type)
WHERE recovered_from = 'checkpoint';

View File

@@ -0,0 +1,3 @@
-- CF-314: Add planning_mode_required flag for smart planning mode auto-detection
-- NULL = auto-detect (scoring algorithm), true = always plan, false = never plan
-- IF NOT EXISTS keeps the migration idempotent; DEFAULT NULL means existing
-- rows fall into the auto-detect path with no backfill needed.
ALTER TABLE tasks ADD COLUMN IF NOT EXISTS planning_mode_required BOOLEAN DEFAULT NULL;

View File

@@ -0,0 +1,9 @@
-- Migration 030: Add jira_issue_key to sessions for Jira Cloud linking (CF-762)
-- Links sessions to Jira issues after task management moved to Jira Cloud
ALTER TABLE sessions ADD COLUMN IF NOT EXISTS jira_issue_key TEXT;
-- Partial index: only rows actually linked to a Jira issue are indexed, which
-- keeps the index small because the column stays NULL for historical sessions.
CREATE INDEX IF NOT EXISTS idx_sessions_jira_issue_key ON sessions (jira_issue_key) WHERE jira_issue_key IS NOT NULL;
-- Also add to task_activity for historical audit trail linking
-- (no index here: the audit table is only scanned, not point-queried, per this migration)
ALTER TABLE task_activity ADD COLUMN IF NOT EXISTS jira_issue_key TEXT;

View File

@@ -0,0 +1,50 @@
-- Migration 031: Archive task tables after Jira Cloud migration (CF-762)
-- Task management moved to Jira Cloud. Archive local task tables for historical reference.
-- Session, memory, archive, and infrastructure tables remain active.
-- NOTE: steps 3 (DROP TABLE) and 7 (DROP INDEX) are irreversible; the whole
-- script runs in one transaction, so a failure leaves the schema untouched.
BEGIN;
-- 1. Archive task tables (rename with archived_ prefix)
--    (RENAME keeps data, constraints and indexes; index NAMES do not change.)
ALTER TABLE IF EXISTS tasks RENAME TO archived_tasks;
ALTER TABLE IF EXISTS task_checklist RENAME TO archived_task_checklist;
ALTER TABLE IF EXISTS task_links RENAME TO archived_task_links;
ALTER TABLE IF EXISTS task_activity RENAME TO archived_task_activity;
ALTER TABLE IF EXISTS task_sequences RENAME TO archived_task_sequences;
-- 2. Add archived_at timestamp to archived tables
--    (DEFAULT NOW() stamps every existing row with the archival time.)
ALTER TABLE IF EXISTS archived_tasks ADD COLUMN IF NOT EXISTS archived_at TIMESTAMP WITH TIME ZONE DEFAULT NOW();
ALTER TABLE IF EXISTS archived_task_checklist ADD COLUMN IF NOT EXISTS archived_at TIMESTAMP WITH TIME ZONE DEFAULT NOW();
ALTER TABLE IF EXISTS archived_task_links ADD COLUMN IF NOT EXISTS archived_at TIMESTAMP WITH TIME ZONE DEFAULT NOW();
ALTER TABLE IF EXISTS archived_task_activity ADD COLUMN IF NOT EXISTS archived_at TIMESTAMP WITH TIME ZONE DEFAULT NOW();
ALTER TABLE IF EXISTS archived_task_sequences ADD COLUMN IF NOT EXISTS archived_at TIMESTAMP WITH TIME ZONE DEFAULT NOW();
-- 3. Drop tables that are fully replaced by Jira (data already migrated)
--    CASCADE also removes dependent views/constraints referencing them.
DROP TABLE IF EXISTS epics CASCADE;
DROP TABLE IF EXISTS epic_sequences CASCADE;
DROP TABLE IF EXISTS versions CASCADE;
-- 4. Keep these tables (still referenced by session tools):
--    - task_commits (git commit ↔ Jira issue linking)
--    - task_migration_map (maps old local IDs → Jira keys)
--    - task_delegations (code delegation tracking)
-- 5. Update task_commits to remove FK constraint on archived_tasks
--    (commits now reference Jira issue keys, not local task IDs)
ALTER TABLE IF EXISTS task_commits DROP CONSTRAINT IF EXISTS task_commits_task_id_fkey;
-- 6. Update task_delegations to remove FK constraint on archived_tasks
ALTER TABLE IF EXISTS task_delegations DROP CONSTRAINT IF EXISTS task_delegations_task_id_fkey;
-- 7. Drop unused indexes on archived tables (save space, they're read-only now)
--    (These still carry their pre-rename names, per the note in step 1.)
DROP INDEX IF EXISTS idx_tasks_status;
DROP INDEX IF EXISTS idx_tasks_type;
DROP INDEX IF EXISTS idx_tasks_priority;
DROP INDEX IF EXISTS idx_tasks_epic;
DROP INDEX IF EXISTS idx_tasks_version;
DROP INDEX IF EXISTS idx_tasks_embedding;
-- 8. Record migration
INSERT INTO schema_migrations (version, applied_at) VALUES ('031_archive_task_tables', NOW())
ON CONFLICT DO NOTHING;
COMMIT;

View File

@@ -0,0 +1,57 @@
-- Migration 032: Fix session_sequences drift (CF-816)
-- Problem: Retro-imported sessions with explicit session_number bypass the trigger,
-- leaving session_sequences.next_number behind the actual MAX(session_number).
-- Next auto-assigned number then collides with the unique index.
-- Fix: Make get_next_session_number() self-healing by always checking actual max.
--
-- Review fixes: removed the declared-but-unused v_seq_number variable, and
-- merged the "sync forward" + "increment" UPDATEs into a single atomic UPDATE
-- (one row lock, one write; the returned value is identical:
-- GREATEST(stored next_number, actual max + 1)).

-- Step 1: Replace the function with a self-healing version
CREATE OR REPLACE FUNCTION get_next_session_number(p_project TEXT)
RETURNS INTEGER AS $$
DECLARE
    v_max_number INTEGER;
    v_number INTEGER;
BEGIN
    -- Insert project if doesn't exist
    INSERT INTO projects (key, name) VALUES (p_project, p_project)
    ON CONFLICT (key) DO NOTHING;
    -- Insert sequence if doesn't exist
    INSERT INTO session_sequences (project, next_number)
    VALUES (p_project, 1)
    ON CONFLICT (project) DO NOTHING;
    -- Get the actual max session_number for this project (handles external inserts)
    SELECT COALESCE(MAX(session_number), 0) INTO v_max_number
    FROM sessions
    WHERE project = p_project;
    -- Self-heal and increment in ONE atomic statement: the assigned number is
    -- GREATEST(next_number, v_max_number + 1); the stored counter advances past it.
    -- The UPDATE's row lock serializes concurrent callers on this project.
    UPDATE session_sequences
    SET next_number = GREATEST(next_number, v_max_number + 1) + 1,
        last_updated = NOW()
    WHERE project = p_project
    RETURNING next_number - 1 INTO v_number;
    RETURN v_number;
END;
$$ LANGUAGE plpgsql;

-- Step 2: One-time sync of all existing sequences to match actual data
UPDATE session_sequences sq
SET next_number = GREATEST(sq.next_number, sub.actual_max + 1),
    last_updated = NOW()
FROM (
    SELECT project, COALESCE(MAX(session_number), 0) AS actual_max
    FROM sessions
    WHERE project IS NOT NULL
    GROUP BY project
) sub
WHERE sq.project = sub.project
AND sq.next_number <= sub.actual_max;

View File

@@ -1,7 +1,7 @@
{
"name": "task-mcp",
"name": "session-mcp",
"version": "1.0.0",
"description": "MCP server for task management with PostgreSQL/pgvector backend",
"description": "MCP server for session/memory/archive management with PostgreSQL/pgvector. Forked from task-mcp (CF-762).",
"main": "dist/index.js",
"type": "module",
"scripts": {

View File

@@ -5,7 +5,7 @@ const { Pool } = pg;
// Database configuration
const pool = new Pool({
host: 'infra.agiliton.internal',
host: 'postgres.agiliton.internal',
port: 5432,
database: 'agiliton',
user: 'agiliton',

View File

@@ -12,7 +12,7 @@ const { Pool } = pg;
// Configuration - Direct WireGuard connection to INFRA VM PostgreSQL
const config = {
host: process.env.POSTGRES_HOST || 'infra.agiliton.internal',
host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
port: 5432,
database: 'agiliton',
user: 'agiliton',

View File

@@ -16,7 +16,7 @@ const { Pool } = pg;
// Database configuration
const pool = new Pool({
host: process.env.POSTGRES_HOST || 'infra.agiliton.internal',
host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
port: 5432,
database: 'agiliton',
user: 'agiliton',

2
run.sh
View File

@@ -1,6 +1,6 @@
#!/bin/bash
echo "task-mcp: run.sh executing with database connection" >&2
export DB_HOST="infra.agiliton.internal"
export DB_HOST="postgres.agiliton.internal"
export DB_PORT="5432"
export DB_NAME="agiliton"
export DB_USER="agiliton"

View File

@@ -0,0 +1,490 @@
#!/usr/bin/env npx tsx
/**
* Consolidate/merge Jira projects after CF-762 migration.
*
* Uses Jira Cloud Bulk Move API (POST /rest/api/3/bulk/issues/move)
* to move all issues from SOURCE to TARGET project, then updates
* task_migration_map and tasks table in PostgreSQL.
*
* Usage:
* npx tsx scripts/consolidate-projects.ts --from LIT --to LITE [--dry-run] [--delete-source]
* npx tsx scripts/consolidate-projects.ts --batch tier1 [--dry-run] [--delete-source]
* npx tsx scripts/consolidate-projects.ts --batch all [--dry-run] [--delete-source]
*/
import pg from 'pg';
import dotenv from 'dotenv';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
// Load the repo-root .env, overriding any pre-set shell variables.
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });

// Jira Cloud connection (basic auth: email + API token).
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');

// PostgreSQL pool.
// SECURITY FIX: the database password was previously hardcoded here and
// committed to the repository. It now comes from POSTGRES_PASSWORD (loaded
// from .env above); the formerly committed credential must be rotated.
const pool = new pg.Pool({
  host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
  port: parseInt(process.env.POSTGRES_PORT || '5432'),
  database: 'agiliton',
  user: 'agiliton',
  password: process.env.POSTGRES_PASSWORD || '',
  max: 3,
});

// CLI flags: --dry-run / --delete-source are booleans; --from/--to/--batch
// take the value that follows them.
const args = process.argv.slice(2);
const DRY_RUN = args.includes('--dry-run');
const DELETE_SOURCE = args.includes('--delete-source');
const FROM = args.find((_, i) => args[i - 1] === '--from') || '';
const TO = args.find((_, i) => args[i - 1] === '--to') || '';
const BATCH = args.find((_, i) => args[i - 1] === '--batch') || '';

// Rate-limit pacing and async-task polling knobs.
const DELAY_MS = 700;
const MAX_RETRIES = 5;
const POLL_INTERVAL_MS = 2000;
const POLL_TIMEOUT_MS = 120000;

// Batch definitions — LIT already moved manually during testing
const TIER1: Array<[string, string]> = [
  ['CARD', 'CS'],
  ['TES', 'TS'],
  ['DA', 'DB'],
  ['AF', 'AFNE'],
];
const TIER2: Array<[string, string]> = [
  ['RUBI', 'RUB'],
  ['ET', 'TG'],
  ['ZORK', 'ZOS'],
];
const TIER3: Array<[string, string]> = [
  ['IS', 'INFR'],
  ['CLN', 'INFR'],
  ['TOOLS', 'INFR'],
];

// Minimal slice of a Jira issue as returned by the search endpoint.
interface JiraIssue {
  key: string;
  id: string;
  fields: {
    summary: string;
    issuetype: { id: string; name: string };
    status: { name: string };
  };
}
/** Pause for `ms` milliseconds (used for rate-limit pacing). */
function delay(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(() => {
      resolve();
    }, ms);
  });
}
/**
 * Thin wrapper over fetch for the Jira Cloud REST v3 API: prefixes the path
 * with the instance base URL and attaches basic-auth + JSON headers.
 * Caller-supplied headers take precedence over the defaults.
 */
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
  const headers = {
    'Authorization': `Basic ${JIRA_AUTH}`,
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    ...options.headers,
  };
  return fetch(`${JIRA_URL}/rest/api/3${path}`, { ...options, headers });
}
/**
 * jiraFetch with rate-limit pacing and retry on 429 / 5xx responses.
 * Waits DELAY_MS before every attempt, then honours the Retry-After header
 * (seconds) or falls back to exponential backoff. After MAX_RETRIES retries
 * the last response is returned as-is.
 */
async function jiraFetchWithRetry(path: string, options: RequestInit = {}): Promise<Response> {
  let lastResponse: Response | null = null;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    await delay(DELAY_MS);
    const response = await jiraFetch(path, options);
    lastResponse = response;
    const retryable = response.status === 429 || response.status >= 500;
    if (!retryable || attempt >= MAX_RETRIES) {
      return response;
    }
    const retryAfter = response.headers.get('Retry-After');
    const backoffMs = retryAfter
      ? parseInt(retryAfter) * 1000
      : DELAY_MS * Math.pow(2, attempt + 1);
    console.warn(` [RETRY] ${response.status}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
    await delay(backoffMs);
  }
  return lastResponse!;
}
// Get project ID for a project key
async function getProjectId(key: string): Promise<string | null> {
const res = await jiraFetchWithRetry(`/project/${key}`);
if (!res.ok) return null;
const data = await res.json() as { id: string };
return data.id;
}
// Get all issues in a project (v3 GET /search/jql)
async function getAllIssues(projectKey: string): Promise<JiraIssue[]> {
const issues: JiraIssue[] = [];
let startAt = 0;
while (true) {
const jql = encodeURIComponent(`project="${projectKey}" ORDER BY key ASC`);
const res = await jiraFetchWithRetry(`/search/jql?jql=${jql}&maxResults=100&startAt=${startAt}&fields=summary,issuetype,status`);
if (!res.ok) {
console.error(` Failed to search ${projectKey}: ${res.status} ${await res.text()}`);
break;
}
const data = await res.json() as { total?: number; issues: JiraIssue[]; isLast?: boolean };
issues.push(...data.issues);
startAt += data.issues.length;
if (data.isLast || (data.total !== undefined && startAt >= data.total) || data.issues.length === 0) break;
}
return issues;
}
// Get issue type IDs available in a project
async function getProjectIssueTypes(projectKey: string): Promise<Map<string, string>> {
const res = await jiraFetchWithRetry(`/project/${projectKey}/statuses`);
if (!res.ok) return new Map();
const types = await res.json() as Array<{ id: string; name: string }>;
return new Map(types.map(t => [t.name, t.id]));
}
// Bulk move issues using Jira Cloud API
// Key format: "targetProjectId,targetIssueTypeId"
/**
 * Start a Jira Cloud bulk move of `issueKeys` into the given target project
 * and issue type. The API is asynchronous: on success it returns { taskId }
 * for polling; on failure the error body is logged and null is returned.
 * Mapping key format required by the API: "targetProjectId,targetIssueTypeId".
 */
async function bulkMoveIssues(
  issueKeys: string[],
  targetProjectId: string,
  targetIssueTypeId: string,
): Promise<{ taskId: string } | null> {
  const payload = {
    sendBulkNotification: false,
    targetToSourcesMapping: {
      [`${targetProjectId},${targetIssueTypeId}`]: {
        inferFieldDefaults: true,
        inferStatusDefaults: true,
        inferSubtaskTypeDefault: true,
        issueIdsOrKeys: issueKeys,
      },
    },
  };
  const res = await jiraFetchWithRetry('/bulk/issues/move', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  if (res.ok) {
    return await res.json() as { taskId: string };
  }
  const errorBody = await res.text();
  console.error(` FAIL bulk move: ${res.status} ${errorBody}`);
  return null;
}
// Poll a Jira async task until complete
async function pollTask(taskId: string): Promise<{ success: number[]; failed: Record<string, unknown> } | null> {
const start = Date.now();
while (Date.now() - start < POLL_TIMEOUT_MS) {
await delay(POLL_INTERVAL_MS);
const res = await jiraFetchWithRetry(`/task/${taskId}`);
if (!res.ok) {
console.error(` FAIL poll task ${taskId}: ${res.status}`);
return null;
}
const data = await res.json() as {
status: string;
progress: number;
result?: { successfulIssues: number[]; failedIssues: Record<string, unknown>; totalIssueCount: number };
};
if (data.status === 'COMPLETE') {
return {
success: data.result?.successfulIssues || [],
failed: data.result?.failedIssues || {},
};
}
if (data.status === 'FAILED' || data.status === 'CANCELLED') {
console.error(` Task ${taskId} ${data.status}`);
return null;
}
// Still running
if (data.progress > 0) {
process.stdout.write(`\r Task ${taskId}: ${data.progress}%`);
}
}
console.error(` Task ${taskId} timed out after ${POLL_TIMEOUT_MS / 1000}s`);
return null;
}
// Get issue key by numeric ID
async function getIssueKey(issueId: number): Promise<string | null> {
const res = await jiraFetchWithRetry(`/issue/${issueId}?fields=key`);
if (!res.ok) return null;
const data = await res.json() as { key: string };
return data.key;
}
// Delete a Jira project
async function deleteProject(key: string): Promise<boolean> {
if (DRY_RUN) {
console.log(` [DRY] Would delete project ${key}`);
return true;
}
const res = await jiraFetch(`/project/${key}?enableUndo=false`, { method: 'DELETE' });
return res.status === 204;
}
// Consolidate one pair
async function consolidate(from: string, to: string): Promise<{ moved: number; failed: number }> {
console.log(`\n=== Consolidating ${from}${to} ===`);
// Get project IDs
const fromProjectId = await getProjectId(from);
const toProjectId = await getProjectId(to);
if (!fromProjectId) {
console.error(` Source project ${from} does not exist in Jira. Skipping.`);
return { moved: 0, failed: 0 };
}
if (!toProjectId) {
console.error(` Target project ${to} does not exist in Jira. Skipping.`);
return { moved: 0, failed: 0 };
}
// Get target project issue types
const targetTypes = await getProjectIssueTypes(to);
console.log(` Target ${to} (id=${toProjectId}) issue types: ${Array.from(targetTypes.entries()).map(([n, id]) => `${n}=${id}`).join(', ')}`);
// Get all issues from source
const issues = await getAllIssues(from);
console.log(` Found ${issues.length} issues in ${from}`);
if (issues.length === 0) {
console.log(` Nothing to move.`);
if (DELETE_SOURCE) {
console.log(` Deleting empty source project ${from}...`);
const deleted = await deleteProject(from);
console.log(` ${deleted ? 'Deleted' : 'FAILED to delete'} ${from}`);
}
return { moved: 0, failed: 0 };
}
if (DRY_RUN) {
console.log(` [DRY] Would move ${issues.length} issues:`);
for (const issue of issues) {
console.log(` ${issue.key} [${issue.fields.issuetype.name}] ${issue.fields.status.name}: ${issue.fields.summary.substring(0, 60)}`);
}
// Still do DB updates in dry run? No.
return { moved: issues.length, failed: 0 };
}
// Build old issue ID → old key map (for tracking after move)
const idToOldKey = new Map<number, string>();
for (const issue of issues) {
idToOldKey.set(parseInt(issue.id), issue.key);
}
// Group issues by issue type for bulk move
const byType = new Map<string, { typeId: string; typeName: string; keys: string[] }>();
for (const issue of issues) {
const typeName = issue.fields.issuetype.name;
const targetTypeId = targetTypes.get(typeName);
if (!targetTypeId) {
// Fall back to Task if type doesn't exist in target
const fallbackId = targetTypes.get('Task');
if (!fallbackId) {
console.error(` No matching type for ${typeName} in ${to}, and no Task fallback. Skipping ${issue.key}`);
continue;
}
console.warn(` [WARN] ${issue.key} type ${typeName} not in target, using Task (${fallbackId})`);
const group = byType.get('Task') || { typeId: fallbackId, typeName: 'Task', keys: [] };
group.keys.push(issue.key);
byType.set('Task', group);
} else {
const group = byType.get(typeName) || { typeId: targetTypeId, typeName, keys: [] };
group.keys.push(issue.key);
byType.set(typeName, group);
}
}
let totalMoved = 0;
let totalFailed = 0;
const keyMapping = new Map<string, string>(); // old key → new key
// Move each type group
for (const [typeName, group] of byType) {
console.log(` Moving ${group.keys.length} ${typeName} issues...`);
const result = await bulkMoveIssues(group.keys, toProjectId, group.typeId);
if (!result) {
totalFailed += group.keys.length;
continue;
}
console.log(` Waiting for task ${result.taskId}...`);
const taskResult = await pollTask(result.taskId);
process.stdout.write('\r');
if (!taskResult) {
totalFailed += group.keys.length;
continue;
}
const failedCount = Object.keys(taskResult.failed).length;
console.log(` Task complete: ${taskResult.success.length} moved, ${failedCount} failed`);
totalMoved += taskResult.success.length;
totalFailed += failedCount;
// Resolve new keys for moved issues
for (const movedId of taskResult.success) {
const oldKey = idToOldKey.get(movedId);
if (!oldKey) continue;
const newKey = await getIssueKey(movedId);
if (newKey) {
keyMapping.set(oldKey, newKey);
}
}
}
console.log(` Total moved: ${totalMoved}, failed: ${totalFailed}`);
console.log(` Key mappings resolved: ${keyMapping.size}`);
// Log all mappings
for (const [oldKey, newKey] of keyMapping) {
console.log(` ${oldKey}${newKey}`);
}
// Update PostgreSQL
if (totalMoved > 0) {
console.log(` Updating PostgreSQL...`);
// 1. Update task_migration_map with new Jira keys
let mapUpdated = 0;
for (const [oldKey, newKey] of keyMapping) {
const res = await pool.query(
`UPDATE task_migration_map SET jira_issue_key = $1, migrated_at = NOW()
WHERE jira_issue_key = $2`,
[newKey, oldKey]
);
if ((res.rowCount || 0) > 0) {
mapUpdated++;
} else {
// Try where old_task_id matches (identity mapping case)
const res2 = await pool.query(
`UPDATE task_migration_map SET jira_issue_key = $1, migrated_at = NOW()
WHERE old_task_id = $2`,
[newKey, oldKey]
);
if ((res2.rowCount || 0) > 0) mapUpdated++;
}
}
console.log(` task_migration_map: ${mapUpdated} entries updated`);
// 2. Update tasks table: change project from SOURCE to TARGET
const taskUpdate = await pool.query(
`UPDATE tasks SET project = $1 WHERE project = $2`,
[to, from]
);
console.log(` tasks: ${taskUpdate.rowCount} rows (project ${from}${to})`);
// 3. Update epics table
try {
const epicUpdate = await pool.query(
`UPDATE epics SET project = $1 WHERE project = $2`,
[to, from]
);
console.log(` epics: ${epicUpdate.rowCount} rows`);
} catch { /* epics may not reference this project */ }
// 4. Update FK references that use Jira keys
for (const [oldKey, newKey] of keyMapping) {
try { await pool.query(`UPDATE memories SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
try { await pool.query(`UPDATE session_context SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
try { await pool.query(`UPDATE sessions SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
try { await pool.query(`UPDATE task_commits SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
try { await pool.query(`UPDATE deployments SET jira_issue_key = $1 WHERE jira_issue_key = $2`, [newKey, oldKey]); } catch {}
}
console.log(` FK references updated`);
// 5. Update projects table references
try {
await pool.query(`DELETE FROM project_archives WHERE project_key = $1`, [from]);
} catch {}
}
// Delete source project if requested
if (DELETE_SOURCE) {
const remaining = await getAllIssues(from);
if (remaining.length === 0) {
console.log(` Deleting empty source project ${from}...`);
const deleted = await deleteProject(from);
console.log(` ${deleted ? 'Deleted' : 'FAILED to delete'} ${from}`);
} else {
console.log(` Source ${from} still has ${remaining.length} issues, not deleting.`);
}
}
return { moved: totalMoved, failed: totalFailed };
}
/**
 * CLI entry point: resolve the pair list from --batch or --from/--to,
 * run each consolidation sequentially, then print a summary and close
 * the database pool.
 */
async function main() {
  console.log('=== Project Consolidation (CF-762 Post-Migration) ===');
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  console.log(`Delete source: ${DELETE_SOURCE ? 'yes' : 'no'}`);
  console.log('');
  if (!JIRA_USER || !JIRA_TOKEN) {
    console.error('Missing JIRA_USERNAME or JIRA_API_TOKEN');
    process.exit(1);
  }
  let pairs: Array<[string, string]> = [];
  if (BATCH) {
    const batches: Record<string, Array<[string, string]>> = {
      tier1: TIER1,
      tier2: TIER2,
      tier3: TIER3,
      all: [...TIER1, ...TIER2, ...TIER3],
    };
    const selected = batches[BATCH];
    if (!selected) {
      console.error(`Unknown batch: ${BATCH}. Use: tier1, tier2, tier3, all`);
      process.exit(1);
    }
    pairs = selected;
  } else if (FROM && TO) {
    pairs = [[FROM, TO]];
  } else {
    console.error('Usage:');
    console.error(' npx tsx scripts/consolidate-projects.ts --from LIT --to LITE [--dry-run] [--delete-source]');
    console.error(' npx tsx scripts/consolidate-projects.ts --batch tier1|tier2|tier3|all [--dry-run] [--delete-source]');
    process.exit(1);
  }
  console.log(`Pairs to consolidate (${pairs.length}):`);
  for (const [from, to] of pairs) {
    console.log(` ${from}${to}`);
  }
  console.log('');
  let movedTotal = 0;
  let failedTotal = 0;
  for (const [from, to] of pairs) {
    const { moved, failed } = await consolidate(from, to);
    movedTotal += moved;
    failedTotal += failed;
  }
  console.log('\n=== Consolidation Summary ===');
  console.log(`Total moved: ${movedTotal}`);
  console.log(`Total failed: ${failedTotal}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  await pool.end();
}
// Top-level entry: surface any unhandled failure and exit non-zero.
main().catch((err: unknown) => {
  console.error('Consolidation failed:', err);
  process.exit(1);
});

213
scripts/jira-admin.ts Normal file
View File

@@ -0,0 +1,213 @@
#!/usr/bin/env npx tsx
/**
* Jira admin helper for migration (CF-762)
* Usage:
* npx tsx scripts/jira-admin.ts get-project CF
* npx tsx scripts/jira-admin.ts delete-project CF
* npx tsx scripts/jira-admin.ts create-project CF "Claude Framework"
* npx tsx scripts/jira-admin.ts count-issues CF
* npx tsx scripts/jira-admin.ts delete-all-issues CF
*/
import dotenv from 'dotenv';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
// Load the repo-root .env, overriding already-set shell variables.
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });
// Jira Cloud connection settings (basic auth: email + API token).
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
// Accepts either JIRA_USERNAME or the alternative JIRA_EMAIL variable.
const JIRA_USER = process.env.JIRA_USERNAME || process.env.JIRA_EMAIL || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
// Pre-computed value for the "Authorization: Basic ..." header.
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');
/**
 * fetch wrapper for the Jira REST v3 API. Accepts either an absolute URL or
 * a path relative to /rest/api/3; always attaches basic-auth + JSON headers,
 * with caller-supplied headers taking precedence.
 */
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
  const target = path.startsWith('http') ? path : `${JIRA_URL}/rest/api/3${path}`;
  const headers = {
    'Authorization': `Basic ${JIRA_AUTH}`,
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    ...options.headers,
  };
  return fetch(target, { ...options, headers });
}
/** Sleep helper used to pace destructive Jira API calls. */
async function delay(ms: number): Promise<void> {
  await new Promise((resolve) => setTimeout(resolve, ms));
}
// CLI dispatch: first argument is the subcommand, the rest are its arguments.
const [command, ...cmdArgs] = process.argv.slice(2);
async function main() {
switch (command) {
case 'get-project': {
const key = cmdArgs[0];
const res = await jiraFetch(`/project/${key}`);
if (!res.ok) {
console.error(`Failed: ${res.status} ${await res.text()}`);
return;
}
const data = await res.json() as Record<string, unknown>;
console.log(JSON.stringify(data, null, 2));
break;
}
case 'list-projects': {
const res = await jiraFetch('/project');
if (!res.ok) {
console.error(`Failed: ${res.status} ${await res.text()}`);
return;
}
const projects = await res.json() as Array<{ key: string; name: string; id: string; projectTypeKey: string }>;
console.log(`Total: ${projects.length} projects`);
for (const p of projects) {
console.log(` ${p.key}: ${p.name} (id=${p.id}, type=${p.projectTypeKey})`);
}
break;
}
case 'count-issues': {
const key = cmdArgs[0];
const res = await jiraFetch(`/search/jql`, {
method: 'POST',
body: JSON.stringify({ jql: `project="${key}"`, maxResults: 1 }),
});
if (!res.ok) {
console.error(`Failed: ${res.status} ${await res.text()}`);
return;
}
const data = await res.json() as { total: number };
console.log(`${key}: ${data.total} issues`);
break;
}
case 'list-issues': {
const key = cmdArgs[0];
const max = parseInt(cmdArgs[1] || '20');
const res = await jiraFetch(`/search/jql`, {
method: 'POST',
body: JSON.stringify({ jql: `project="${key}" ORDER BY key ASC`, maxResults: max, fields: ['key', 'summary', 'issuetype', 'status'] }),
});
if (!res.ok) {
console.error(`Failed: ${res.status} ${await res.text()}`);
return;
}
const data = await res.json() as { total: number; issues: Array<{ key: string; fields: { summary: string; issuetype: { name: string }; status: { name: string } } }> };
console.log(`${key}: ${data.total} total issues (showing ${data.issues.length})`);
for (const i of data.issues) {
console.log(` ${i.key} [${i.fields.issuetype.name}] ${i.fields.status.name}: ${i.fields.summary.substring(0, 60)}`);
}
break;
}
case 'delete-all-issues': {
const key = cmdArgs[0];
if (!key) { console.error('Usage: delete-all-issues <PROJECT_KEY>'); return; }
// Get all issues
let startAt = 0;
const allKeys: string[] = [];
while (true) {
const res = await jiraFetch(`/search/jql`, {
method: 'POST',
body: JSON.stringify({ jql: `project="${key}" ORDER BY key ASC`, maxResults: 100, startAt, fields: ['key'] }),
});
if (!res.ok) { console.error(`Failed: ${res.status} ${await res.text()}`); return; }
const data = await res.json() as { total: number; issues: Array<{ key: string }> };
if (data.issues.length === 0) break;
allKeys.push(...data.issues.map(i => i.key));
startAt += data.issues.length;
if (startAt >= data.total) break;
}
console.log(`Found ${allKeys.length} issues to delete in ${key}`);
for (let i = 0; i < allKeys.length; i++) {
await delay(300);
const res = await jiraFetch(`/issue/${allKeys[i]}`, { method: 'DELETE' });
if (!res.ok) {
console.error(` FAIL delete ${allKeys[i]}: ${res.status}`);
}
if (i % 10 === 0) console.log(` [${i + 1}/${allKeys.length}] Deleted ${allKeys[i]}`);
}
console.log(`Deleted ${allKeys.length} issues from ${key}`);
break;
}
case 'delete-project': {
const key = cmdArgs[0];
if (!key) { console.error('Usage: delete-project <PROJECT_KEY>'); return; }
// enableUndo=false for permanent deletion
const res = await jiraFetch(`/project/${key}?enableUndo=false`, { method: 'DELETE' });
if (res.status === 204) {
console.log(`Project ${key} deleted permanently`);
} else {
console.error(`Failed: ${res.status} ${await res.text()}`);
}
break;
}
case 'create-project': {
const key = cmdArgs[0];
const name = cmdArgs[1] || key;
if (!key) { console.error('Usage: create-project <KEY> <NAME>'); return; }
// Get current user account ID for lead
const meRes = await jiraFetch('/myself');
const me = await meRes.json() as { accountId: string };
const body = {
key,
name,
projectTypeKey: 'business',
leadAccountId: me.accountId,
assigneeType: 'UNASSIGNED',
};
const res = await jiraFetch('/project', {
method: 'POST',
body: JSON.stringify(body),
});
if (res.ok || res.status === 201) {
const data = await res.json() as { id: string; key: string };
console.log(`Project created: ${data.key} (id=${data.id})`);
} else {
console.error(`Failed: ${res.status} ${await res.text()}`);
}
break;
}
case 'get-schemes': {
const key = cmdArgs[0];
// Get issue type scheme for project
const res = await jiraFetch(`/project/${key}`);
if (!res.ok) {
console.error(`Failed: ${res.status} ${await res.text()}`);
return;
}
const data = await res.json() as Record<string, unknown>;
console.log('Project type:', (data as any).projectTypeKey);
console.log('Style:', (data as any).style);
// Get issue types
const itRes = await jiraFetch(`/project/${key}/statuses`);
if (itRes.ok) {
const itData = await itRes.json() as Array<{ name: string; id: string; statuses: Array<{ name: string }> }>;
console.log('\nIssue types and statuses:');
for (const it of itData) {
console.log(` ${it.name} (id=${it.id}): ${it.statuses.map(s => s.name).join(', ')}`);
}
}
break;
}
default:
console.log('Commands: list-projects, get-project, count-issues, list-issues, delete-all-issues, delete-project, create-project, get-schemes');
}
}
// Entry point: any unhandled error aborts the process with a non-zero exit.
main().catch((err: unknown) => {
  console.error(err);
  process.exit(1);
});

View File

@@ -0,0 +1,887 @@
#!/usr/bin/env npx tsx
/**
* Migrate tasks from task-mcp PostgreSQL to Jira Cloud (CF-762)
* EXACT KEY MATCHING: CF-1 in task-mcp → CF-1 in Jira
*
* Strategy:
* 1. Create tasks in strict numeric order (1..maxId), filling gaps with placeholders
* 2. After all tasks, create epics (they get keys after maxId)
* 3. Then create session plans as epics
* 4. Link tasks to their epics via parent field update
* 5. Create issue links, retry cross-project ones
* 6. Store mapping and update FK references
*
* IMPORTANT: The Jira project must be empty (counter at 1) for key matching to work.
* Delete and recreate the project before running this script.
*
* Usage:
* npx tsx scripts/migrate-tasks-to-jira.ts [--dry-run] [--project CF] [--open-only] [--limit 5] [--batch-size 50]
*
* Requires env vars (from .env or shell):
* JIRA_URL, JIRA_USERNAME, JIRA_API_TOKEN
* POSTGRES_HOST (defaults to postgres.agiliton.internal)
*/
import pg from 'pg';
import dotenv from 'dotenv';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';

// Resolve the repo root relative to this script so .env loads regardless of CWD.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });

const { Pool } = pg;

// --- Config ---
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || process.env.JIRA_EMAIL || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');

// Connection pool for the task-mcp source database.
// SECURITY FIX: the database password was previously hardcoded here and
// committed to source control. It must come from the environment (set
// POSTGRES_PASSWORD in .env) and the leaked credential should be rotated.
const pool = new Pool({
  host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
  port: parseInt(process.env.POSTGRES_PORT || '5432'),
  database: process.env.POSTGRES_DB || 'agiliton',
  user: process.env.POSTGRES_USER || 'agiliton',
  password: process.env.POSTGRES_PASSWORD || '',
  max: 3,
});
// --- CLI args ---
const args = process.argv.slice(2);

// Returns the token immediately following `flag`, or null when absent.
function flagValue(flag: string): string | null {
  return args.find((_, i) => args[i - 1] === flag) || null;
}

const DRY_RUN = args.includes('--dry-run');
const OPEN_ONLY = args.includes('--open-only');
const PROJECT_FILTER = flagValue('--project');
const LIMIT = parseInt(flagValue('--limit') || '0') || 0;
const BATCH_SIZE = parseInt(flagValue('--batch-size') || '50') || 50;
const SKIP_PREFLIGHT = args.includes('--skip-preflight');

// Herocoders Checklist for Jira custom field
const CHECKLIST_FIELD = 'customfield_10091';

// Rate limit: Jira Cloud allows ~100 req/min for basic auth
// 700ms delay = ~85 req/min (safe margin)
const DELAY_MS = 700;
const MAX_RETRIES = 5;
const BATCH_PAUSE_MS = 5000; // 5s pause between batches

// ADF max size (Jira limit)
const ADF_MAX_BYTES = 32_000;

// Sleep helper used between API calls for rate limiting.
function delay(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms));
}
// --- Mappings ---

// task-mcp priority → Jira priority name.
const PRIORITY_MAP: Record<string, string> = {
  P0: 'Highest', P1: 'High', P2: 'Medium', P3: 'Low',
};

// task-mcp task type → Jira issue type (anything unmapped falls back to Task).
const TYPE_MAP: Record<string, string> = {
  task: 'Task', bug: 'Bug', feature: 'Task', debt: 'Task', investigation: 'Task',
};

// task-mcp status → Jira workflow status name. "blocked"/"abandoned" have no
// Jira equivalent here; those are carried over as labels elsewhere.
const STATUS_MAP: Record<string, string> = {
  open: 'To Do', pending: 'To Do',
  in_progress: 'In Progress', testing: 'In Progress',
  blocked: 'To Do',
  done: 'Done', completed: 'Done', abandoned: 'Done',
};

// task-mcp link type → Jira issue link type name.
const LINK_TYPE_MAP: Record<string, string> = {
  blocks: 'Blocks', relates_to: 'Relates', duplicates: 'Duplicate',
  depends_on: 'Blocks', implements: 'Relates', fixes: 'Relates',
  causes: 'Relates', needs: 'Blocks', subtask_of: 'Relates',
};

// Jira-style project keys: 2-5 uppercase letters.
const VALID_PROJECT_KEY = /^[A-Z]{2,5}$/;

// Track migration mapping: old task_id → Jira issue key
const migrationMap = new Map<string, string>();
const jiraProjects = new Set<string>();
const failedLinks: Array<{ from: string; to: string; type: string }> = [];

// Track epic old_id → Jira key (assigned after tasks)
const epicJiraKeys = new Map<string, string>();

// Tasks that need parent (epic) link set after epics are created
const pendingParentLinks: Array<{ taskJiraKey: string; epicOldId: string }> = [];
// --- Jira REST API helpers ---

/**
 * Raw fetch against the Jira Cloud v3 REST API with basic-auth headers.
 * Caller-supplied headers override the defaults.
 */
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
  const headers = {
    'Authorization': `Basic ${JIRA_AUTH}`,
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    ...options.headers,
  };
  return fetch(`${JIRA_URL}/rest/api/3${path}`, { ...options, headers });
}
/**
 * jiraFetch with rate-limit pacing and retry: waits DELAY_MS before every
 * attempt and retries 429/5xx responses with exponential backoff (honouring
 * a Retry-After header when present). Returns the last response after
 * MAX_RETRIES retries are exhausted.
 */
async function jiraFetchWithRetry(path: string, options: RequestInit = {}): Promise<Response> {
  let lastResponse: Response | null = null;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    await delay(DELAY_MS);
    const response = await jiraFetch(path, options);
    lastResponse = response;
    const retryable = response.status === 429 || response.status >= 500;
    if (!retryable) return response;
    if (attempt === MAX_RETRIES) {
      console.error(` [FAIL] ${response.status} on ${path} after ${MAX_RETRIES} retries`);
      return response;
    }
    const retryAfter = response.headers.get('Retry-After');
    const backoffMs = retryAfter
      ? parseInt(retryAfter) * 1000
      : DELAY_MS * Math.pow(2, attempt + 1);
    console.warn(` [RETRY] ${response.status} on ${path}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
    await delay(backoffMs);
  }
  return lastResponse!;
}
/**
 * Same pacing/retry policy as jiraFetchWithRetry, but against the v2 REST
 * API (used for fields written as plain text rather than ADF).
 */
async function jiraFetchV2WithRetry(path: string, options: RequestInit = {}): Promise<Response> {
  const url = `${JIRA_URL}/rest/api/2${path}`;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    await delay(DELAY_MS);
    const response = await fetch(url, {
      ...options,
      headers: {
        'Authorization': `Basic ${JIRA_AUTH}`,
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        ...options.headers,
      },
    });
    const retryable = response.status === 429 || response.status >= 500;
    if (!retryable || attempt === MAX_RETRIES) return response;
    const backoffMs = DELAY_MS * Math.pow(2, attempt + 1);
    console.warn(` [RETRY] v2 ${response.status} on ${path}, attempt ${attempt + 1}/${MAX_RETRIES}, waiting ${backoffMs}ms`);
    await delay(backoffMs);
  }
  // Unreachable: the loop always returns. Kept for exhaustiveness.
  throw new Error(`jiraFetchV2WithRetry: exhausted retries for ${path}`);
}
// --- ADF helpers ---

/**
 * Converts plain text to a minimal Atlassian Document Format document:
 * blank lines separate paragraphs; line endings are normalized to \n.
 * Oversized input is truncated below Jira's ~32KB ADF limit (with headroom
 * for the surrounding JSON markup) and a truncation marker is appended.
 * Always returns at least one paragraph.
 */
function textToAdf(text: string): Record<string, unknown> {
  let body = text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
  // Shrink by 10% steps until under the byte budget, then mark the cut.
  if (Buffer.byteLength(body, 'utf8') > ADF_MAX_BYTES - 500) {
    while (Buffer.byteLength(body, 'utf8') > ADF_MAX_BYTES - 500) {
      body = body.substring(0, Math.floor(body.length * 0.9));
    }
    body += '\n\n[...truncated - description exceeded 32KB limit]';
  }
  const paragraphs: Array<Record<string, unknown>> = [];
  const makeParagraph = (content: string): Record<string, unknown> => ({
    type: 'paragraph',
    content: [{ type: 'text', text: content }],
  });
  let buffer = '';
  const flush = (): void => {
    if (buffer.trim()) paragraphs.push(makeParagraph(buffer.trim()));
    buffer = '';
  };
  for (const line of body.split('\n')) {
    if (line.trim() === '') {
      flush();
    } else {
      buffer = buffer ? `${buffer}\n${line}` : line;
    }
  }
  flush();
  if (paragraphs.length === 0) {
    // Whitespace-only input: fall back to the raw (trimmed) original text.
    paragraphs.push(makeParagraph(text.trim() || '(empty)'));
  }
  return { type: 'doc', version: 1, content: paragraphs };
}
// --- API operations ---

/** Lists the keys of all Jira projects visible to the API user ([] on failure). */
async function getJiraProjects(): Promise<string[]> {
  const res = await jiraFetchWithRetry('/project');
  if (!res.ok) {
    console.error('Failed to list Jira projects:', res.status, await res.text());
    return [];
  }
  const projects = await res.json() as Array<{ key: string }>;
  return projects.map(({ key }) => key);
}
/**
 * Counts issues in a project via a minimal JQL search.
 * Returns 0 on any API failure (callers treat that as "empty project").
 */
async function countJiraIssues(projectKey: string): Promise<number> {
  const payload = { jql: `project="${projectKey}"`, maxResults: 1, fields: ['summary'] };
  const res = await jiraFetchWithRetry('/search/jql', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  if (!res.ok) return 0;
  const data = await res.json() as { total?: number; issues?: unknown[] };
  // Newer search endpoints may omit `total`; fall back to the page length.
  return data.total ?? data.issues?.length ?? 0;
}
/**
 * Creates a Jira issue and returns its key, or null when creation failed
 * (the error response is logged). In dry-run mode nothing is sent and a
 * fake "<PROJECT>-DRY" key is returned.
 */
async function createJiraIssue(fields: Record<string, unknown>): Promise<string | null> {
  if (DRY_RUN) {
    const key = `${(fields.project as Record<string, string>).key}-DRY`;
    console.log(` [DRY] Would create: ${(fields.summary as string).substring(0, 60)}`);
    return key;
  }
  const res = await jiraFetchWithRetry('/issue', {
    method: 'POST',
    body: JSON.stringify({ fields }),
  });
  if (res.ok) {
    const created = await res.json() as { key: string };
    return created.key;
  }
  const body = await res.text();
  console.error(` FAIL create issue: ${res.status} ${body}`);
  return null;
}
/**
 * Transitions an issue to the workflow transition matching `targetStatus`.
 * Tries an exact (case-insensitive) transition-name match first, then a
 * partial/alias match (e.g. "Start Progress" for "In Progress").
 * Returns true when the transition POST succeeded (always true in dry-run).
 *
 * FIX: the exact-match and partial-match branches previously duplicated the
 * transition POST; the lookup is now unified and the request sent once.
 */
async function transitionIssue(issueKey: string, targetStatus: string): Promise<boolean> {
  if (DRY_RUN) return true;
  const res = await jiraFetchWithRetry(`/issue/${issueKey}/transitions`);
  if (!res.ok) return false;
  const data = await res.json() as { transitions: Array<{ id: string; name: string }> };
  const wanted = targetStatus.toLowerCase();
  // Exact name match first, then partial match with common alias handling.
  const transition =
    data.transitions.find(t => t.name.toLowerCase() === wanted) ??
    data.transitions.find(t =>
      t.name.toLowerCase().includes(wanted) ||
      (targetStatus === 'In Progress' && t.name.toLowerCase().includes('progress')) ||
      (targetStatus === 'Done' && t.name.toLowerCase().includes('done'))
    );
  if (!transition) {
    console.warn(` [WARN] No transition to "${targetStatus}" for ${issueKey}. Available: ${data.transitions.map(t => t.name).join(', ')}`);
    return false;
  }
  const transRes = await jiraFetchWithRetry(`/issue/${issueKey}/transitions`, {
    method: 'POST',
    body: JSON.stringify({ transition: { id: transition.id } }),
  });
  return transRes.ok;
}
/**
 * Writes the Herocoders checklist custom field via the v2 API (the field
 * takes plain text, not ADF). Items render as "* [x] item" / "* [ ] item"
 * lines. No-op in dry-run mode or for an empty list.
 */
async function writeChecklist(issueKey: string, items: Array<{ item: string; checked: boolean }>): Promise<void> {
  if (DRY_RUN || items.length === 0) return;
  const lines: string[] = [];
  for (const entry of items) {
    lines.push(`* [${entry.checked ? 'x' : ' '}] ${entry.item}`);
  }
  const res = await jiraFetchV2WithRetry(`/issue/${issueKey}`, {
    method: 'PUT',
    body: JSON.stringify({ fields: { [CHECKLIST_FIELD]: lines.join('\n') } }),
  });
  if (!res.ok) {
    const body = await res.text();
    console.error(` FAIL checklist for ${issueKey}: ${res.status} ${body}`);
  }
}
/** Points an issue at its parent epic via the parent field (no-op in dry-run). */
async function setParent(issueKey: string, parentKey: string): Promise<void> {
  if (DRY_RUN) return;
  const payload = JSON.stringify({ fields: { parent: { key: parentKey } } });
  const res = await jiraFetchWithRetry(`/issue/${issueKey}`, { method: 'PUT', body: payload });
  if (res.ok) return;
  const body = await res.text();
  console.error(` FAIL set parent ${parentKey} for ${issueKey}: ${res.status} ${body}`);
}
/**
 * Creates a typed link between two issues. Returns true on success
 * (always true in dry-run, where only the intent is logged).
 */
async function createIssueLink(inwardKey: string, outwardKey: string, linkType: string): Promise<boolean> {
  if (DRY_RUN) {
    console.log(` [DRY] Would link: ${inwardKey} -[${linkType}]-> ${outwardKey}`);
    return true;
  }
  const payload = {
    type: { name: linkType },
    inwardIssue: { key: inwardKey },
    outwardIssue: { key: outwardKey },
  };
  const res = await jiraFetchWithRetry('/issueLink', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  if (res.ok) return true;
  const body = await res.text();
  console.error(` FAIL link ${inwardKey}->${outwardKey}: ${res.status} ${body}`);
  return false;
}
// Best-effort issue deletion; the response is intentionally ignored
// (failures are tolerated — used e.g. for cleanup of probe issues).
async function deleteIssue(issueKey: string): Promise<void> {
  await jiraFetchWithRetry(`/issue/${issueKey}`, { method: 'DELETE' });
}
// --- Pre-flight check ---

/**
 * Pre-flight workflow verification. Deliberately never creates a probe issue
 * in the target project: even a deleted probe advances Jira's key counter,
 * which would break exact key matching. Always returns true and only logs.
 */
async function preflightWorkflowCheck(projectKey: string): Promise<boolean> {
  console.log(`\nPre-flight workflow check on ${projectKey}...`);
  if (DRY_RUN || SKIP_PREFLIGHT) {
    console.log(` [${DRY_RUN ? 'DRY' : 'SKIP'}] Skipping pre-flight check`);
    return true;
  }
  // An in-project probe would consume key #1 and leave the counter at 2,
  // so the check is skipped rather than run against the migration target.
  console.log(' WARNING: Pre-flight check would consume issue key #1.');
  console.log(' Skipping in-project pre-flight to preserve key sequence.');
  console.log(' Use --skip-preflight explicitly if already verified.');
  return true;
}
// --- Migration: exact key ordering ---

/**
 * Migrates one project's tasks into Jira with EXACT key matching:
 * task-mcp "<KEY>-n" becomes Jira "<KEY>-n". Requires an empty Jira project
 * (key counter at 1). Gaps in the numeric sequence (deleted tasks) are
 * filled with placeholder issues so later keys still line up; any key
 * mismatch aborts the whole process (the sequence cannot be repaired).
 *
 * Side effects: fills migrationMap and queues pendingParentLinks (epics do
 * not exist yet at this point). The returned map is always empty and kept
 * only for interface compatibility.
 *
 * FIXES: removed the dead, never-used `gapCount` expression that preceded
 * the real real-vs-gap count; the progress log previously concatenated the
 * old id and the new Jira key with no separator.
 */
async function migrateTasksExactKeys(projectKey: string): Promise<Map<string, string>> {
  const epicMap = new Map<string, string>();
  // 1. Load all tasks for this project, indexed by numeric ID
  const tasks = await pool.query(
    `SELECT id, title, description, type, status, priority, epic_id, created_at
     FROM tasks WHERE project = $1 ORDER BY id`,
    [projectKey]
  );
  // Build a map of numeric ID → task row
  const taskById = new Map<number, (typeof tasks.rows)[0]>();
  let maxNum = 0;
  for (const task of tasks.rows) {
    const m = task.id.match(new RegExp(`^${projectKey}-(\\d+)$`));
    if (m) {
      const num = parseInt(m[1]);
      taskById.set(num, task);
      if (num > maxNum) maxNum = num;
    }
  }
  if (maxNum === 0) {
    console.log(' No numeric task IDs found, skipping.');
    return epicMap;
  }
  // --limit caps how far up the numeric range we migrate.
  const effectiveMax = LIMIT > 0 ? Math.min(maxNum, LIMIT) : maxNum;
  // Count real tasks vs. gaps (deleted tasks) within the migrated range.
  let realTasks = 0;
  let gaps = 0;
  for (let n = 1; n <= effectiveMax; n++) {
    if (taskById.has(n)) realTasks++;
    else gaps++;
  }
  console.log(` Creating ${effectiveMax} issues (${realTasks} real tasks + ${gaps} placeholders)...`);
  // 2. Create issues 1..effectiveMax in strict order
  for (let n = 1; n <= effectiveMax; n++) {
    const task = taskById.get(n);
    const taskId = `${projectKey}-${n}`;
    const expectedJiraKey = `${projectKey}-${n}`;
    if (task) {
      // Real task: encode type/status context Jira has no native field for
      // as labels.
      const labels: string[] = ['migrated-from-task-mcp'];
      if (task.type === 'feature') labels.push('feature');
      if (task.type === 'debt') labels.push('tech-debt');
      if (task.type === 'investigation') labels.push('investigation');
      if (task.status === 'blocked') labels.push('blocked');
      if (task.status === 'abandoned') labels.push('abandoned');
      const fields: Record<string, unknown> = {
        project: { key: projectKey },
        summary: task.title.substring(0, 255),
        issuetype: { name: TYPE_MAP[task.type] || 'Task' },
        priority: { name: PRIORITY_MAP[task.priority] || 'Medium' },
        labels,
      };
      if (task.description) {
        fields.description = textToAdf(task.description);
      }
      // Don't set parent here — epics don't exist yet. Queue for later.
      const jiraKey = await createJiraIssue(fields);
      if (!jiraKey) {
        console.error(` FATAL: Failed to create ${taskId}, key sequence broken!`);
        process.exit(1);
      }
      // Verify the assigned key matches the expected one.
      if (!DRY_RUN && jiraKey !== expectedJiraKey) {
        console.error(` FATAL: Key mismatch! Expected ${expectedJiraKey}, got ${jiraKey}. Aborting.`);
        process.exit(1);
      }
      migrationMap.set(task.id, jiraKey);
      // Transition away from the default "To Do" only when needed.
      const targetStatus = STATUS_MAP[task.status] || 'To Do';
      if (targetStatus !== 'To Do') {
        await transitionIssue(jiraKey, targetStatus);
      }
      // Checklist items go into the Herocoders custom field.
      const checklist = await pool.query(
        'SELECT item, checked FROM task_checklist WHERE task_id = $1 ORDER BY position',
        [task.id]
      );
      if (checklist.rows.length > 0) {
        await writeChecklist(jiraKey, checklist.rows);
      }
      // Queue parent link for later
      if (task.epic_id) {
        pendingParentLinks.push({ taskJiraKey: jiraKey, epicOldId: task.epic_id });
      }
    } else {
      // Gap — create a placeholder to keep the key counter aligned.
      const fields: Record<string, unknown> = {
        project: { key: projectKey },
        summary: `[Placeholder] Deleted task ${taskId}`,
        issuetype: { name: 'Task' },
        labels: ['migration-placeholder', 'migrated-from-task-mcp'],
      };
      const jiraKey = await createJiraIssue(fields);
      if (!jiraKey) {
        console.error(` FATAL: Failed to create placeholder for ${taskId}, key sequence broken!`);
        process.exit(1);
      }
      if (!DRY_RUN && jiraKey !== expectedJiraKey) {
        console.error(` FATAL: Key mismatch! Expected ${expectedJiraKey}, got ${jiraKey}. Aborting.`);
        process.exit(1);
      }
      // Placeholders are closed immediately.
      await transitionIssue(jiraKey, 'Done');
    }
    // Progress logging (fixed separator between old id and new key)
    if (n % 10 === 0 || n === effectiveMax) {
      console.log(` [${n}/${effectiveMax}] ${task ? task.id : 'gap'} → ${projectKey}-${n}`);
    }
    // Batch pause
    if (n > 0 && n % BATCH_SIZE === 0) {
      console.log(` [BATCH PAUSE] ${n}/${effectiveMax}, pausing ${BATCH_PAUSE_MS / 1000}s...`);
      await delay(BATCH_PAUSE_MS);
    }
  }
  return epicMap;
}
/**
 * Creates one Jira Epic per task-mcp epic. Must run AFTER the task range so
 * epic keys land above the highest task number (preserving exact task keys).
 * Records old epic id → Jira key in epicJiraKeys for later parent linking.
 *
 * FIX: the progress log previously concatenated the old epic id and the new
 * Jira key without a separator.
 */
async function migrateEpicsAfterTasks(projectKey: string): Promise<void> {
  const epics = await pool.query(
    'SELECT id, title, description, status FROM epics WHERE project = $1 ORDER BY id',
    [projectKey]
  );
  if (epics.rows.length === 0) return;
  console.log(` Creating ${epics.rows.length} epics (after task range)...`);
  for (let i = 0; i < epics.rows.length; i++) {
    const epic = epics.rows[i];
    const labels: string[] = ['migrated-from-task-mcp'];
    const fields: Record<string, unknown> = {
      project: { key: projectKey },
      summary: epic.title.substring(0, 255),
      description: epic.description ? textToAdf(epic.description) : undefined,
      issuetype: { name: 'Epic' },
      labels,
    };
    const jiraKey = await createJiraIssue(fields);
    if (jiraKey) {
      epicJiraKeys.set(epic.id, jiraKey);
      console.log(` [${i + 1}/${epics.rows.length}] Epic ${epic.id} → ${jiraKey}: ${epic.title.substring(0, 50)}`);
      if (epic.status === 'completed' || epic.status === 'done') {
        await transitionIssue(jiraKey, 'Done');
      } else if (epic.status === 'in_progress') {
        await transitionIssue(jiraKey, 'In Progress');
      }
    }
  }
}
/**
 * Creates one Jira Epic per session plan so plan content survives the move
 * to Jira. Runs after the task range to keep exact task keys intact.
 * Registers each key in epicJiraKeys under "plan-<id>".
 *
 * FIX: the progress log previously concatenated the plan id and the new
 * Jira key without a separator.
 */
async function migrateSessionPlansAfterTasks(projectKey: string): Promise<void> {
  const plans = await pool.query(
    `SELECT sp.id, sp.session_id, sp.plan_file_name, sp.plan_content, sp.status
     FROM session_plans sp
     JOIN sessions s ON sp.session_id = s.id
     WHERE s.project = $1`,
    [projectKey]
  );
  if (plans.rows.length === 0) return;
  console.log(` Creating ${plans.rows.length} session plans as Epics...`);
  for (let i = 0; i < plans.rows.length; i++) {
    const plan = plans.rows[i];
    const labels: string[] = ['migrated-from-task-mcp', 'session-plan'];
    // Labels must be short and free of characters Jira rejects.
    if (plan.plan_file_name) {
      labels.push(`plan:${plan.plan_file_name.replace(/[^a-zA-Z0-9._-]/g, '_').substring(0, 50)}`);
    }
    if (plan.status) {
      labels.push(`plan-status:${plan.status}`);
    }
    const fields: Record<string, unknown> = {
      project: { key: projectKey },
      summary: `[Session Plan] ${plan.plan_file_name || `Plan from session ${plan.session_id}`}`.substring(0, 255),
      description: plan.plan_content ? textToAdf(plan.plan_content) : undefined,
      issuetype: { name: 'Epic' },
      labels,
    };
    const jiraKey = await createJiraIssue(fields);
    if (jiraKey) {
      epicJiraKeys.set(`plan-${plan.id}`, jiraKey);
      console.log(` [${i + 1}/${plans.rows.length}] Plan ${plan.id} → ${jiraKey}`);
      if (plan.status === 'executed' || plan.status === 'abandoned') {
        await transitionIssue(jiraKey, 'Done');
      } else if (plan.status === 'approved') {
        await transitionIssue(jiraKey, 'In Progress');
      }
    }
  }
}
/**
 * Applies the queued task → epic parent links, now that epics exist.
 * Tasks whose epic never made it into Jira are skipped silently.
 */
async function linkTasksToEpics(): Promise<void> {
  if (pendingParentLinks.length === 0) return;
  console.log(` Setting parent (epic) for ${pendingParentLinks.length} tasks...`);
  let linked = 0;
  for (const pending of pendingParentLinks) {
    const epicJiraKey = epicJiraKeys.get(pending.epicOldId);
    if (!epicJiraKey) continue;
    await setParent(pending.taskJiraKey, epicJiraKey);
    linked++;
    if (linked % 20 === 0) {
      console.log(` [${linked}/${pendingParentLinks.length}] parent links set`);
    }
    if (linked % BATCH_SIZE === 0) {
      console.log(` [BATCH PAUSE] ${linked}/${pendingParentLinks.length}, pausing...`);
      await delay(BATCH_PAUSE_MS);
    }
  }
  console.log(` Linked ${linked} tasks to epics`);
}
/**
 * Recreates task links as Jira issue links. Links whose endpoints are not in
 * migrationMap yet (typically cross-project targets migrated later) are
 * pushed onto failedLinks for the global retry pass.
 */
async function migrateLinks(projectKey: string): Promise<void> {
  const links = await pool.query(
    `SELECT tl.from_task_id, tl.to_task_id, tl.link_type
     FROM task_links tl
     JOIN tasks t1 ON tl.from_task_id = t1.id
     JOIN tasks t2 ON tl.to_task_id = t2.id
     WHERE t1.project = $1 OR t2.project = $1`,
    [projectKey]
  );
  if (links.rows.length === 0) return;
  console.log(` Migrating ${links.rows.length} links...`);
  let created = 0;
  let skipped = 0;
  for (const link of links.rows) {
    const fromKey = migrationMap.get(link.from_task_id);
    const toKey = migrationMap.get(link.to_task_id);
    if (!fromKey || !toKey) {
      failedLinks.push({ from: link.from_task_id, to: link.to_task_id, type: link.link_type });
      skipped++;
      continue;
    }
    const jiraLinkType = LINK_TYPE_MAP[link.link_type] || 'Relates';
    // depends_on/needs map to "Blocks" with the direction flipped.
    const reversed = link.link_type === 'depends_on' || link.link_type === 'needs';
    const success = reversed
      ? await createIssueLink(toKey, fromKey, jiraLinkType)
      : await createIssueLink(fromKey, toKey, jiraLinkType);
    if (success) created++;
  }
  console.log(` Created ${created} links, ${skipped} deferred for cross-project retry`);
}
/**
 * Second pass over links deferred by migrateLinks(): once every project has
 * been migrated, both endpoints may finally exist in migrationMap.
 */
async function retryFailedLinks(): Promise<void> {
  if (failedLinks.length === 0) return;
  console.log(`\nRetrying ${failedLinks.length} deferred cross-project links...`);
  let created = 0;
  let failed = 0;
  for (const link of failedLinks) {
    const fromKey = migrationMap.get(link.from);
    const toKey = migrationMap.get(link.to);
    if (!fromKey || !toKey) {
      failed++;
      continue;
    }
    const jiraLinkType = LINK_TYPE_MAP[link.type] || 'Relates';
    // Same direction flip as migrateLinks() for depends_on/needs.
    const reversed = link.type === 'depends_on' || link.type === 'needs';
    const success = reversed
      ? await createIssueLink(toKey, fromKey, jiraLinkType)
      : await createIssueLink(fromKey, toKey, jiraLinkType);
    if (success) created++;
    else failed++;
  }
  console.log(` Retry results: ${created} created, ${failed} failed`);
}
// --- Post-migration ---

/**
 * Persists the old-task-id → Jira-key mapping into task_migration_map
 * (created on demand). Upserts so re-runs are safe; in dry-run mode the
 * rows are counted but nothing is written.
 */
async function updateSessionMappings(): Promise<void> {
  console.log('\nStoring migration mappings...');
  await pool.query(`
    CREATE TABLE IF NOT EXISTS task_migration_map (
      old_task_id TEXT PRIMARY KEY,
      jira_issue_key TEXT NOT NULL,
      migrated_at TIMESTAMPTZ DEFAULT NOW()
    )
  `);
  let count = 0;
  for (const [oldId, jiraKey] of migrationMap) {
    count++;
    if (!DRY_RUN) {
      await pool.query(
        `INSERT INTO task_migration_map (old_task_id, jira_issue_key)
       VALUES ($1, $2)
       ON CONFLICT (old_task_id) DO UPDATE SET jira_issue_key = $2, migrated_at = NOW()`,
        [oldId, jiraKey]
      );
    }
  }
  console.log(` Stored ${count} mappings`);
}
/**
 * Back-fills a jira_issue_key column on tables that reference old task IDs,
 * using the task_migration_map table written by updateSessionMappings().
 * Original FK columns are left untouched; only the new column is populated,
 * and only where it is still NULL, so re-runs are idempotent.
 * No-op in dry-run mode.
 */
async function updateForeignKeyReferences(): Promise<void> {
  console.log('\nUpdating FK references with Jira issue keys...');
  if (DRY_RUN) {
    console.log(' [DRY] Skipping FK reference updates');
    return;
  }
  // Add the new column where missing. A "does not exist" error just means
  // the table is absent in this database and is tolerated silently.
  const alterStatements = [
    'ALTER TABLE memories ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
    'ALTER TABLE session_context ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
    'ALTER TABLE deployments ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
    'ALTER TABLE task_commits ADD COLUMN IF NOT EXISTS jira_issue_key TEXT',
  ];
  for (const sql of alterStatements) {
    try { await pool.query(sql); }
    catch (e: unknown) {
      const msg = e instanceof Error ? e.message : String(e);
      if (!msg.includes('does not exist')) console.warn(` [WARN] ${sql}: ${msg}`);
    }
  }
  // Tables whose task FK maps 1:1 through task_migration_map.
  const updates = [
    { table: 'memories', fk: 'task_id', desc: 'memories' },
    { table: 'session_context', fk: 'current_task_id', desc: 'session_context' },
    { table: 'deployments', fk: 'task_id', desc: 'deployments' },
    { table: 'task_commits', fk: 'task_id', desc: 'task_commits' },
  ];
  for (const { table, fk, desc } of updates) {
    try {
      const result = await pool.query(
        `UPDATE ${table} SET jira_issue_key = m.jira_issue_key
         FROM task_migration_map m
         WHERE ${table}.${fk} = m.old_task_id
         AND ${table}.jira_issue_key IS NULL`
      );
      console.log(` ${desc}: ${result.rowCount} rows updated`);
    } catch (e: unknown) {
      const msg = e instanceof Error ? e.message : String(e);
      console.warn(` [WARN] ${desc}: ${msg}`);
    }
  }
  // Sessions are linked indirectly: session_context carries the task the
  // session was working on, which the mapping table resolves to a Jira key.
  try {
    const result = await pool.query(
      `UPDATE sessions SET jira_issue_key = m.jira_issue_key
       FROM task_migration_map m, session_context sc
       WHERE sc.session_id = sessions.id
       AND sc.current_task_id = m.old_task_id
       AND sessions.jira_issue_key IS NULL`
    );
    console.log(` sessions: ${result.rowCount} rows updated`);
  } catch (e: unknown) {
    const msg = e instanceof Error ? e.message : String(e);
    console.warn(` [WARN] sessions: ${msg}`);
  }
}
// --- Main ---

/**
 * Orchestrates the full migration. Per project: tasks with exact keys, then
 * epics, then session plans, then parent links and issue links. Afterwards,
 * globally: cross-project link retry, mapping table, FK back-fill.
 * Projects must already exist (and be empty) in Jira; others are skipped.
 *
 * FIX: the per-project summary reported epicJiraKeys.size, which accumulates
 * across ALL projects, so every project after the first over-reported its
 * epic count. It now counts only keys belonging to the current project.
 */
async function main() {
  console.log('=== task-mcp → Jira Cloud Migration (EXACT KEY MATCHING) ===');
  console.log(`Jira: ${JIRA_URL}`);
  console.log(`User: ${JIRA_USER}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  console.log(`Filter: ${PROJECT_FILTER || 'all valid projects'}`);
  console.log(`Scope: ${OPEN_ONLY ? 'open tasks only' : 'all tasks'}`);
  console.log(`Limit: ${LIMIT || 'none'}`);
  console.log(`Batch: ${BATCH_SIZE} (${BATCH_PAUSE_MS / 1000}s pause)`);
  console.log(`Rate: ${DELAY_MS}ms delay, ${MAX_RETRIES} retries`);
  console.log('');
  if (!JIRA_USER || !JIRA_TOKEN) {
    console.error('Missing JIRA_USERNAME or JIRA_API_TOKEN');
    process.exit(1);
  }
  const existingProjects = await getJiraProjects();
  existingProjects.forEach(p => jiraProjects.add(p));
  console.log(`Existing Jira projects: ${existingProjects.join(', ')}`);
  const dbProjects = await pool.query(
    'SELECT key, name FROM projects WHERE key ~ $1 ORDER BY key',
    ['^[A-Z]{2,5}$']
  );
  const projectsToMigrate = dbProjects.rows.filter(p => {
    if (PROJECT_FILTER && p.key !== PROJECT_FILTER) return false;
    if (!VALID_PROJECT_KEY.test(p.key)) return false;
    return true;
  });
  console.log(`Projects to migrate: ${projectsToMigrate.map(p => p.key).join(', ')}`);
  const missing = projectsToMigrate.filter(p => !jiraProjects.has(p.key));
  if (missing.length > 0) {
    console.log(`\nWARNING: These projects don't exist in Jira yet (will be skipped):`);
    missing.forEach(p => console.log(` ${p.key} - ${p.name}`));
    console.log('Create them in Jira first, then re-run migration.\n');
  }
  // Migrate each project
  for (const project of projectsToMigrate) {
    if (!jiraProjects.has(project.key)) {
      console.log(`\nSkipping ${project.key} (not in Jira)`);
      continue;
    }
    console.log(`\n--- Migrating project: ${project.key} (${project.name}) ---`);
    // Check if project already has issues (already migrated)
    const existingCount = await countJiraIssues(project.key);
    if (existingCount > 0) {
      console.log(` Skipping: already has ${existingCount} issues in Jira`);
      continue;
    }
    // Clear per-project state
    pendingParentLinks.length = 0;
    // 1. Tasks in exact numeric order (with gap placeholders)
    await migrateTasksExactKeys(project.key);
    // 2. Epics (after tasks, so they get keys after maxTaskId)
    await migrateEpicsAfterTasks(project.key);
    // 3. Session plans as epics
    await migrateSessionPlansAfterTasks(project.key);
    // 4. Link tasks to their parent epics (now that epics exist)
    await linkTasksToEpics();
    // 5. Issue links
    await migrateLinks(project.key);
    // Per-project summary. migrationMap and epicJiraKeys are global, so
    // filter both by this project's key prefix.
    const prefix = `${project.key}-`;
    const taskCount = Array.from(migrationMap.values()).filter(v => v.startsWith(prefix)).length;
    const epicCount = Array.from(epicJiraKeys.values()).filter(v => v.startsWith(prefix)).length;
    console.log(` Done: ${epicCount} epics, ${taskCount} tasks migrated`);
  }
  // 6. Retry cross-project links
  await retryFailedLinks();
  // 7. Store mapping
  await updateSessionMappings();
  // 8. Update FK references
  await updateForeignKeyReferences();
  // Final summary
  console.log('\n=== Migration Summary ===');
  console.log(`Total issues migrated: ${migrationMap.size}`);
  console.log(`Epics created: ${epicJiraKeys.size}`);
  console.log(`Failed links: ${failedLinks.filter(l => !migrationMap.has(l.from) || !migrationMap.has(l.to)).length}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN (no changes made)' : 'LIVE'}`);
  await pool.end();
}
// Entry point: abort the process on any unhandled migration error.
main().catch((err: unknown) => {
  console.error('Migration failed:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,221 @@
#!/usr/bin/env npx tsx
/**
* Prepare all projects for exact-key migration (CF-762)
* For each project: delete → recreate → assign shared issue type scheme
* Then the migration script can run for all projects at once.
*
* Usage:
* npx tsx scripts/prepare-all-projects.ts [--dry-run] [--exclude CF]
*/
import pg from 'pg';
import dotenv from 'dotenv';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';

// Load .env from the repo root regardless of the current working directory.
const __dirname = dirname(fileURLToPath(import.meta.url));
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });

const { Pool } = pg;

const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || process.env.JIRA_EMAIL || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');

const SHARED_SCHEME_ID = '10329'; // Agiliton Software Issue Type Scheme

// Source database. SECURITY FIX: the password was previously hardcoded here
// and committed to source control; it must now come from POSTGRES_PASSWORD
// (and the leaked credential should be rotated).
const pool = new Pool({
  host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
  port: parseInt(process.env.POSTGRES_PORT || '5432'),
  database: 'agiliton',
  user: 'agiliton',
  password: process.env.POSTGRES_PASSWORD || '',
  max: 3,
});

const args = process.argv.slice(2);
const DRY_RUN = args.includes('--dry-run');
const excludeIdx = args.indexOf('--exclude');
// --exclude takes a comma-separated list of project keys.
const EXCLUDE = excludeIdx >= 0 ? args[excludeIdx + 1]?.split(',') || [] : [];

// Sleep helper for API rate limiting.
function delay(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms));
}
/** Authenticated fetch against the Jira Cloud v3 REST API. */
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
  const headers = {
    Authorization: `Basic ${JIRA_AUTH}`,
    'Content-Type': 'application/json',
    Accept: 'application/json',
    ...options.headers,
  };
  return fetch(`${JIRA_URL}/rest/api/3${path}`, { ...options, headers });
}
/** Lists all Jira projects (key/name/id); empty array on API failure. */
async function getJiraProjects(): Promise<Array<{ key: string; name: string; id: string }>> {
  const res = await jiraFetch('/project');
  if (!res.ok) return [];
  return await res.json() as Array<{ key: string; name: string; id: string }>;
}
/**
 * Permanently delete a Jira project.
 * enableUndo=false skips the Jira trash — issues cannot be restored afterwards.
 */
async function deleteProject(key: string): Promise<boolean> {
  const res = await jiraFetch(`/project/${key}?enableUndo=false`, { method: 'DELETE' });
  const deleted = res.status === 204;
  return deleted;
}
/**
 * Create a business-type Jira project with the given key/name/lead.
 * Returns the new project id, or null (with a logged error) on failure.
 */
async function createProject(key: string, name: string, leadAccountId: string): Promise<string | null> {
  const payload = {
    key,
    name,
    projectTypeKey: 'business',
    leadAccountId,
    assigneeType: 'UNASSIGNED',
  };
  const res = await jiraFetch('/project', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  if (!res.ok && res.status !== 201) {
    console.error(` FAIL create ${key}: ${res.status} ${await res.text()}`);
    return null;
  }
  const { id } = await res.json() as { id: string };
  return id;
}
/** Attach the shared issue type scheme to a project (by numeric project id). */
async function assignScheme(projectId: string): Promise<boolean> {
  const payload = { issueTypeSchemeId: SHARED_SCHEME_ID, projectId };
  const res = await jiraFetch('/issuetypescheme/project', {
    method: 'PUT',
    body: JSON.stringify(payload),
  });
  return res.ok || res.status === 204;
}
/**
 * Confirm the Epic/Task/Bug issue types are available on the project.
 * GET /project/{key}/statuses returns one entry per issue type; 'name' here is
 * the issue type name, not a status name.
 */
async function verifyScheme(key: string): Promise<boolean> {
  const res = await jiraFetch(`/project/${key}/statuses`);
  if (!res.ok) return false;
  const issueTypes = await res.json() as Array<{ name: string }>;
  const present = new Set(issueTypes.map(t => t.name));
  return present.has('Epic') && present.has('Task') && present.has('Bug');
}
/**
 * Orchestrate preparation of every DB project that still exists in Jira:
 * delete the Jira project, recreate it, assign the shared issue type scheme,
 * then verify Epic/Task/Bug are available.
 *
 * DESTRUCTIVE in LIVE mode: deleteProject() uses enableUndo=false, so the
 * existing Jira issues in each processed project are unrecoverable.
 * Run with --dry-run first to review the plan.
 */
async function main() {
  console.log('=== Prepare Projects for Migration ===');
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : 'LIVE'}`);
  console.log(`Exclude: ${EXCLUDE.length > 0 ? EXCLUDE.join(', ') : 'none'}`);
  console.log('');
  // Get current user for project lead (recreated projects need a leadAccountId)
  const meRes = await jiraFetch('/myself');
  const me = await meRes.json() as { accountId: string };
  // Get existing Jira projects
  const jiraProjects = await getJiraProjects();
  const jiraProjectMap = new Map(jiraProjects.map(p => [p.key, p]));
  console.log(`Jira projects: ${jiraProjects.length}`);
  // Get DB projects with tasks. max_id is the highest numeric suffix of the
  // task IDs (e.g. 'CF-123' -> 123); the gap between max_id and task_count is
  // how many placeholder issues the migration must create to keep keys exact.
  const dbProjects = await pool.query(
    `SELECT p.key, p.name, COUNT(t.id) as task_count,
     MAX(CAST(REGEXP_REPLACE(t.id, '^' || p.key || '-', '') AS INTEGER)) as max_id
     FROM projects p
     JOIN tasks t ON t.project = p.key
     WHERE p.key ~ '^[A-Z]{2,5}$'
     GROUP BY p.key, p.name
     ORDER BY p.key`
  );
  console.log(`DB projects with tasks: ${dbProjects.rows.length}`);
  console.log('');
  // Filter: must exist in Jira, not excluded
  const toProcess = dbProjects.rows.filter((p: any) => {
    if (EXCLUDE.includes(p.key)) return false;
    if (!jiraProjectMap.has(p.key)) return false;
    return true;
  });
  console.log(`Projects to prepare: ${toProcess.length}`);
  console.log('');
  // Summary table
  console.log('Project | Tasks | Max ID | Placeholders | Status');
  console.log('--------|-------|--------|-------------|-------');
  let totalTasks = 0;
  let totalPlaceholders = 0;
  for (const p of toProcess) {
    const placeholders = p.max_id - p.task_count;
    // task_count comes back from pg as a string, hence parseInt
    totalTasks += parseInt(p.task_count);
    totalPlaceholders += placeholders;
    console.log(`${p.key.padEnd(7)} | ${String(p.task_count).padStart(5)} | ${String(p.max_id).padStart(6)} | ${String(placeholders).padStart(11)} | pending`);
  }
  console.log(`TOTAL | ${String(totalTasks).padStart(5)} | ${String(totalTasks + totalPlaceholders).padStart(6)} | ${String(totalPlaceholders).padStart(11)} |`);
  console.log('');
  if (DRY_RUN) {
    console.log('[DRY RUN] Would process above projects. Run without --dry-run to execute.');
    await pool.end();
    return;
  }
  // Process each project; failures skip to the next project rather than abort,
  // so a partial run can be resumed/retried.
  let success = 0;
  let failed = 0;
  for (let i = 0; i < toProcess.length; i++) {
    const p = toProcess[i];
    const jiraProject = jiraProjectMap.get(p.key)!;
    console.log(`[${i + 1}/${toProcess.length}] ${p.key} (${p.name})...`);
    // 1. Delete (delays throughout throttle the Jira API)
    await delay(1000);
    const deleted = await deleteProject(p.key);
    if (!deleted) {
      console.error(` FAIL delete ${p.key}`);
      failed++;
      continue;
    }
    console.log(` Deleted`);
    // 2. Wait a bit for Jira to process the deletion before reusing the key
    await delay(2000);
    // 3. Recreate (prefer the live Jira name; fall back to the DB name)
    const newId = await createProject(p.key, jiraProject.name || p.name, me.accountId);
    if (!newId) {
      console.error(` FAIL recreate ${p.key}`);
      failed++;
      continue;
    }
    console.log(` Recreated (id=${newId})`);
    // 4. Assign shared scheme
    await delay(1000);
    const schemeOk = await assignScheme(newId);
    if (!schemeOk) {
      console.error(` FAIL assign scheme for ${p.key}`);
      failed++;
      continue;
    }
    // 5. Verify the scheme actually exposes the required issue types
    const verified = await verifyScheme(p.key);
    if (!verified) {
      console.error(` FAIL verify scheme for ${p.key} (missing Epic/Task/Bug)`);
      failed++;
      continue;
    }
    console.log(` Scheme OK (Epic/Task/Bug)`);
    success++;
  }
  console.log(`\n=== Preparation Summary ===`);
  console.log(`Success: ${success}`);
  console.log(`Failed: ${failed}`);
  console.log(`\nRun migration: npx tsx scripts/migrate-tasks-to-jira.ts --skip-preflight`);
  await pool.end();
}
main().catch(err => { console.error(err); process.exit(1); });

View File

@@ -0,0 +1,232 @@
#!/usr/bin/env npx tsx
/**
 * Validate CF-762 migration integrity.
 * Checks: Jira issue counts vs DB, statuses, checklists, epic links, FK references.
 *
 * Usage: npx tsx scripts/validate-migration.ts [--project CF] [--verbose]
 */
import pg from 'pg';
import dotenv from 'dotenv';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
dotenv.config({ path: join(__dirname, '..', '.env'), override: true });
// Jira Cloud connection (basic auth with API token).
const JIRA_URL = process.env.JIRA_URL || 'https://agiliton.atlassian.net';
const JIRA_USER = process.env.JIRA_USERNAME || '';
const JIRA_TOKEN = process.env.JIRA_API_TOKEN || '';
const JIRA_AUTH = Buffer.from(`${JIRA_USER}:${JIRA_TOKEN}`).toString('base64');
const pool = new pg.Pool({
  host: process.env.POSTGRES_HOST || 'postgres.agiliton.internal',
  port: parseInt(process.env.POSTGRES_PORT || '5432'),
  database: process.env.POSTGRES_DB || 'agiliton',
  user: process.env.POSTGRES_USER || 'agiliton',
  // SECURITY: read the password from the environment. The literal fallback is
  // the previously hardcoded credential — rotate it, then remove the fallback.
  password: process.env.POSTGRES_PASSWORD || 'QtqiwCOAUpQNF6pjzOMAREzUny2bY8V1',
  max: 3,
});
const args = process.argv.slice(2);
// Value following '--project' (if present); limits validation to one project key.
const PROJECT_FILTER = args.find((_, i) => args[i - 1] === '--project') || '';
const VERBOSE = args.includes('--verbose');
const DELAY_MS = 700; // pause before each Jira call to stay under rate limits
/** Resolve after ms milliseconds — simple client-side rate limiting. */
function delay(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}
/** Throttled, authenticated GET-style request against the Jira REST v3 API. */
async function jiraFetch(path: string): Promise<Response> {
  await delay(DELAY_MS); // keep request rate polite
  const headers = {
    'Authorization': `Basic ${JIRA_AUTH}`,
    'Accept': 'application/json',
  };
  return fetch(`${JIRA_URL}/rest/api/3${path}`, { headers });
}
// v3 search/jql uses cursor pagination and returns no 'total' field, so we
// count by walking every page of results.
async function jiraIssueCount(projectKey: string): Promise<number> {
  const jql = encodeURIComponent(`project="${projectKey}"`);
  let total = 0;
  let pageToken: string | undefined;
  for (;;) {
    const tokenParam = pageToken ? `&nextPageToken=${encodeURIComponent(pageToken)}` : '';
    const res = await jiraFetch(`/search/jql?jql=${jql}&maxResults=100&fields=summary${tokenParam}`);
    if (!res.ok) return -1; // -1 signals "count unavailable" to the caller
    const page = await res.json() as { issues: unknown[]; nextPageToken?: string; isLast?: boolean };
    total += page.issues.length;
    if (page.isLast || !page.nextPageToken || page.issues.length === 0) return total;
    pageToken = page.nextPageToken;
  }
}
/**
 * Count placeholder issues (label=migration-placeholder) across all projects.
 *
 * Fix: the previous implementation read `data.total`, but the v3 /search/jql
 * endpoint uses cursor pagination and returns no 'total' field (see the note
 * on jiraIssueCount above), so it always returned -1. We now page through the
 * results and count them, mirroring jiraIssueCount.
 * Returns -1 when any request fails.
 */
async function jiraPlaceholderCount(): Promise<number> {
  const jql = encodeURIComponent(`labels = "migration-placeholder"`);
  let count = 0;
  let nextPageToken: string | undefined;
  while (true) {
    let url = `/search/jql?jql=${jql}&maxResults=100&fields=summary`;
    if (nextPageToken) url += `&nextPageToken=${encodeURIComponent(nextPageToken)}`;
    const res = await jiraFetch(url);
    if (!res.ok) return -1;
    const data = await res.json() as { issues: unknown[]; nextPageToken?: string; isLast?: boolean };
    count += data.issues.length;
    if (data.isLast || !data.nextPageToken || data.issues.length === 0) break;
    nextPageToken = data.nextPageToken;
  }
  return count;
}
/**
 * Sample the first few migrated issues of a project and count how many carry
 * a checklist (custom field customfield_10091).
 */
async function spotCheckChecklists(projectKey: string): Promise<{ total: number; withChecklist: number }> {
  const jql = encodeURIComponent(`project="${projectKey}" AND labels = "migrated-from-task-mcp" ORDER BY key ASC`);
  const res = await jiraFetch(`/search/jql?jql=${jql}&maxResults=3&fields=summary,customfield_10091`);
  if (!res.ok) return { total: 0, withChecklist: 0 };
  const data = await res.json() as { issues: Array<{ key: string; fields: Record<string, unknown> }> };
  const withChecklist = data.issues.filter(issue => issue.fields.customfield_10091).length;
  return { total: data.issues.length, withChecklist };
}
/**
 * Tally the status distribution of (up to 100) migrated issues in a project.
 * Returns a map of status name -> count; empty on HTTP failure.
 */
async function spotCheckStatuses(projectKey: string): Promise<Record<string, number>> {
  const tally: Record<string, number> = {};
  const jql = encodeURIComponent(`project="${projectKey}" AND labels = "migrated-from-task-mcp"`);
  const res = await jiraFetch(`/search/jql?jql=${jql}&maxResults=100&fields=status`);
  if (!res.ok) return tally;
  const data = await res.json() as { issues: Array<{ fields: { status: { name: string } } }> };
  for (const { fields } of data.issues) {
    const statusName = fields.status.name;
    tally[statusName] = (tally[statusName] || 0) + 1;
  }
  return tally;
}
/**
 * Sample a few migrated non-Epic issues and count how many have a parent
 * (i.e. kept their Epic link through the migration).
 */
async function spotCheckEpicLinks(projectKey: string): Promise<{ total: number; withParent: number }> {
  const jql = encodeURIComponent(`project="${projectKey}" AND issuetype != Epic AND labels = "migrated-from-task-mcp" ORDER BY key ASC`);
  const res = await jiraFetch(`/search/jql?jql=${jql}&maxResults=5&fields=parent`);
  if (!res.ok) return { total: 0, withParent: 0 };
  const data = await res.json() as { issues: Array<{ key: string; fields: Record<string, unknown> }> };
  const withParent = data.issues.filter(issue => issue.fields?.parent).length;
  return { total: data.issues.length, withParent };
}
/**
 * Run all nine validation checks in sequence and print a human-readable
 * report. Read-only: Jira is only queried, and all DB statements are SELECTs.
 */
async function main() {
  console.log('=== CF-762 Migration Validation ===\n');
  // 1. Per-project Jira vs DB counts
  console.log('1. Per-project issue counts (Jira vs DB):');
  console.log(' Project | Jira | DB Tasks | DB Migration Map | Match');
  console.log(' --------|------|----------|-----------------|------');
  // NOTE(review): PROJECT_FILTER is interpolated directly into the SQL below.
  // It only comes from a local CLI flag, so exposure is limited, but a
  // parameterized query ($1) would be the safe form — consider fixing.
  const dbProjects = await pool.query(
    `SELECT p.key, COUNT(DISTINCT t.id) as task_count, COUNT(DISTINCT m.old_task_id) as map_count
     FROM projects p
     LEFT JOIN tasks t ON t.project = p.key
     LEFT JOIN task_migration_map m ON m.old_task_id = t.id
     WHERE p.key ~ '^[A-Z]{2,5}$'
     ${PROJECT_FILTER ? `AND p.key = '${PROJECT_FILTER}'` : ''}
     GROUP BY p.key
     HAVING COUNT(t.id) > 0
     ORDER BY p.key`
  );
  let mismatches = 0;
  for (const row of dbProjects.rows) {
    const jiraCount = await jiraIssueCount(row.key);
    // Jira may legitimately hold MORE issues than the DB (placeholders, new
    // issues created since migration), so >= counts as OK; fewer is a loss.
    const match = jiraCount >= parseInt(row.task_count) ? 'OK' : 'MISMATCH';
    if (match !== 'OK') mismatches++;
    console.log(` ${row.key.padEnd(7)} | ${String(jiraCount).padStart(4)} | ${String(row.task_count).padStart(8)} | ${String(row.map_count).padStart(15)} | ${match}`);
  }
  console.log(`\n Mismatches: ${mismatches}\n`);
  // 2. Spot-check checklists (3 projects, or just the filtered one)
  console.log('2. Checklist spot-check:');
  const checkProjects = PROJECT_FILTER ? [PROJECT_FILTER] : ['CF', 'OWUI', 'WHMCS'];
  for (const pk of checkProjects) {
    const result = await spotCheckChecklists(pk);
    console.log(` ${pk}: ${result.withChecklist}/${result.total} issues have checklists`);
  }
  console.log('');
  // 3. Status distribution spot-check
  console.log('3. Status distribution spot-check:');
  const statusProjects = PROJECT_FILTER ? [PROJECT_FILTER] : ['CF', 'GB', 'RUB'];
  for (const pk of statusProjects) {
    const statuses = await spotCheckStatuses(pk);
    console.log(` ${pk}: ${Object.entries(statuses).map(([s, c]) => `${s}=${c}`).join(', ')}`);
  }
  console.log('');
  // 4. Epic→Task parent links
  console.log('4. Epic→Task parent links spot-check:');
  const epicProjects = PROJECT_FILTER ? [PROJECT_FILTER] : ['CF', 'RUB', 'OWUI'];
  for (const pk of epicProjects) {
    const result = await spotCheckEpicLinks(pk);
    console.log(` ${pk}: ${result.withParent}/${result.total} tasks have parent epic`);
  }
  console.log('');
  // 5. NULL FK references — after migration, rows that still reference a task
  // via the old FK column should also carry the mapped jira_issue_key.
  console.log('5. NULL FK references (should be from unmigrated/deleted projects):');
  const nullChecks = [
    { table: 'memories', col: 'jira_issue_key', fk: 'task_id' },
    { table: 'session_context', col: 'jira_issue_key', fk: 'current_task_id' },
    { table: 'task_commits', col: 'jira_issue_key', fk: 'task_id' },
  ];
  for (const { table, col, fk } of nullChecks) {
    try {
      // table/col/fk are hardcoded above, so interpolation here is safe
      const res = await pool.query(
        `SELECT COUNT(*) as cnt FROM ${table} WHERE ${fk} IS NOT NULL AND ${col} IS NULL`
      );
      const count = parseInt(res.rows[0].cnt);
      if (count > 0) {
        console.log(` ${table}: ${count} rows with task_id but no jira_issue_key`);
        if (VERBOSE) {
          const details = await pool.query(
            `SELECT ${fk} FROM ${table} WHERE ${fk} IS NOT NULL AND ${col} IS NULL LIMIT 5`
          );
          for (const d of details.rows) {
            console.log(` - ${d[fk]}`);
          }
        }
      } else {
        console.log(` ${table}: OK (0 NULL refs)`);
      }
    } catch (e: any) {
      // e.g. table missing (already archived) — report and keep validating
      console.log(` ${table}: ${e.message}`);
    }
  }
  console.log('');
  // 6. Migration map total
  const mapTotal = await pool.query('SELECT COUNT(*) as cnt FROM task_migration_map');
  console.log(`6. Total migration mappings: ${mapTotal.rows[0].cnt}`);
  // 7. Placeholder count in Jira
  const placeholders = await jiraPlaceholderCount();
  console.log(`7. Placeholder issues in Jira (label=migration-placeholder): ${placeholders}`);
  // 8. Consolidated projects check — should no longer exist
  console.log('\n8. Deleted source projects (should be gone from Jira):');
  const deletedProjects = ['LIT', 'CARD', 'TES', 'DA', 'AF', 'RUBI', 'ET', 'ZORK', 'IS', 'CLN', 'TOOLS'];
  for (const pk of deletedProjects) {
    const res = await jiraFetch(`/project/${pk}`);
    const status = res.ok ? 'STILL EXISTS' : 'Gone';
    console.log(` ${pk}: ${status}`);
  }
  // 9. Remaining projects
  console.log('\n9. Current Jira projects:');
  const projRes = await jiraFetch('/project');
  if (projRes.ok) {
    const projects = await projRes.json() as Array<{ key: string; name: string }>;
    console.log(` Total: ${projects.length}`);
    // NOTE(review): .sort() mutates the local array in place — harmless here,
    // but toSorted() would avoid the mutation.
    for (const p of projects.sort((a, b) => a.key.localeCompare(b.key))) {
      const count = await jiraIssueCount(p.key);
      console.log(` ${p.key.padEnd(8)} ${String(count).padStart(4)} issues - ${p.name}`);
    }
  }
  await pool.end();
  console.log('\n=== Validation Complete ===');
}
main().catch(err => { console.error(err); process.exit(1); });

View File

@@ -1,12 +1,11 @@
#!/usr/bin/env node
/**
* Task MCP Server
* Session MCP Server
*
* Exposes task management tools via Model Context Protocol.
* Uses PostgreSQL with pgvector for semantic search.
* Forked from task-mcp (CF-762): Sessions, memory, archives, infrastructure.
* Task management now handled by Jira Cloud via mcp-atlassian.
*
* Requires SSH tunnel to infra VM on port 5433:
* ssh -L 5433:localhost:5432 -i ~/.ssh/hetzner_mash_deploy root@46.224.188.157 -N &
* Uses PostgreSQL with pgvector for semantic search on sessions/memories.
*/
// Load environment variables from .env file
@@ -19,17 +18,14 @@ const __dirname = dirname(__filename);
const envPath = join(__dirname, '..', '.env');
const result = dotenv.config({ path: envPath, override: true });
// Initialize Sentry for error tracking (with MCP-aware filtering and PII scrubbing)
// Initialize Sentry for error tracking
import { initSentry } from './sentry.js';
initSentry(process.env.SENTRY_ENVIRONMENT || 'production');
// Log environment loading status (goes to MCP server logs)
if (result.error) {
console.error('Failed to load .env from:', envPath, result.error);
} else {
console.error('Loaded .env from:', envPath);
console.error('LLM_API_KEY present:', !!process.env.LLM_API_KEY);
console.error('LLM_API_URL:', process.env.LLM_API_URL);
}
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
@@ -41,13 +37,10 @@ import {
import { testConnection, close } from './db.js';
import { toolDefinitions } from './tools/index.js';
import { taskAdd, taskList, taskShow, taskClose, taskUpdate, taskInvestigate, taskMoveProject } from './tools/crud.js';
import { taskSimilar, taskContext, taskSessionContext } from './tools/search.js';
import { taskLink, checklistAdd, checklistToggle, taskResolveDuplicate } from './tools/relations.js';
import { epicAdd, epicList, epicShow, epicAssign, epicClose } from './tools/epics.js';
// Kept tools (sessions, memory, archives, infrastructure, docs, delegations, commits)
import { taskDelegations, taskDelegationQuery } from './tools/delegations.js';
import { projectLock, projectUnlock, projectLockStatus, projectContext } from './tools/locks.js';
import { versionAdd, versionList, versionShow, versionUpdate, versionRelease, versionAssignTask } from './tools/versions.js';
import { taskCommitAdd, taskCommitRemove, taskCommitsList, taskLinkCommits, sessionTasks } from './tools/commits.js';
import { changelogAdd, changelogSinceSession, changelogList } from './tools/changelog.js';
import {
@@ -93,7 +86,7 @@ import { projectArchive } from './tools/project-archive.js';
// Create MCP server
const server = new Server(
{ name: 'task-mcp', version: '1.0.0' },
{ name: 'session-mcp', version: '1.0.0' },
{ capabilities: { tools: {} } }
);
@@ -112,132 +105,6 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
const a = args as any;
switch (name) {
// CRUD
case 'task_add':
result = await taskAdd({
title: a.title,
project: a.project,
type: a.type,
priority: a.priority,
description: a.description,
});
break;
case 'task_list':
result = await taskList({
project: a.project,
status: a.status,
type: a.type,
priority: a.priority,
limit: a.limit,
});
break;
case 'task_show':
result = await taskShow(a.id);
break;
case 'task_close':
result = await taskClose(a.id);
break;
case 'task_update':
result = await taskUpdate({
id: a.id,
status: a.status,
priority: a.priority,
type: a.type,
title: a.title,
});
break;
case 'task_investigate':
result = await taskInvestigate({
title: a.title,
project: a.project,
priority: a.priority,
description: a.description,
});
break;
case 'task_move_project':
result = await taskMoveProject({
id: a.id,
target_project: a.target_project,
reason: a.reason,
});
break;
// Search
case 'task_similar':
result = await taskSimilar({
query: a.query,
project: a.project,
limit: a.limit,
});
break;
case 'task_context':
result = await taskContext({
description: a.description,
project: a.project,
limit: a.limit,
});
break;
case 'task_session_context':
result = await taskSessionContext({
id: a.id,
});
break;
// Relations
case 'task_link':
result = await taskLink({
from_id: a.from_id,
to_id: a.to_id,
link_type: a.link_type,
});
break;
case 'task_checklist_add':
result = await checklistAdd({
task_id: a.task_id,
item: a.item,
});
break;
case 'task_checklist_toggle':
result = await checklistToggle({
item_id: a.item_id,
checked: a.checked,
});
break;
case 'task_resolve_duplicate':
result = await taskResolveDuplicate({
duplicate_id: a.duplicate_id,
dominant_id: a.dominant_id,
});
break;
// Epics
case 'epic_add':
result = await epicAdd({
title: a.title,
project: a.project,
description: a.description,
});
break;
case 'epic_list':
result = await epicList({
project: a.project,
status: a.status,
limit: a.limit,
});
break;
case 'epic_show':
result = await epicShow(a.id);
break;
case 'epic_assign':
result = await epicAssign({
task_id: a.task_id,
epic_id: a.epic_id,
});
break;
case 'epic_close':
result = await epicClose(a.id);
break;
// Delegations
case 'task_delegations':
result = await taskDelegations({ task_id: a.task_id });
@@ -275,49 +142,6 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
result = await projectContext();
break;
// Versions
case 'version_add':
result = await versionAdd({
project: a.project,
version: a.version,
build_number: a.build_number,
status: a.status,
release_notes: a.release_notes,
});
break;
case 'version_list':
result = await versionList({
project: a.project,
status: a.status,
limit: a.limit,
});
break;
case 'version_show':
result = await versionShow(a.id);
break;
case 'version_update':
result = await versionUpdate({
id: a.id,
status: a.status,
git_tag: a.git_tag,
git_sha: a.git_sha,
release_notes: a.release_notes,
release_date: a.release_date,
});
break;
case 'version_release':
result = await versionRelease({
id: a.id,
git_tag: a.git_tag,
});
break;
case 'version_assign_task':
result = await versionAssignTask({
task_id: a.task_id,
version_id: a.version_id,
});
break;
// Commits
case 'task_commit_add':
result = await taskCommitAdd({
@@ -482,6 +306,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
working_directory: a.working_directory,
git_branch: a.git_branch,
initial_prompt: a.initial_prompt,
jira_issue_key: a.jira_issue_key,
});
break;
case 'session_update':
@@ -718,7 +543,6 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
// Main entry point
async function main() {
// Set up cleanup
process.on('SIGINT', async () => {
await close();
process.exit(0);
@@ -729,23 +553,20 @@ async function main() {
process.exit(0);
});
// Start server FIRST - respond to MCP protocol immediately
// This is critical: Claude Code sends initialize before we finish DB connection
const transport = new StdioServerTransport();
await server.connect(transport);
console.error('task-mcp: Server started');
console.error('session-mcp: Server started');
// Test database connection in background (lazy - will connect on first tool call anyway)
testConnection().then((connected) => {
if (connected) {
console.error('task-mcp: Connected to database');
console.error('session-mcp: Connected to database');
} else {
console.error('task-mcp: Warning - database not reachable, will retry on tool calls');
console.error('session-mcp: Warning - database not reachable, will retry on tool calls');
}
});
}
main().catch((error) => {
console.error('task-mcp: Fatal error:', error);
console.error('session-mcp: Fatal error:', error);
process.exit(1);
});

277
src/services/jira.ts Normal file
View File

@@ -0,0 +1,277 @@
/**
 * Jira Cloud REST API client for session-mcp.
 * Creates/closes CF issues for sessions and posts session output as comments.
 *
 * Uses JIRA_URL, JIRA_USERNAME, JIRA_API_TOKEN env vars.
 */
// Minimal shape of the issue object returned by POST /issue.
interface JiraIssue {
  key: string;   // issue key, e.g. "CF-123"
  id: string;    // numeric issue id (as a string)
  self: string;  // REST URL of the issue
}
// One entry from GET /issue/{key}/transitions.
interface JiraTransition {
  id: string;    // transition id, sent back when executing the transition
  name: string;  // display name, e.g. "Done"
}
/** Read Jira connection settings from the environment on every call. */
const getConfig = () => ({
  url: process.env.JIRA_URL || 'https://agiliton.atlassian.net',
  username: process.env.JIRA_USERNAME || '',
  token: process.env.JIRA_API_TOKEN || '',
});
/** Build the Basic-auth header value from the configured credentials. */
function getAuthHeader(): string {
  const { username, token } = getConfig();
  const encoded = Buffer.from(`${username}:${token}`).toString('base64');
  return `Basic ${encoded}`;
}
/** True when both a username and an API token are present. */
function isConfigured(): boolean {
  const cfg = getConfig();
  return Boolean(cfg.username && cfg.token);
}
/** Perform a Jira REST v3 request with auth and JSON headers applied. */
async function jiraFetch(path: string, options: RequestInit = {}): Promise<Response> {
  const { url } = getConfig();
  const headers = {
    'Authorization': getAuthHeader(),
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    ...options.headers,
  };
  return fetch(`${url}/rest/api/3${path}`, { ...options, headers });
}
/**
 * Create a Jira issue that tracks a working session in the given project.
 * Optionally links the new issue to a parent task.
 * Returns the new issue key, or null when Jira is unconfigured or the
 * request fails (errors are logged, never thrown).
 */
export async function createSessionIssue(params: {
  sessionNumber: number | null;
  project: string;
  parentIssueKey?: string;
  branch?: string;
  workingDirectory?: string;
}): Promise<{ key: string } | null> {
  if (!isConfigured()) {
    console.error('session-mcp: Jira not configured, skipping issue creation');
    return null;
  }
  const { sessionNumber, project, parentIssueKey, branch, workingDirectory } = params;
  const sessionLabel = sessionNumber ? `#${sessionNumber}` : 'new';
  const summary = `Session ${sessionLabel}: ${project}${parentIssueKey ? ` - ${parentIssueKey}` : ''}`;
  // Assemble the plain-text description, skipping absent optional fields.
  const descriptionLines: string[] = [`Automated session tracking issue.`, `Project: ${project}`];
  if (branch) descriptionLines.push(`Branch: ${branch}`);
  if (workingDirectory) descriptionLines.push(`Working directory: ${workingDirectory}`);
  if (parentIssueKey) descriptionLines.push(`Parent task: ${parentIssueKey}`);
  descriptionLines.push(`Started: ${new Date().toISOString()}`);
  const fields = {
    project: { key: project },
    summary,
    // Jira v3 requires descriptions in ADF (Atlassian Document Format).
    description: {
      type: 'doc',
      version: 1,
      content: [{
        type: 'paragraph',
        content: [{
          type: 'text',
          text: descriptionLines.join('\n'),
        }],
      }],
    },
    issuetype: { name: 'Task' },
    labels: ['session-tracking', `project-${project.toLowerCase()}`],
  };
  try {
    const response = await jiraFetch('/issue', {
      method: 'POST',
      body: JSON.stringify({ fields }),
    });
    if (!response.ok) {
      const body = await response.text();
      console.error(`session-mcp: Jira create issue failed (${response.status}): ${body}`);
      return null;
    }
    const issue = await response.json() as JiraIssue;
    // Best-effort link back to the task this session works on.
    if (parentIssueKey) {
      await linkIssues(issue.key, parentIssueKey, 'relates to');
    }
    return { key: issue.key };
  } catch (err) {
    console.error('session-mcp: Jira create issue error:', err);
    return null;
  }
}
/**
 * Post a markdown comment to a Jira issue, wrapped in an ADF code block so
 * formatting survives verbatim. Returns false when unconfigured or on failure.
 */
export async function addComment(issueKey: string, markdownBody: string): Promise<boolean> {
  if (!isConfigured()) return false;
  const adfBody = {
    type: 'doc',
    version: 1,
    content: [{
      type: 'codeBlock',
      attrs: { language: 'markdown' },
      content: [{
        type: 'text',
        text: markdownBody,
      }],
    }],
  };
  try {
    const response = await jiraFetch(`/issue/${issueKey}/comment`, {
      method: 'POST',
      body: JSON.stringify({ body: adfBody }),
    });
    if (response.ok) return true;
    const body = await response.text();
    console.error(`session-mcp: Jira add comment failed (${response.status}): ${body}`);
    return false;
  } catch (err) {
    console.error('session-mcp: Jira add comment error:', err);
    return false;
  }
}
/**
 * Transition a Jira issue to "Done".
 * Looks up the issue's available transitions and executes the one named
 * "done" or "resolve" (case-insensitive). Returns false when unconfigured,
 * when no matching transition exists, or on any request failure.
 */
export async function transitionToDone(issueKey: string): Promise<boolean> {
  if (!isConfigured()) return false;
  try {
    // Transition ids are workflow-specific, so discover them first.
    const transResponse = await jiraFetch(`/issue/${issueKey}/transitions`);
    if (!transResponse.ok) {
      console.error(`session-mcp: Jira get transitions failed (${transResponse.status})`);
      return false;
    }
    const { transitions } = await transResponse.json() as { transitions: JiraTransition[] };
    const doneTrans = transitions.find(t => {
      const lowered = t.name.toLowerCase();
      return lowered === 'done' || lowered === 'resolve';
    });
    if (!doneTrans) {
      console.error(`session-mcp: No "Done" transition found for ${issueKey}. Available: ${transitions.map(t => t.name).join(', ')}`);
      return false;
    }
    // Execute transition
    const response = await jiraFetch(`/issue/${issueKey}/transitions`, {
      method: 'POST',
      body: JSON.stringify({ transition: { id: doneTrans.id } }),
    });
    if (response.ok) return true;
    const body = await response.text();
    console.error(`session-mcp: Jira transition failed (${response.status}): ${body}`);
    return false;
  } catch (err) {
    console.error('session-mcp: Jira transition error:', err);
    return false;
  }
}
/**
 * Replace a Jira issue's description with the given markdown text, wrapped
 * in an ADF code block (used for the final session summary).
 * Returns false when unconfigured or on failure.
 */
export async function updateIssueDescription(issueKey: string, description: string): Promise<boolean> {
  if (!isConfigured()) return false;
  const adfDescription = {
    type: 'doc',
    version: 1,
    content: [{
      type: 'codeBlock',
      attrs: { language: 'markdown' },
      content: [{
        type: 'text',
        text: description,
      }],
    }],
  };
  try {
    const response = await jiraFetch(`/issue/${issueKey}`, {
      method: 'PUT',
      body: JSON.stringify({ fields: { description: adfDescription } }),
    });
    if (response.ok) return true;
    const body = await response.text();
    console.error(`session-mcp: Jira update description failed (${response.status}): ${body}`);
    return false;
  } catch (err) {
    console.error('session-mcp: Jira update description error:', err);
    return false;
  }
}
/**
 * Create a link of the given type between two Jira issues.
 * Returns false when unconfigured or on failure (errors are logged).
 */
export async function linkIssues(
  inwardKey: string,
  outwardKey: string,
  linkType: string = 'relates to'
): Promise<boolean> {
  if (!isConfigured()) return false;
  const payload = {
    type: { name: linkType },
    inwardIssue: { key: inwardKey },
    outwardIssue: { key: outwardKey },
  };
  try {
    const response = await jiraFetch('/issueLink', {
      method: 'POST',
      body: JSON.stringify(payload),
    });
    if (response.ok) return true;
    const body = await response.text();
    console.error(`session-mcp: Jira link issues failed (${response.status}): ${body}`);
    return false;
  } catch (err) {
    console.error('session-mcp: Jira link issues error:', err);
    return false;
  }
}

View File

@@ -1,704 +0,0 @@
// CRUD operations for tasks
import { query, queryOne, execute, getNextTaskId, getProjectKey, detectProjectFromCwd, getClient } from '../db.js';
import { getEmbedding, formatEmbedding } from '../embeddings.js';
import type { Task, ChecklistItem, TaskLink } from '../types.js';
import { getRecentDelegations } from './delegations.js';
import { getTaskCommits } from './commits.js';
import { taskLink } from './relations.js';
import { sessionNoteAdd } from './session-docs.js';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
/**
 * Resolve the current session ID.
 * Resolution order: CLAUDE_SESSION_ID env var → session-memory cache file →
 * a freshly generated timestamp-based ID.
 */
function getSessionId(): string {
  const fromEnv = process.env.CLAUDE_SESSION_ID;
  if (fromEnv) return fromEnv;
  // session-memory writes the active session ID to this cache file
  const cachePath = path.join(os.homedir(), '.cache', 'session-memory', 'current_session');
  try {
    const cached = fs.readFileSync(cachePath, 'utf-8').trim();
    if (cached) return cached;
  } catch {
    // Cache file missing or unreadable — fall through to generating an ID.
  }
  // Generate a new session ID from the current UTC time, e.g. session_20260208T0
  const stamp = new Date().toISOString().replace(/[-:T]/g, '').slice(0, 15);
  return `session_${stamp}`;
}
/**
 * Append a task_activity row tying a task event to the current session.
 * Best-effort: any DB error is logged and swallowed so activity tracking can
 * never break the primary task operation.
 */
async function recordActivity(
  task_id: string,
  activity_type: 'created' | 'updated' | 'status_change' | 'closed',
  old_value?: string,
  new_value?: string
): Promise<void> {
  const session_id = getSessionId();
  const insertSql =
    `INSERT INTO task_activity (task_id, session_id, activity_type, old_value, new_value)
       VALUES ($1, $2, $3, $4, $5)`;
  try {
    await execute(insertSql, [task_id, session_id, activity_type, old_value || null, new_value || null]);
  } catch {
    // Deliberately non-fatal — see docblock above.
    console.error('Failed to record task activity');
  }
}
// Arguments accepted by taskAdd().
interface TaskAddArgs {
  title: string;        // task title (required)
  project?: string;     // project name or key; defaults to 'Unknown'
  type?: string;        // task type; defaults to 'task'
  priority?: string;    // priority label; defaults to 'P2'
  description?: string; // optional long-form description
}
// Filter arguments for listing tasks.
interface TaskListArgs {
  project?: string;
  status?: string;
  type?: string;
  priority?: string;
  limit?: number;       // maximum number of rows to return
}
// Fields updatable on an existing task; only 'id' is required.
interface TaskUpdateArgs {
  id: string;
  status?: string;
  priority?: string;
  type?: string;
  title?: string;
}
/**
* Create a new task
*/
export async function taskAdd(args: TaskAddArgs): Promise<string> {
const { title, project = 'Unknown', type = 'task', priority = 'P2', description = '' } = args;
// Get project key
const projectKey = await getProjectKey(project);
// Generate embedding for duplicate detection
const embedText = description ? `${title}. ${description}` : title;
const embedding = await getEmbedding(embedText);
const embeddingValue = embedding ? formatEmbedding(embedding) : null;
// Check for similar/duplicate tasks (only if embedding succeeded)
// CF-450: Check both open AND completed tasks to avoid circular work
let duplicateWarning = '';
if (embeddingValue) {
const similarTasks = await query<{ id: string; title: string; status: string; description: string; similarity: number }>(
`SELECT id, title, status, description, 1 - (embedding <=> $1) as similarity
FROM tasks
WHERE project = $2 AND embedding IS NOT NULL
ORDER BY embedding <=> $1
LIMIT 5`,
[embeddingValue, projectKey]
);
// Warn if highly similar tasks exist (>70% similarity)
const highSimilarity = similarTasks.filter(t => t.similarity > 0.70);
if (highSimilarity.length > 0) {
duplicateWarning = '\n\n⚠ Similar tasks found:\n';
const openTasks = highSimilarity.filter(t => t.status !== 'completed');
const completedTasks = highSimilarity.filter(t => t.status === 'completed');
if (openTasks.length > 0) {
duplicateWarning += '\n**Open/In Progress:**\n';
for (const t of openTasks) {
const pct = Math.round(t.similarity * 100);
duplicateWarning += ` - ${t.id}: ${t.title} (${pct}% match, ${t.status})\n`;
}
}
if (completedTasks.length > 0) {
duplicateWarning += '\n**Previously Completed:**\n';
for (const t of completedTasks) {
const pct = Math.round(t.similarity * 100);
duplicateWarning += ` - ${t.id}: ${t.title} (${pct}% match)\n`;
// Show snippet of solution/outcome from description
if (t.description) {
const snippet = t.description.substring(0, 150).replace(/\n/g, ' ').replace(/"/g, '\\"');
const ellipsis = t.description.length > 150 ? '...' : '';
duplicateWarning += ` Context: "${snippet}${ellipsis}"\n`;
}
}
duplicateWarning += '\n 💡 Use "task show <id>" to see full solution before recreating work\n';
}
duplicateWarning += '\nConsider linking with: task link <new-id> <related-id> relates_to';
}
}
// Get next task ID
const taskId = await getNextTaskId(projectKey);
// Get current session ID for linking
const session_id = getSessionId();
// Insert task with session_id
if (embeddingValue) {
await execute(
`INSERT INTO tasks (id, project, title, description, type, status, priority, session_id, embedding)
VALUES ($1, $2, $3, $4, $5, 'open', $6, $7, $8)`,
[taskId, projectKey, title, description, type, priority, session_id, embeddingValue]
);
} else {
await execute(
`INSERT INTO tasks (id, project, title, description, type, status, priority, session_id)
VALUES ($1, $2, $3, $4, $5, 'open', $6, $7)`,
[taskId, projectKey, title, description, type, priority, session_id]
);
}
// Record activity for session tracking
await recordActivity(taskId, 'created', undefined, 'open');
// CF-572 Phase 3: Auto-capture conversation context as session note
// Ensures task context is preserved even if session exits abnormally
if (session_id) {
try {
const contextNote = description
? `Created task: ${title}\n\nDescription:\n${description}`
: `Created task: ${title}`;
await sessionNoteAdd({
session_id,
note_type: 'context',
content: contextNote,
});
} catch (err) {
// Silently fail context capture - don't block task creation
console.error('Failed to capture task context for session:', err);
}
}
// Enhanced auto-linking logic (CF-166)
let autoLinkMessage = '';
try {
const sessionContext = await queryOne<{
current_task_id: string | null;
investigation_parent_id: string | null;
auto_link_enabled: boolean;
}>(
`SELECT current_task_id, investigation_parent_id, auto_link_enabled
FROM session_context WHERE session_id = $1`,
[session_id]
);
if (sessionContext?.auto_link_enabled !== false) {
const linkedTasks: string[] = [];
// 1. Auto-link to investigation parent if this is created during an investigation
if (sessionContext?.investigation_parent_id) {
await execute(
`INSERT INTO task_links (from_task_id, to_task_id, link_type, auto_linked)
VALUES ($1, $2, 'relates_to', true)
ON CONFLICT DO NOTHING`,
[taskId, sessionContext.investigation_parent_id]
);
linkedTasks.push(`${sessionContext.investigation_parent_id} (investigation)`);
}
// 2. Auto-link to current working task if different from investigation parent
if (sessionContext?.current_task_id &&
sessionContext.current_task_id !== sessionContext?.investigation_parent_id) {
await execute(
`INSERT INTO task_links (from_task_id, to_task_id, link_type, auto_linked)
VALUES ($1, $2, 'relates_to', true)
ON CONFLICT DO NOTHING`,
[taskId, sessionContext.current_task_id]
);
linkedTasks.push(`${sessionContext.current_task_id} (current task)`);
}
// 3. Time-based auto-linking: find tasks created within 1 hour in same session
if (!sessionContext?.investigation_parent_id && !sessionContext?.current_task_id) {
const recentTasks = await query<{ id: string; title: string }>(
`SELECT id, title FROM tasks
WHERE session_id = $1 AND id != $2
AND created_at > NOW() - INTERVAL '1 hour'
AND status != 'completed'
ORDER BY created_at DESC
LIMIT 3`,
[session_id, taskId]
);
for (const task of recentTasks) {
await execute(
`INSERT INTO task_links (from_task_id, to_task_id, link_type, auto_linked)
VALUES ($1, $2, 'relates_to', true)
ON CONFLICT DO NOTHING`,
[taskId, task.id]
);
linkedTasks.push(`${task.id} (recent)`);
}
}
if (linkedTasks.length > 0) {
autoLinkMessage = `\n\n🔗 Auto-linked to: ${linkedTasks.join(', ')}`;
}
}
} catch (error) {
// Log but don't fail if auto-linking fails
console.error('Auto-linking failed:', error);
}
return `Created: ${taskId}\n Title: ${title}\n Type: ${type}\n Priority: ${priority}\n Project: ${projectKey}${embedding ? '\n (embedded for semantic search)' : ''}${duplicateWarning}${autoLinkMessage}`;
}
/**
* List tasks with filters
* Auto-detects project from CWD if not explicitly provided
*/
export async function taskList(args: TaskListArgs): Promise<string> {
const { project, status, type, priority, limit = 20 } = args;
let whereClause = 'WHERE 1=1';
const params: unknown[] = [];
let paramIndex = 1;
// Auto-detect project from CWD if not explicitly provided
const effectiveProject = project || detectProjectFromCwd();
if (effectiveProject) {
const projectKey = await getProjectKey(effectiveProject);
whereClause += ` AND project = $${paramIndex++}`;
params.push(projectKey);
}
if (status) {
whereClause += ` AND status = $${paramIndex++}`;
params.push(status);
}
if (type) {
whereClause += ` AND type = $${paramIndex++}`;
params.push(type);
}
if (priority) {
whereClause += ` AND priority = $${paramIndex++}`;
params.push(priority);
}
params.push(limit);
const tasks = await query<Task>(
`SELECT id, title, type, status, priority, project
FROM tasks
${whereClause}
ORDER BY
CASE priority WHEN 'P0' THEN 0 WHEN 'P1' THEN 1 WHEN 'P2' THEN 2 ELSE 3 END,
created_at DESC
LIMIT $${paramIndex}`,
params
);
if (tasks.length === 0) {
return `No tasks found${effectiveProject ? ` for project ${effectiveProject}` : ''}`;
}
const lines = tasks.map(t => {
const statusIcon = t.status === 'completed' ? '[x]' : t.status === 'in_progress' ? '[>]' : t.status === 'blocked' ? '[!]' : '[ ]';
const typeLabel = t.type !== 'task' ? ` [${t.type}]` : '';
return `${statusIcon} ${t.priority} ${t.id}: ${t.title}${typeLabel}`;
});
return `Tasks${effectiveProject ? ` (${effectiveProject})` : ''}:\n\n${lines.join('\n')}`;
}
/**
 * Show task details as a Markdown report: metadata, checklist progress,
 * dependency links, related/duplicate tasks, linked commits, and delegations.
 *
 * @param id - Task ID (e.g., "ST-1", "VPN-45")
 * @returns Markdown-formatted report, or a "Task not found" message.
 */
export async function taskShow(id: string): Promise<string> {
  // Type the to_char() aliases directly on the row type instead of casting
  // through `as unknown as {...}` at every use site (the original pattern).
  type TaskRow = Task & {
    session_id?: string;
    created: string;
    updated: string;
    completed: string | null;
  };
  const task = await queryOne<TaskRow>(
    `SELECT id, project, title, description, type, status, priority, session_id,
            to_char(created_at, 'YYYY-MM-DD HH24:MI') as created,
            to_char(updated_at, 'YYYY-MM-DD HH24:MI') as updated,
            to_char(completed_at, 'YYYY-MM-DD HH24:MI') as completed
     FROM tasks WHERE id = $1`,
    [id]
  );
  if (!task) {
    return `Task not found: ${id}`;
  }
  let output = `# ${task.id}\n\n`;
  output += `**Title:** ${task.title}\n`;
  output += `**Project:** ${task.project}\n`;
  output += `**Type:** ${task.type}\n`;
  output += `**Status:** ${task.status}\n`;
  output += `**Priority:** ${task.priority}\n`;
  output += `**Created:** ${task.created}\n`;
  output += `**Updated:** ${task.updated}\n`;
  if (task.completed) {
    output += `**Completed:** ${task.completed}\n`;
  }
  if (task.session_id) {
    output += `**Created in session:** ${task.session_id}\n`;
  }
  if (task.description) {
    output += `\n**Description:**\n${task.description}\n`;
  }
  // Checklist with done/total progress
  const checklist = await query<ChecklistItem>(
    `SELECT id, item, checked FROM task_checklist
     WHERE task_id = $1 ORDER BY position, id`,
    [id]
  );
  if (checklist.length > 0) {
    const done = checklist.filter(c => c.checked).length;
    output += `\n**Checklist:** (${done}/${checklist.length})\n`;
    for (const item of checklist) {
      output += `  ${item.checked ? '[x]' : '[ ]'} ${item.item} (#${item.id})\n`;
    }
  }
  // Dependencies: tasks that block this one, and tasks this one blocks
  const blockedBy = await query<{ id: string; title: string }>(
    `SELECT t.id, t.title FROM task_links l
     JOIN tasks t ON t.id = l.from_task_id
     WHERE l.to_task_id = $1 AND l.link_type = 'blocks'`,
    [id]
  );
  const blocks = await query<{ id: string; title: string }>(
    `SELECT t.id, t.title FROM task_links l
     JOIN tasks t ON t.id = l.to_task_id
     WHERE l.from_task_id = $1 AND l.link_type = 'blocks'`,
    [id]
  );
  if (blockedBy.length > 0) {
    output += `\n**Blocked by:**\n`;
    for (const t of blockedBy) {
      output += `  - ${t.id}: ${t.title}\n`;
    }
  }
  if (blocks.length > 0) {
    output += `\n**Blocks:**\n`;
    for (const t of blocks) {
      output += `  - ${t.id}: ${t.title}\n`;
    }
  }
  // Related tasks (only from_task_id direction is queried; treated as symmetric)
  const relatesTo = await query<{ id: string; title: string }>(
    `SELECT t.id, t.title FROM task_links l
     JOIN tasks t ON t.id = l.to_task_id
     WHERE l.from_task_id = $1 AND l.link_type = 'relates_to'`,
    [id]
  );
  if (relatesTo.length > 0) {
    output += `\n**Related:**\n`;
    for (const t of relatesTo) {
      output += `  - ${t.id}: ${t.title}\n`;
    }
  }
  // Duplicates. NOTE(review): only the from_task_id direction is queried, but
  // taskMoveProject inserts its 'duplicates' link as old→new only, so the new
  // task's view may miss that link — confirm whether symmetric rows exist.
  const duplicates = await query<{ id: string; title: string }>(
    `SELECT t.id, t.title FROM task_links l
     JOIN tasks t ON t.id = l.to_task_id
     WHERE l.from_task_id = $1 AND l.link_type = 'duplicates'`,
    [id]
  );
  if (duplicates.length > 0) {
    output += `\n**Duplicates:**\n`;
    for (const t of duplicates) {
      output += `  - ${t.id}: ${t.title}\n`;
    }
  }
  // Linked git commits (pre-formatted section or empty)
  const commitHistory = await getTaskCommits(id);
  if (commitHistory) {
    output += commitHistory;
  }
  // Recent delegation records (pre-formatted section or empty)
  const delegationHistory = await getRecentDelegations(id);
  if (delegationHistory) {
    output += delegationHistory;
  }
  return output;
}
/**
 * Mark a task as completed.
 * Records a 'closed' activity entry carrying the prior status for auditing.
 */
export async function taskClose(id: string): Promise<string> {
  // Capture the pre-close status so the activity log shows the transition.
  const existing = await queryOne<{ status: string }>(
    `SELECT status FROM tasks WHERE id = $1`,
    [id]
  );

  const updatedRows = await execute(
    `UPDATE tasks
     SET status = 'completed', completed_at = NOW(), updated_at = NOW()
     WHERE id = $1`,
    [id]
  );
  if (updatedRows === 0) {
    return `Task not found: ${id}`;
  }

  await recordActivity(id, 'closed', existing?.status, 'completed');
  return `Closed: ${id}`;
}
/**
 * Update one or more task fields (status, priority, type, title).
 * Status changes also maintain the session_context working-task pointer and
 * are recorded in the activity log.
 */
export async function taskUpdate(args: TaskUpdateArgs): Promise<string> {
  const { id, status, priority, type, title } = args;

  // Fetch the current status up front for change tracking.
  const existing = await queryOne<{ status: string }>(`SELECT status FROM tasks WHERE id = $1`, [id]);
  if (!existing) {
    return `Task not found: ${id}`;
  }

  const setClauses: string[] = [];
  const params: unknown[] = [];

  // Append "column = $n" for one provided field.
  const addSet = (column: string, value: unknown): void => {
    params.push(value);
    setClauses.push(`${column} = $${params.length}`);
  };

  if (status) {
    addSet('status', status);
    // Completing a task also stamps completed_at.
    if (status === 'completed') {
      setClauses.push(`completed_at = NOW()`);
    }
  }
  if (priority) {
    addSet('priority', priority);
  }
  if (type) {
    addSet('type', type);
  }
  if (title) {
    addSet('title', title);
  }

  if (setClauses.length === 0) {
    return 'No updates specified';
  }
  setClauses.push('updated_at = NOW()');

  params.push(id);
  const changed = await execute(
    `UPDATE tasks SET ${setClauses.join(', ')} WHERE id = $${params.length}`,
    params
  );
  if (changed === 0) {
    return `Task not found: ${id}`;
  }

  // Log either a status transition or a generic update.
  if (status && status !== existing.status) {
    await recordActivity(id, 'status_change', existing.status, status);
  } else {
    await recordActivity(id, 'updated');
  }

  // Keep session_context's current_task_id in sync with status changes.
  if (status) {
    const session_id = getSessionId();
    try {
      switch (status) {
        case 'in_progress':
          // Track this task as the session's active working task.
          await execute(
            `INSERT INTO session_context (session_id, current_task_id)
             VALUES ($1, $2)
             ON CONFLICT (session_id) DO UPDATE SET current_task_id = $2, updated_at = NOW()`,
            [session_id, id]
          );
          break;
        case 'completed':
          // Drop the context row only when it points at this task.
          await execute(
            `DELETE FROM session_context
             WHERE session_id = $1 AND current_task_id = $2`,
            [session_id, id]
          );
          break;
      }
    } catch {
      // Session context is best-effort; ignore failures.
    }
  }
  return `Updated: ${id}`;
}
/**
 * Start an investigation workflow (CF-166).
 * Creates an 'investigation' task via taskAdd and records it as the session's
 * investigation parent, so subsequently created tasks auto-link to it.
 */
export async function taskInvestigate(args: TaskAddArgs): Promise<string> {
  const { title, project, priority = 'P1', description = '' } = args;

  const created = await taskAdd({
    title,
    project,
    type: 'investigation',
    priority,
    description: description || 'Investigation task to coordinate related subtasks',
  });

  // taskAdd reports "Created: <id>" on success; pull the ID back out.
  const match = /Created: ([\w-]+)/.exec(created);
  if (!match) {
    // Unexpected message format: surface taskAdd's output unchanged.
    return created;
  }
  const investigationId = match[1];

  // Record this task as both the current task and the investigation parent.
  const session_id = getSessionId();
  try {
    await execute(
      `INSERT INTO session_context (session_id, current_task_id, investigation_parent_id)
       VALUES ($1, $2, $2)
       ON CONFLICT (session_id) DO UPDATE
       SET investigation_parent_id = $2, current_task_id = $2, updated_at = NOW()`,
      [session_id, investigationId]
    );
  } catch (error) {
    console.error('Failed to set investigation context:', error);
  }

  return created + '\n\n🔍 Investigation started! All new tasks will auto-link to this investigation.';
}
/** Arguments for taskMoveProject (CF-301). */
interface TaskMoveProjectArgs {
  // ID of the task to move (e.g., "CF-295")
  id: string;
  // Destination project key; must exist in the projects table
  target_project: string;
  // Optional explanation, recorded in the activity log and old task's description
  reason?: string;
}
/**
 * Move task to different project while preserving history (CF-301)
 * Creates new task with next ID in target project and transfers all related data
 *
 * The original task is not deleted: it is marked completed, annotated with a
 * pointer to the new ID, and linked via a 'duplicates' link so stale
 * references still resolve.
 */
export async function taskMoveProject(args: TaskMoveProjectArgs): Promise<string> {
  const { id, target_project, reason } = args;
  // Validate source task exists
  const task = await queryOne<{ project: string; status: string }>(
    `SELECT project, status FROM tasks WHERE id = $1`,
    [id]
  );
  if (!task) {
    return `Task not found: ${id}`;
  }
  if (task.project === target_project) {
    return `Task ${id} is already in project ${target_project}`;
  }
  // Validate target project exists
  const targetProj = await queryOne<{ key: string }>(
    `SELECT key FROM projects WHERE key = $1`,
    [target_project]
  );
  if (!targetProj) {
    return `Target project not found: ${target_project}`;
  }
  // Generate new ID using getNextTaskId
  // NOTE(review): this runs before BEGIN, so an aborted move still consumes a
  // sequence number (leaves an ID gap) — presumably acceptable; confirm.
  const newId = await getNextTaskId(target_project);
  // Execute move in transaction
  const client = await getClient();
  try {
    await client.query('BEGIN');
    // Insert new task (copy of old)
    // created_at/completed_at carry over unchanged; only updated_at is reset.
    await client.query(`
      INSERT INTO tasks (id, project, title, description, type, status, priority,
                         version_id, epic_id, embedding, created_at, updated_at,
                         completed_at, session_id)
      SELECT $1, $2, title, description, type, status, priority,
             version_id, epic_id, embedding, created_at, NOW(), completed_at, session_id
      FROM tasks WHERE id = $3
    `, [newId, target_project, id]);
    // Transfer all related records
    // Each statement repoints one child table from the old ID to the new one;
    // task_links is updated in both directions since either endpoint may match.
    const transfers = [
      `UPDATE task_checklist SET task_id = $1 WHERE task_id = $2`,
      `UPDATE task_commits SET task_id = $1 WHERE task_id = $2`,
      `UPDATE task_delegations SET task_id = $1 WHERE task_id = $2`,
      `UPDATE task_activity SET task_id = $1 WHERE task_id = $2`,
      `UPDATE task_links SET from_task_id = $1 WHERE from_task_id = $2`,
      `UPDATE task_links SET to_task_id = $1 WHERE to_task_id = $2`,
      `UPDATE deployments SET task_id = $1 WHERE task_id = $2`,
      `UPDATE memories SET task_id = $1 WHERE task_id = $2`,
      `UPDATE session_context SET current_task_id = $1 WHERE current_task_id = $2`,
      `UPDATE session_context SET investigation_parent_id = $1 WHERE investigation_parent_id = $2`,
      `UPDATE task_learning_effectiveness SET task_id = $1 WHERE task_id = $2`,
    ];
    for (const sql of transfers) {
      await client.query(sql, [newId, id]);
    }
    // Record activity (on the NEW task) noting the project move and reason
    await client.query(`
      INSERT INTO task_activity (task_id, activity_type, old_value, new_value, note, created_at)
      VALUES ($1, 'project_moved', $2, $3, $4, NOW())
    `, [newId, task.project, target_project, reason || 'Moved via task_move_project']);
    // Update old task: close it and append a forwarding note to its description
    await client.query(`
      UPDATE tasks
      SET status = 'completed',
          completed_at = NOW(),
          updated_at = NOW(),
          description = COALESCE(description, '') || $1
      WHERE id = $2
    `, [`\n\n---\n**Moved to ${newId}**${reason ? ` (Reason: ${reason})` : ''}`, id]);
    // Add duplicate link (old → new only)
    await client.query(`
      INSERT INTO task_links (from_task_id, to_task_id, link_type, created_at)
      VALUES ($1, $2, 'duplicates', NOW())
    `, [id, newId]);
    await client.query('COMMIT');
    return `Moved ${id} → ${newId} (project: ${task.project} → ${target_project})`;
  } catch (error) {
    // Roll back the partial move, then rethrow for the caller to handle.
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}

View File

@@ -1,241 +0,0 @@
// Epic operations for task management
import { query, queryOne, execute, getProjectKey, detectProjectFromCwd } from '../db.js';
import { getEmbedding, formatEmbedding } from '../embeddings.js';
import type { Epic, Task } from '../types.js';
/** Arguments for epicAdd. */
interface EpicAddArgs {
  // Epic title (required)
  title: string;
  // Project key; epicAdd defaults this to 'Unknown' when omitted
  project?: string;
  // Optional scope description (also folded into the embedding text)
  description?: string;
}
/** Filters for epicList. */
interface EpicListArgs {
  // Project key filter; auto-detected from CWD when omitted
  project?: string;
  // Epic status filter (e.g., 'open', 'in_progress', 'completed')
  status?: string;
  // Max rows to return; epicList defaults this to 20
  limit?: number;
}
/** Arguments for epicAssign. */
interface EpicAssignArgs {
  // Task to assign
  task_id: string;
  // Epic receiving the task
  epic_id: string;
}
/**
 * Allocate the next epic ID for a project (format: "<KEY>-E<n>").
 * Upserts into epic_sequences so the counter is created on first use and
 * incremented atomically thereafter.
 */
async function getNextEpicId(projectKey: string): Promise<string> {
  const row = await queryOne<{ next_id: number }>(
    `INSERT INTO epic_sequences (project, next_id) VALUES ($1, 1)
     ON CONFLICT (project) DO UPDATE SET next_id = epic_sequences.next_id + 1
     RETURNING next_id`,
    [projectKey]
  );
  const sequence = row?.next_id || 1;
  return `${projectKey}-E${sequence}`;
}
/**
 * Create a new epic with an auto-generated "<KEY>-E<n>" ID.
 * Embeds the title (plus description when present) for semantic search;
 * inserts without the embedding column when embedding is unavailable.
 */
export async function epicAdd(args: EpicAddArgs): Promise<string> {
  const { title, project = 'Unknown', description = '' } = args;

  const projectKey = await getProjectKey(project);
  const epicId = await getNextEpicId(projectKey);

  // Build embedding input from title and optional description.
  const embedText = description ? `${title}. ${description}` : title;
  const embedding = await getEmbedding(embedText);
  const embeddingValue = embedding ? formatEmbedding(embedding) : null;

  if (embeddingValue) {
    await execute(
      `INSERT INTO epics (id, project, title, description, embedding)
       VALUES ($1, $2, $3, $4, $5)`,
      [epicId, projectKey, title, description, embeddingValue]
    );
  } else {
    await execute(
      `INSERT INTO epics (id, project, title, description)
       VALUES ($1, $2, $3, $4)`,
      [epicId, projectKey, title, description]
    );
  }

  const embedNote = embedding ? '\n  (embedded for semantic search)' : '';
  return `Created epic: ${epicId}\n  Title: ${title}\n  Project: ${projectKey}${embedNote}`;
}
/**
* List epics with filters
* Auto-detects project from CWD if not explicitly provided
*/
export async function epicList(args: EpicListArgs): Promise<string> {
const { project, status, limit = 20 } = args;
let whereClause = 'WHERE 1=1';
const params: unknown[] = [];
let paramIndex = 1;
// Auto-detect project from CWD if not explicitly provided
const effectiveProject = project || detectProjectFromCwd();
if (effectiveProject) {
const projectKey = await getProjectKey(effectiveProject);
whereClause += ` AND e.project = $${paramIndex++}`;
params.push(projectKey);
}
if (status) {
whereClause += ` AND e.status = $${paramIndex++}`;
params.push(status);
}
params.push(limit);
const epics = await query<Epic & { task_count: number; open_count: number }>(
`SELECT e.id, e.title, e.status, e.project,
COUNT(t.id) as task_count,
COUNT(t.id) FILTER (WHERE t.status != 'completed') as open_count
FROM epics e
LEFT JOIN tasks t ON t.epic_id = e.id
${whereClause}
GROUP BY e.id, e.title, e.status, e.project, e.created_at
ORDER BY
CASE e.status WHEN 'in_progress' THEN 0 WHEN 'open' THEN 1 ELSE 2 END,
e.created_at DESC
LIMIT $${paramIndex}`,
params
);
if (epics.length === 0) {
return `No epics found${effectiveProject ? ` for project ${effectiveProject}` : ''}`;
}
const lines = epics.map(e => {
const statusIcon = e.status === 'completed' ? '[x]' : e.status === 'in_progress' ? '[>]' : '[ ]';
const progress = e.task_count > 0 ? ` (${e.task_count - e.open_count}/${e.task_count} done)` : '';
return `${statusIcon} ${e.id}: ${e.title}${progress}`;
});
return `Epics${effectiveProject ? ` (${effectiveProject})` : ''}:\n\n${lines.join('\n')}`;
}
/**
 * Show one epic as Markdown: metadata, description, and its assigned tasks
 * with a done/total progress count.
 */
export async function epicShow(id: string): Promise<string> {
  const epic = await queryOne<Epic & { created: string }>(
    `SELECT id, project, title, description, status,
            to_char(created_at, 'YYYY-MM-DD HH24:MI') as created
     FROM epics WHERE id = $1`,
    [id]
  );
  if (!epic) {
    return `Epic not found: ${id}`;
  }

  // Assemble the report as sections, joined at the end.
  const sections: string[] = [
    `# ${epic.id}\n\n`,
    `**Title:** ${epic.title}\n`,
    `**Project:** ${epic.project}\n`,
    `**Status:** ${epic.status}\n`,
    `**Created:** ${epic.created}\n`,
  ];
  if (epic.description) {
    sections.push(`\n**Description:**\n${epic.description}\n`);
  }

  // Tasks in this epic: active work first, then by priority.
  const tasks = await query<Task>(
    `SELECT id, title, status, priority, type
     FROM tasks
     WHERE epic_id = $1
     ORDER BY
       CASE status WHEN 'in_progress' THEN 0 WHEN 'open' THEN 1 WHEN 'blocked' THEN 2 ELSE 3 END,
       CASE priority WHEN 'P0' THEN 0 WHEN 'P1' THEN 1 WHEN 'P2' THEN 2 ELSE 3 END`,
    [id]
  );
  if (tasks.length === 0) {
    sections.push(`\n**Tasks:** None assigned\n`);
  } else {
    const done = tasks.filter(t => t.status === 'completed').length;
    sections.push(`\n**Tasks:** (${done}/${tasks.length} done)\n`);
    for (const t of tasks) {
      const icon =
        t.status === 'completed' ? '[x]' :
        t.status === 'in_progress' ? '[>]' :
        t.status === 'blocked' ? '[!]' : '[ ]';
      sections.push(`  ${icon} ${t.priority} ${t.id}: ${t.title}\n`);
    }
  }
  return sections.join('');
}
/**
 * Assign a task to an epic.
 * Verifies the epic exists first; reports which of the two was missing.
 */
export async function epicAssign(args: EpicAssignArgs): Promise<string> {
  const { task_id, epic_id } = args;

  // The epic must exist before we point a task at it.
  const epic = await queryOne<{ id: string }>(`SELECT id FROM epics WHERE id = $1`, [epic_id]);
  if (!epic) {
    return `Epic not found: ${epic_id}`;
  }

  const updated = await execute(
    `UPDATE tasks SET epic_id = $1, updated_at = NOW() WHERE id = $2`,
    [epic_id, task_id]
  );
  return updated === 0
    ? `Task not found: ${task_id}`
    : `Assigned ${task_id} to epic ${epic_id}`;
}
/**
 * Unassign a task from its epic by nulling out epic_id.
 */
export async function epicUnassign(task_id: string): Promise<string> {
  const updated = await execute(
    `UPDATE tasks SET epic_id = NULL, updated_at = NOW() WHERE id = $1`,
    [task_id]
  );
  return updated === 0
    ? `Task not found: ${task_id}`
    : `Unassigned ${task_id} from its epic`;
}
/**
 * Close an epic (mark as completed).
 * No-ops with a message when the epic is already completed.
 */
export async function epicClose(id: string): Promise<string> {
  // Look up the epic so we can report its title and detect double-closing.
  const epic = await queryOne<{ id: string; title: string; status: string }>(
    `SELECT id, title, status FROM epics WHERE id = $1`,
    [id]
  );
  if (!epic) {
    return `Epic not found: ${id}`;
  }
  if (epic.status === 'completed') {
    return `Epic already completed: ${id}`;
  }

  await execute(
    `UPDATE epics SET status = 'completed', updated_at = NOW() WHERE id = $1`,
    [id]
  );
  return `Closed: ${id} (${epic.title})`;
}

View File

@@ -1,340 +1,16 @@
// Tool definitions for task-mcp
// Tool definitions for session-mcp
// Forked from task-mcp (CF-762): Removed task/epic/version/search/relations tools
// Those are now handled by Jira Cloud via mcp-atlassian
export const toolDefinitions = [
// CRUD Tools
{
name: 'task_add',
description: 'Create a new task with auto-generated ID and semantic embedding',
inputSchema: {
type: 'object',
properties: {
title: { type: 'string', description: 'Task title (required)' },
project: { type: 'string', description: 'Project key (e.g., ST, VPN). Auto-detected from CWD if not provided.' },
type: { type: 'string', enum: ['task', 'bug', 'feature', 'debt', 'investigation'], description: 'Task type (default: task)' },
priority: { type: 'string', enum: ['P0', 'P1', 'P2', 'P3'], description: 'Priority level (default: P2)' },
description: { type: 'string', description: 'Optional description' },
},
required: ['title'],
},
},
{
name: 'task_list',
description: 'List tasks with optional filters',
inputSchema: {
type: 'object',
properties: {
project: { type: 'string', description: 'Filter by project key' },
status: { type: 'string', enum: ['pending', 'open', 'in_progress', 'testing', 'blocked', 'completed'], description: 'Filter by status' },
type: { type: 'string', enum: ['task', 'bug', 'feature', 'debt', 'investigation'], description: 'Filter by type' },
priority: { type: 'string', enum: ['P0', 'P1', 'P2', 'P3'], description: 'Filter by priority' },
limit: { type: 'number', description: 'Max results (default: 20)' },
},
},
},
{
name: 'task_show',
description: 'Show task details including checklist and dependencies',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Task ID (e.g., ST-1, VPN-45)' },
},
required: ['id'],
},
},
{
name: 'task_close',
description: 'Mark a task as completed',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Task ID to close' },
},
required: ['id'],
},
},
{
name: 'task_update',
description: 'Update task fields (status, priority, type, title)',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Task ID to update' },
status: { type: 'string', enum: ['pending', 'open', 'in_progress', 'testing', 'blocked', 'completed'], description: 'New status' },
priority: { type: 'string', enum: ['P0', 'P1', 'P2', 'P3'], description: 'New priority' },
type: { type: 'string', enum: ['task', 'bug', 'feature', 'debt', 'investigation'], description: 'New type' },
title: { type: 'string', description: 'New title' },
},
required: ['id'],
},
},
{
name: 'task_investigate',
description: 'Start an investigation workflow: creates an investigation task and auto-links all subsequent tasks to it. Use when beginning multi-step problem analysis.',
inputSchema: {
type: 'object',
properties: {
title: { type: 'string', description: 'Investigation title (required)' },
project: { type: 'string', description: 'Project key (e.g., ST, VPN). Auto-detected from CWD if not provided.' },
priority: { type: 'string', enum: ['P0', 'P1', 'P2', 'P3'], description: 'Priority level (default: P1)' },
description: { type: 'string', description: 'Optional description of investigation scope' },
},
required: ['title'],
},
},
{
name: 'task_move_project',
description: 'Move task to different project while preserving history',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Task ID to move (e.g., CF-295)' },
target_project: { type: 'string', description: 'Target project key (e.g., VPN, ST, GB)' },
reason: { type: 'string', description: 'Optional reason for move' },
},
required: ['id', 'target_project'],
},
},
// Semantic Search Tools
{
name: 'task_similar',
description: 'Find semantically similar tasks using pgvector',
inputSchema: {
type: 'object',
properties: {
query: { type: 'string', description: 'Search query' },
project: { type: 'string', description: 'Filter by project (optional)' },
limit: { type: 'number', description: 'Max results (default: 5)' },
},
required: ['query'],
},
},
{
name: 'task_context',
description: 'Get related tasks for current work context (useful for delegations)',
inputSchema: {
type: 'object',
properties: {
description: { type: 'string', description: 'Description of current work' },
project: { type: 'string', description: 'Current project' },
limit: { type: 'number', description: 'Max related tasks (default: 3)' },
},
required: ['description'],
},
},
{
name: 'task_session_context',
description: 'Get session context for a task - retrieves notes, decisions, and related tasks from the session where the task was created. Use this to understand the original context and requirements.',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Task ID (e.g., CF-570)' },
},
required: ['id'],
},
},
// Relation Tools
{
name: 'task_link',
description: 'Create dependency between tasks',
inputSchema: {
type: 'object',
properties: {
from_id: { type: 'string', description: 'Source task ID' },
to_id: { type: 'string', description: 'Target task ID' },
link_type: { type: 'string', enum: ['blocks', 'relates_to', 'duplicates', 'depends_on', 'needs', 'implements', 'fixes', 'causes', 'subtask_of'], description: 'Relationship type' },
},
required: ['from_id', 'to_id', 'link_type'],
},
},
{
name: 'task_checklist_add',
description: 'Add a checklist item to a task',
inputSchema: {
type: 'object',
properties: {
task_id: { type: 'string', description: 'Task ID' },
item: { type: 'string', description: 'Checklist item text' },
},
required: ['task_id', 'item'],
},
},
{
name: 'task_checklist_toggle',
description: 'Toggle a checklist item (check/uncheck)',
inputSchema: {
type: 'object',
properties: {
item_id: { type: 'number', description: 'Checklist item ID' },
checked: { type: 'boolean', description: 'New checked state' },
},
required: ['item_id', 'checked'],
},
},
{
name: 'task_resolve_duplicate',
description: 'Resolve a duplicate issue by closing it and linking to the dominant issue',
inputSchema: {
type: 'object',
properties: {
duplicate_id: { type: 'string', description: 'The duplicate task ID to close' },
dominant_id: { type: 'string', description: 'The dominant/original task ID to keep' },
},
required: ['duplicate_id', 'dominant_id'],
},
},
// Epic Tools
{
name: 'epic_add',
description: 'Create a new epic (session-scoped work bundle) with auto-generated ID',
inputSchema: {
type: 'object',
properties: {
title: { type: 'string', description: 'Epic title (required)' },
project: { type: 'string', description: 'Project key (e.g., VPN, ST). Auto-detected if not provided.' },
description: { type: 'string', description: 'Optional description of the epic scope' },
},
required: ['title'],
},
},
{
name: 'epic_list',
description: 'List epics with task counts and progress',
inputSchema: {
type: 'object',
properties: {
project: { type: 'string', description: 'Filter by project key' },
status: { type: 'string', enum: ['open', 'in_progress', 'completed'], description: 'Filter by status' },
limit: { type: 'number', description: 'Max results (default: 20)' },
},
},
},
{
name: 'epic_show',
description: 'Show epic details with all assigned tasks',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Epic ID (e.g., VPN-E1, ST-E3)' },
},
required: ['id'],
},
},
{
name: 'epic_assign',
description: 'Assign a task to an epic',
inputSchema: {
type: 'object',
properties: {
task_id: { type: 'string', description: 'Task ID to assign' },
epic_id: { type: 'string', description: 'Epic ID to assign to' },
},
required: ['task_id', 'epic_id'],
},
},
{
name: 'epic_close',
description: 'Close an epic (mark as completed)',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Epic ID to close (e.g., VPN-E1, ST-E3)' },
},
required: ['id'],
},
},
// Version Tools
{
name: 'version_add',
description: 'Create a new version/release for a project',
inputSchema: {
type: 'object',
properties: {
project: { type: 'string', description: 'Project key (e.g., VPN, ST)' },
version: { type: 'string', description: 'Version number (e.g., 1.0.0, 2.1.0-beta)' },
build_number: { type: 'number', description: 'Optional build number' },
status: { type: 'string', enum: ['planned', 'in_progress', 'released', 'archived'], description: 'Version status (default: planned)' },
release_notes: { type: 'string', description: 'Optional release notes' },
},
required: ['project', 'version'],
},
},
{
name: 'version_list',
description: 'List versions with optional filters',
inputSchema: {
type: 'object',
properties: {
project: { type: 'string', description: 'Filter by project key' },
status: { type: 'string', enum: ['planned', 'in_progress', 'released', 'archived'], description: 'Filter by status' },
limit: { type: 'number', description: 'Max results (default: 20)' },
},
},
},
{
name: 'version_show',
description: 'Show version details with assigned tasks and epics',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Version ID (e.g., VPN-v1.0.0)' },
},
required: ['id'],
},
},
{
name: 'version_update',
description: 'Update version fields (status, git_tag, git_sha, release_notes)',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Version ID to update' },
status: { type: 'string', enum: ['planned', 'in_progress', 'released', 'archived'], description: 'New status' },
git_tag: { type: 'string', description: 'Git tag name (e.g., v1.0.0)' },
git_sha: { type: 'string', description: 'Git commit SHA for this version' },
release_notes: { type: 'string', description: 'Release notes' },
release_date: { type: 'string', description: 'Release date (ISO format)' },
},
required: ['id'],
},
},
{
name: 'version_release',
description: 'Mark a version as released (sets status and release_date)',
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Version ID to release' },
git_tag: { type: 'string', description: 'Optional git tag to associate' },
},
required: ['id'],
},
},
{
name: 'version_assign_task',
description: 'Assign a task to a version',
inputSchema: {
type: 'object',
properties: {
task_id: { type: 'string', description: 'Task ID to assign' },
version_id: { type: 'string', description: 'Version ID to assign to' },
},
required: ['task_id', 'version_id'],
},
},
// Delegation Tools
// Delegation Tools (kept for tracking code generation jobs)
{
name: 'task_delegations',
description: 'List delegations for a specific task (quality scores, backends, status)',
description: 'List delegations for a specific Jira issue (quality scores, backends, status)',
inputSchema: {
type: 'object',
properties: {
task_id: { type: 'string', description: 'Task ID (e.g., ST-123)' },
task_id: { type: 'string', description: 'Jira issue key (e.g., CF-123)' },
},
required: ['task_id'],
},
@@ -352,14 +28,14 @@ export const toolDefinitions = [
},
},
// Commit Tools
// Commit Tools (kept for git-session linking)
{
name: 'task_commit_add',
description: 'Link a git commit to a task (SHA reference only, Gitea MCP has full commit data)',
description: 'Link a git commit to a Jira issue key (SHA reference only)',
inputSchema: {
type: 'object',
properties: {
task_id: { type: 'string', description: 'Task ID (e.g., VPN-123)' },
task_id: { type: 'string', description: 'Jira issue key (e.g., CF-123)' },
commit_sha: { type: 'string', description: 'Git commit SHA (full or short)' },
repo: { type: 'string', description: 'Repository (e.g., christian/VPN)' },
source: { type: 'string', enum: ['manual', 'parsed', 'pr_merge'], description: 'How the link was created (default: manual)' },
@@ -369,11 +45,11 @@ export const toolDefinitions = [
},
{
name: 'task_commit_remove',
description: 'Remove a commit link from a task',
description: 'Remove a commit link from a Jira issue',
inputSchema: {
type: 'object',
properties: {
task_id: { type: 'string', description: 'Task ID' },
task_id: { type: 'string', description: 'Jira issue key' },
commit_sha: { type: 'string', description: 'Commit SHA to unlink' },
},
required: ['task_id', 'commit_sha'],
@@ -381,18 +57,18 @@ export const toolDefinitions = [
},
{
name: 'task_commits_list',
description: 'List commits linked to a task',
description: 'List commits linked to a Jira issue',
inputSchema: {
type: 'object',
properties: {
task_id: { type: 'string', description: 'Task ID' },
task_id: { type: 'string', description: 'Jira issue key' },
},
required: ['task_id'],
},
},
{
name: 'task_link_commits',
description: 'Parse commit messages for task references and create links (batch operation)',
description: 'Parse commit messages for Jira issue references and create links (batch operation)',
inputSchema: {
type: 'object',
properties: {
@@ -416,11 +92,11 @@ export const toolDefinitions = [
},
{
name: 'session_tasks',
description: 'List tasks worked on in a session (from task_activity tracking)',
description: 'List Jira issues worked on in a session (from task_activity tracking)',
inputSchema: {
type: 'object',
properties: {
session_id: { type: 'string', description: 'Session ID (supports * wildcard, e.g., session_20260110_*)' },
session_id: { type: 'string', description: 'Session ID (supports * wildcard)' },
limit: { type: 'number', description: 'Max results (default: 20)' },
},
required: ['session_id'],
@@ -440,7 +116,7 @@ export const toolDefinitions = [
impact: { type: 'string', description: 'Effects on existing infrastructure' },
actions_required: { type: 'string', description: 'Steps developers need to take (optional)' },
session_id: { type: 'string', description: 'Session that implemented change (optional)' },
task_ids: { type: 'array', items: { type: 'string' }, description: 'Related task IDs (optional)' },
task_ids: { type: 'array', items: { type: 'string' }, description: 'Related Jira issue keys (optional)' },
},
required: ['date', 'title', 'change_description', 'impact'],
},
@@ -458,7 +134,7 @@ export const toolDefinitions = [
},
{
name: 'changelog_list',
description: 'List recent infrastructure changes by time period (fallback)',
description: 'List recent infrastructure changes by time period',
inputSchema: {
type: 'object',
properties: {
@@ -471,7 +147,7 @@ export const toolDefinitions = [
// Project Lock Tools
{
name: 'project_lock',
description: 'Lock a project for exclusive session access. Prevents other sessions from working on it.',
description: 'Lock a project for exclusive session access.',
inputSchema: {
type: 'object',
properties: {
@@ -508,7 +184,7 @@ export const toolDefinitions = [
},
{
name: 'project_context',
description: 'Get project context from current directory - returns detected project, open tasks, epics, and lock status. Use at session start.',
description: 'Get project context from current directory - returns detected project, lock status, recent sessions.',
inputSchema: {
type: 'object',
properties: {},
@@ -522,11 +198,11 @@ export const toolDefinitions = [
inputSchema: {
type: 'object',
properties: {
id: { type: 'string', description: 'Unique component ID (e.g., propertymap-scraper, gridbot-conductor)' },
id: { type: 'string', description: 'Unique component ID' },
name: { type: 'string', description: 'Human-readable name' },
type: { type: 'string', enum: ['service', 'script', 'config', 'database', 'api', 'ui', 'library'], description: 'Component type' },
path: { type: 'string', description: 'File system path or Docker container name' },
repo: { type: 'string', description: 'Git repository (e.g., christian/propertymap)' },
repo: { type: 'string', description: 'Git repository' },
description: { type: 'string', description: 'What this component does' },
health_check: { type: 'string', description: 'Command or URL to check health' },
},
@@ -550,7 +226,7 @@ export const toolDefinitions = [
type: 'object',
properties: {
component_id: { type: 'string', description: 'Source component ID' },
depends_on: { type: 'string', description: 'Target component ID (what source depends on)' },
depends_on: { type: 'string', description: 'Target component ID' },
dependency_type: { type: 'string', enum: ['hard', 'soft', 'config', 'data'], description: 'Type of dependency' },
description: { type: 'string', description: 'Description of the dependency' },
},
@@ -564,7 +240,7 @@ export const toolDefinitions = [
type: 'object',
properties: {
component_id: { type: 'string', description: 'Component ID' },
file_pattern: { type: 'string', description: 'File pattern (e.g., src/services/*.py, docker-compose.yml)' },
file_pattern: { type: 'string', description: 'File pattern (e.g., src/services/*.py)' },
},
required: ['component_id', 'file_pattern'],
},
@@ -576,7 +252,7 @@ export const toolDefinitions = [
type: 'object',
properties: {
component_id: { type: 'string', description: 'Component ID' },
name: { type: 'string', description: 'Check name (e.g., health-endpoint, container-running)' },
name: { type: 'string', description: 'Check name' },
check_type: { type: 'string', enum: ['command', 'http', 'tcp', 'file'], description: 'Type of check' },
check_command: { type: 'string', description: 'Command/URL to execute' },
expected_result: { type: 'string', description: 'Expected output or status' },
@@ -591,33 +267,29 @@ export const toolDefinitions = [
inputSchema: {
type: 'object',
properties: {
changed_files: {
type: 'array',
items: { type: 'string' },
description: 'List of changed file paths',
},
changed_files: { type: 'array', items: { type: 'string' }, description: 'List of changed file paths' },
},
required: ['changed_files'],
},
},
{
name: 'impact_learn',
description: 'Record a learned impact relationship (when we discover a missed dependency)',
description: 'Record a learned impact relationship',
inputSchema: {
type: 'object',
properties: {
changed_component: { type: 'string', description: 'Component that was changed' },
affected_component: { type: 'string', description: 'Component that was unexpectedly affected' },
impact_description: { type: 'string', description: 'What went wrong' },
error_id: { type: 'string', description: 'Related error ID from error memory' },
task_id: { type: 'string', description: 'Related task ID' },
error_id: { type: 'string', description: 'Related error ID' },
task_id: { type: 'string', description: 'Related Jira issue key' },
},
required: ['changed_component', 'affected_component', 'impact_description'],
},
},
{
name: 'component_graph',
description: 'Get component dependency graph (for visualization)',
description: 'Get component dependency graph',
inputSchema: {
type: 'object',
properties: {
@@ -629,24 +301,24 @@ export const toolDefinitions = [
// Memory Tools
{
name: 'memory_add',
description: 'Store a learning/memory for future sessions. Use at session end to persist insights.',
description: 'Store a learning/memory for future sessions.',
inputSchema: {
type: 'object',
properties: {
category: { type: 'string', enum: ['pattern', 'fix', 'preference', 'gotcha', 'architecture'], description: 'Memory category' },
title: { type: 'string', description: 'Short title for the memory' },
title: { type: 'string', description: 'Short title' },
content: { type: 'string', description: 'The learning/insight to remember' },
context: { type: 'string', description: 'When/where this applies (optional)' },
project: { type: 'string', description: 'Project this relates to (optional)' },
session_id: { type: 'string', description: 'Session ID to link memory to (optional)' },
task_id: { type: 'string', description: 'Task ID to link memory to (optional)' },
project: { type: 'string', description: 'Project (optional)' },
session_id: { type: 'string', description: 'Session ID (optional)' },
task_id: { type: 'string', description: 'Jira issue key (optional)' },
},
required: ['category', 'title', 'content'],
},
},
{
name: 'memory_search',
description: 'Search memories semantically. Returns relevant learnings for current context.',
description: 'Search memories semantically.',
inputSchema: {
type: 'object',
properties: {
@@ -672,12 +344,12 @@ export const toolDefinitions = [
},
{
name: 'memory_context',
description: 'Get memories relevant to current session context. Use at session start.',
description: 'Get memories relevant to current session context.',
inputSchema: {
type: 'object',
properties: {
project: { type: 'string', description: 'Current project' },
task_description: { type: 'string', description: 'Description of planned work (for semantic matching)' },
task_description: { type: 'string', description: 'Description of planned work' },
},
},
},
@@ -692,10 +364,10 @@ export const toolDefinitions = [
tool_name: { type: 'string', description: 'Tool or command name' },
category: { type: 'string', enum: ['mcp', 'cli', 'script', 'internal', 'deprecated'], description: 'Tool category' },
title: { type: 'string', description: 'Short descriptive title' },
description: { type: 'string', description: 'Detailed description of what the tool does' },
description: { type: 'string', description: 'Detailed description' },
usage_example: { type: 'string', description: 'Usage example (optional)' },
parameters: { type: 'object', description: 'Parameter definitions (optional)' },
notes: { type: 'string', description: 'Additional notes, gotchas, tips (optional)' },
notes: { type: 'string', description: 'Additional notes (optional)' },
tags: { type: 'array', items: { type: 'string' }, description: 'Searchable tags (optional)' },
source_file: { type: 'string', description: 'Original source file (optional)' },
},
@@ -741,7 +413,7 @@ export const toolDefinitions = [
},
{
name: 'tool_doc_export',
description: 'Export all tool documentation as markdown (for backup/migration)',
description: 'Export all tool documentation as markdown',
inputSchema: {
type: 'object',
properties: {},
@@ -751,12 +423,13 @@ export const toolDefinitions = [
// Session Management Tools
{
name: 'session_start',
description: 'Start a new session with metadata tracking',
description: 'Start a new session with metadata tracking. Links to Jira issue key.',
inputSchema: {
type: 'object',
properties: {
session_id: { type: 'string', description: 'Session ID (auto-generated if not provided)' },
project: { type: 'string', description: 'Project key (e.g., CF, VPN)' },
jira_issue_key: { type: 'string', description: 'Jira issue key being worked on (e.g., CF-123)' },
working_directory: { type: 'string', description: 'Current working directory' },
git_branch: { type: 'string', description: 'Current git branch' },
initial_prompt: { type: 'string', description: 'First user message' },
@@ -773,11 +446,7 @@ export const toolDefinitions = [
session_id: { type: 'string', description: 'Session ID to update' },
message_count: { type: 'number', description: 'Number of messages exchanged' },
token_count: { type: 'number', description: 'Total tokens used' },
tools_used: {
type: 'array',
items: { type: 'string' },
description: 'Array of tool names used',
},
tools_used: { type: 'array', items: { type: 'string' }, description: 'Array of tool names used' },
},
required: ['session_id'],
},
@@ -823,7 +492,7 @@ export const toolDefinitions = [
},
{
name: 'session_context',
description: 'Get complete context: tasks, commits, builds, memories',
description: 'Get complete context: Jira issues, commits, builds, memories',
inputSchema: {
type: 'object',
properties: {
@@ -834,7 +503,7 @@ export const toolDefinitions = [
},
{
name: 'build_record',
description: 'Record build information linked to session and version',
description: 'Record build information linked to session',
inputSchema: {
type: 'object',
properties: {
@@ -850,13 +519,13 @@ export const toolDefinitions = [
},
{
name: 'session_commit_link',
description: 'Link a commit to a session (automatically called when commits are made)',
description: 'Link a commit to a session',
inputSchema: {
type: 'object',
properties: {
session_id: { type: 'string', description: 'Session ID' },
commit_sha: { type: 'string', description: 'Git commit SHA' },
repo: { type: 'string', description: 'Repository (e.g., christian/ClaudeFramework)' },
repo: { type: 'string', description: 'Repository' },
commit_message: { type: 'string', description: 'Commit message (optional)' },
committed_at: { type: 'string', description: 'Commit timestamp (ISO format, optional)' },
},
@@ -865,7 +534,7 @@ export const toolDefinitions = [
},
{
name: 'session_recover_orphaned',
description: 'Recover abandoned/orphaned sessions (CF-572). Detects sessions active for >2 hours and marks as abandoned',
description: 'Recover abandoned/orphaned sessions (active >2 hours)',
inputSchema: {
type: 'object',
properties: {
@@ -875,7 +544,7 @@ export const toolDefinitions = [
},
{
name: 'session_recover_temp_notes',
description: 'Recover notes from temp files for a specific session (CF-572)',
description: 'Recover notes from temp files for a specific session',
inputSchema: {
type: 'object',
properties: {
@@ -902,7 +571,7 @@ export const toolDefinitions = [
},
{
name: 'session_notes_list',
description: 'List all notes for a session, optionally filtered by type',
description: 'List all notes for a session',
inputSchema: {
type: 'object',
properties: {
@@ -920,7 +589,7 @@ export const toolDefinitions = [
properties: {
session_id: { type: 'string', description: 'Session ID' },
plan_content: { type: 'string', description: 'Plan content in markdown' },
plan_file_name: { type: 'string', description: 'Original filename (e.g., eloquent-yellow-cat.md) - optional' },
plan_file_name: { type: 'string', description: 'Original filename (optional)' },
status: { type: 'string', enum: ['draft', 'approved', 'executed', 'abandoned'], description: 'Plan status (default: draft)' },
},
required: ['session_id', 'plan_content'],
@@ -952,11 +621,11 @@ export const toolDefinitions = [
},
{
name: 'project_doc_upsert',
description: 'Create or update project documentation (replaces CLAUDE.md sections)',
description: 'Create or update project documentation',
inputSchema: {
type: 'object',
properties: {
project: { type: 'string', description: 'Project key (e.g., CF, VPN)' },
project: { type: 'string', description: 'Project key' },
doc_type: { type: 'string', enum: ['overview', 'architecture', 'guidelines', 'history', 'configuration', 'workflow'], description: 'Documentation type' },
title: { type: 'string', description: 'Document title' },
content: { type: 'string', description: 'Document content in markdown' },
@@ -990,7 +659,7 @@ export const toolDefinitions = [
},
{
name: 'session_documentation_generate',
description: 'Auto-generate full markdown documentation for a session (tasks, commits, notes, plans)',
description: 'Auto-generate full markdown documentation for a session',
inputSchema: {
type: 'object',
properties: {
@@ -1001,7 +670,7 @@ export const toolDefinitions = [
},
{
name: 'session_semantic_search',
description: 'Semantic search across all session documentation using vector similarity',
description: 'Semantic search across all session documentation',
inputSchema: {
type: 'object',
properties: {
@@ -1014,7 +683,7 @@ export const toolDefinitions = [
},
{
name: 'session_productivity_analytics',
description: 'Get productivity metrics (avg duration, tasks/commits per session, etc.)',
description: 'Get productivity metrics',
inputSchema: {
type: 'object',
properties: {
@@ -1025,12 +694,12 @@ export const toolDefinitions = [
},
{
name: 'session_pattern_detection',
description: 'Detect patterns across sessions (tool usage, task types)',
description: 'Detect patterns across sessions',
inputSchema: {
type: 'object',
properties: {
project: { type: 'string', description: 'Filter by project (optional)' },
pattern_type: { type: 'string', enum: ['tool_usage', 'task_types', 'error_frequency'], description: 'Type of pattern to detect (default: tool_usage)' },
pattern_type: { type: 'string', enum: ['tool_usage', 'task_types', 'error_frequency'], description: 'Type of pattern to detect' },
},
},
},
@@ -1038,17 +707,17 @@ export const toolDefinitions = [
// Archive Tools
{
name: 'archive_add',
description: 'Archive content to database with semantic embedding. Replaces filesystem archives.',
description: 'Archive content to database with semantic embedding.',
inputSchema: {
type: 'object',
properties: {
project: { type: 'string', description: 'Project key (e.g., CF, VPN)' },
project: { type: 'string', description: 'Project key' },
archive_type: { type: 'string', enum: ['session', 'research', 'audit', 'investigation', 'completed', 'migration'], description: 'Archive type' },
title: { type: 'string', description: 'Archive title' },
content: { type: 'string', description: 'Archive content (markdown)' },
original_path: { type: 'string', description: 'Original file path (optional)' },
file_size: { type: 'number', description: 'File size in bytes (optional)' },
archived_by_session: { type: 'string', description: 'Session ID that archived it (optional)' },
archived_by_session: { type: 'string', description: 'Session ID (optional)' },
metadata: { type: 'object', description: 'Additional metadata (optional)' },
},
required: ['project', 'archive_type', 'title', 'content'],
@@ -1096,11 +765,11 @@ export const toolDefinitions = [
// Project Archival
{
name: 'project_archive',
description: 'Archive complete project to S3 with database tracking. Creates tarball, uploads to s3://agiliton-archive/projects/, updates database, and optionally deletes local copy.',
description: 'Archive complete project to S3 with database tracking.',
inputSchema: {
type: 'object',
properties: {
project_key: { type: 'string', description: 'Project key (must exist in database)' },
project_key: { type: 'string', description: 'Project key' },
project_path: { type: 'string', description: 'Absolute path to project directory' },
delete_local: { type: 'boolean', description: 'Delete local project after successful archive (default: false)' },
session_id: { type: 'string', description: 'Session ID performing the archival (optional)' },

View File

@@ -1,142 +0,0 @@
// Task relations: dependencies and checklists
import { query, queryOne, execute } from '../db.js';
// Arguments for taskLink: create a (possibly bidirectional) dependency link.
interface TaskLinkArgs {
  from_id: string;   // source task ID
  to_id: string;     // target task ID
  link_type: string; // e.g. 'blocks' | 'relates_to' | 'duplicates' (free-form string; not validated here)
}

// Arguments for checklistAdd.
interface ChecklistAddArgs {
  task_id: string; // task the checklist item belongs to
  item: string;    // checklist item text
}

// Arguments for checklistToggle.
interface ChecklistToggleArgs {
  item_id: number;  // task_checklist row ID
  checked: boolean; // new checked state
}
/**
 * Create a dependency between tasks.
 * - blocks: unidirectional (A blocks B)
 * - relates_to: bidirectional (A relates to B = B relates to A)
 * - duplicates: bidirectional (A duplicates B = B duplicates A)
 *
 * Inserts are idempotent (ON CONFLICT DO NOTHING on the composite key).
 * Returns a human-readable status string; errors are reported in-band.
 */
export async function taskLink(args: TaskLinkArgs): Promise<string> {
  const { from_id, to_id, link_type } = args;

  // Shared idempotent insert for one directed edge.
  const insertEdge = (source: string, target: string) =>
    execute(
      `INSERT INTO task_links (from_task_id, to_task_id, link_type)
       VALUES ($1, $2, $3)
       ON CONFLICT (from_task_id, to_task_id, link_type) DO NOTHING`,
      [source, target, link_type]
    );

  try {
    // Primary (forward) link.
    await insertEdge(from_id, to_id);

    // Symmetric relationships also get the reverse edge.
    const isSymmetric = link_type === 'relates_to' || link_type === 'duplicates';
    if (isSymmetric) {
      await insertEdge(to_id, from_id);
    }

    return `Linked: ${from_id} ${link_type} ${to_id}`;
  } catch (error) {
    return `Error creating link: ${error}`;
  }
}
/**
 * Add a checklist item to a task.
 * The item is appended at the next free position (1-based).
 */
export async function checklistAdd(args: ChecklistAddArgs): Promise<string> {
  const { task_id, item } = args;

  // COALESCE(MAX(position), 0) + 1 yields 1 for an empty checklist.
  const nextPos = await queryOne<{ max: number }>(
    `SELECT COALESCE(MAX(position), 0) + 1 as max
     FROM task_checklist WHERE task_id = $1`,
    [task_id]
  );
  const position = nextPos?.max || 1;

  await execute(
    `INSERT INTO task_checklist (task_id, item, position)
     VALUES ($1, $2, $3)`,
    [task_id, item, position]
  );

  return `Added to ${task_id}: ${item}`;
}
/**
 * Toggle a checklist item's checked state by row ID.
 * Returns a not-found message when no row was updated.
 */
export async function checklistToggle(args: ChecklistToggleArgs): Promise<string> {
  const { item_id, checked } = args;

  // execute() returns the affected-row count.
  const updatedRows = await execute(
    `UPDATE task_checklist SET checked = $1 WHERE id = $2`,
    [checked, item_id]
  );

  if (updatedRows === 0) {
    return `Checklist item not found: ${item_id}`;
  }
  const verb = checked ? 'Checked' : 'Unchecked';
  return `${verb}: item #${item_id}`;
}
// Arguments for taskResolveDuplicate.
interface ResolveDuplicateArgs {
  duplicate_id: string; // task to close as a duplicate
  dominant_id: string;  // surviving task the duplicate points to
}
/**
 * Resolve a duplicate issue by closing it and linking to the dominant issue
 * - Closes the duplicate task (sets status to completed)
 * - Creates bidirectional "duplicates" link between the two tasks
 */
export async function taskResolveDuplicate(args: ResolveDuplicateArgs): Promise<string> {
  const { duplicate_id, dominant_id } = args;
  try {
    // Close the duplicate task
    const closeResult = await execute(
      `UPDATE tasks
       SET status = 'completed', completed_at = NOW(), updated_at = NOW()
       WHERE id = $1`,
      [duplicate_id]
    );
    // execute() returns the affected-row count; 0 means no such task.
    if (closeResult === 0) {
      return `Duplicate task not found: ${duplicate_id}`;
    }
    // Create bidirectional duplicates link (idempotent via ON CONFLICT DO NOTHING)
    await execute(
      `INSERT INTO task_links (from_task_id, to_task_id, link_type)
       VALUES ($1, $2, 'duplicates')
       ON CONFLICT (from_task_id, to_task_id, link_type) DO NOTHING`,
      [duplicate_id, dominant_id]
    );
    await execute(
      `INSERT INTO task_links (from_task_id, to_task_id, link_type)
       VALUES ($1, $2, 'duplicates')
       ON CONFLICT (from_task_id, to_task_id, link_type) DO NOTHING`,
      [dominant_id, duplicate_id]
    );
    // NOTE(review): separator glyphs between the IDs below may have been lost
    // in rendering (reads as two IDs run together) — confirm against original.
    return `Resolved duplicate: ${duplicate_id}${dominant_id}\n Closed: ${duplicate_id}\n Linked: duplicates ${dominant_id}`;
  } catch (error) {
    return `Error resolving duplicate: ${error}`;
  }
}

View File

@@ -1,245 +0,0 @@
// Semantic search operations
import { query, queryOne, getProjectKey } from '../db.js';
import { getEmbedding, formatEmbedding } from '../embeddings.js';
import type { SimilarTask } from '../types.js';
// Row shape for the session_notes query (created_at pre-formatted as 'HH24:MI').
interface SessionNote {
  note_type: string;
  content: string;
  created_at: string;
}

// Minimal task row for the "other tasks from same session" listing.
interface SessionTask {
  id: string;
  title: string;
  status: string;
  priority: string;
}

// Commit reference row from task_commits.
interface SessionCommit {
  commit_hash: string;
  commit_message: string;
}

// Arguments for taskSessionContext.
interface TaskSessionContextArgs {
  id: string; // task ID to build context for
}
/**
 * Get session context for a task - retrieves notes, decisions, and related tasks
 * from the session where the task was created.
 *
 * @param args.id  Task ID to look up.
 * @returns Markdown report, or a plain "Task not found" string.
 */
export async function taskSessionContext(args: TaskSessionContextArgs): Promise<string> {
  const { id } = args;

  // Task row with the session it was created in (session_id may be NULL for
  // tasks predating session tracking or inserted directly).
  const task = await queryOne<{
    id: string;
    title: string;
    description: string;
    session_id: string;
  }>(
    `SELECT t.id, t.title, t.description, t.session_id
     FROM tasks t
     WHERE t.id = $1`,
    [id]
  );

  if (!task) {
    return `Task not found: ${id}`;
  }

  if (!task.session_id) {
    return `# Context for ${id}\n\n**Task:** ${task.title}\n\n⚠ No session linked to this task. Task was created before session tracking was implemented or via direct database insert.\n\n${task.description ? `**Description:**\n${task.description}` : ''}`;
  }

  // Session header info.
  const session = await queryOne<{
    session_number: number;
    summary: string;
    started_at: string;
  }>(
    `SELECT session_number, summary, to_char(started_at, 'YYYY-MM-DD HH24:MI') as started_at
     FROM sessions
     WHERE id = $1`,
    [task.session_id]
  );

  let output = `# Context for ${id}\n\n`;
  output += `**Task:** ${task.title}\n`;
  if (session) {
    output += `**Created in Session:** #${session.session_number} (${session.started_at})\n`;
    if (session.summary) {
      output += `\n## Session Summary\n${session.summary}\n`;
    }
  } else {
    output += `**Session ID:** ${task.session_id} (session record not found)\n`;
  }

  if (task.description) {
    output += `\n## Task Description\n${task.description}\n`;
  }

  // Session notes, oldest first.
  // FIX: order by the table-qualified timestamp column. A bare
  // "ORDER BY created_at" binds to the to_char() output alias in PostgreSQL
  // and sorts the 'HH24:MI' strings, which breaks ordering across midnight.
  const notes = await query<SessionNote>(
    `SELECT note_type, content, to_char(created_at, 'HH24:MI') as created_at
     FROM session_notes
     WHERE session_id = $1
     ORDER BY session_notes.created_at`,
    [task.session_id]
  );

  if (notes.length > 0) {
    output += `\n## Session Notes\n`;
    for (const note of notes) {
      output += `- **[${note.note_type}]** ${note.content}\n`;
    }
  }

  // Other tasks created in the same session (excluding this one).
  const relatedTasks = await query<SessionTask>(
    `SELECT id, title, status, priority
     FROM tasks
     WHERE session_id = $1 AND id != $2
     ORDER BY created_at`,
    [task.session_id, id]
  );

  if (relatedTasks.length > 0) {
    output += `\n## Other Tasks from Same Session\n`;
    for (const t of relatedTasks) {
      const statusIcon = t.status === 'completed' ? '✓' : t.status === 'in_progress' ? '▶' : '○';
      output += `- ${statusIcon} [${t.priority}] ${t.id}: ${t.title}\n`;
    }
  }

  // Commits linked to any task of the session, newest first.
  // FIX: committed_at must appear in the select list — PostgreSQL rejects
  // SELECT DISTINCT combined with ORDER BY on an expression that is not in
  // the select list (error 42P10), so the original query failed at runtime.
  const commits = await query<SessionCommit>(
    `SELECT DISTINCT commit_hash, commit_message, committed_at
     FROM task_commits
     WHERE task_id IN (SELECT id FROM tasks WHERE session_id = $1)
     ORDER BY committed_at DESC
     LIMIT 10`,
    [task.session_id]
  );

  if (commits.length > 0) {
    output += `\n## Commits from Session\n`;
    for (const c of commits) {
      output += `- \`${c.commit_hash}\` ${c.commit_message}\n`;
    }
  }

  return output;
}
// Arguments for taskSimilar (free-text semantic search over tasks).
interface TaskSimilarArgs {
  query: string;    // search text to embed
  project?: string; // optional project filter
  limit?: number;   // max results (default 5)
}

// Arguments for taskContext (related-task markdown for delegation prompts).
interface TaskContextArgs {
  description: string; // planned-work description to embed
  project?: string;    // optional project filter
  limit?: number;      // max results (default 3)
}
/**
* Find semantically similar tasks using pgvector
*/
export async function taskSimilar(args: TaskSimilarArgs): Promise<string> {
const { query: searchQuery, project, limit = 5 } = args;
// Generate embedding for the query
const embedding = await getEmbedding(searchQuery);
if (!embedding) {
return 'Error: Could not generate embedding for search query';
}
const embeddingStr = formatEmbedding(embedding);
let whereClause = 'WHERE embedding IS NOT NULL';
const params: unknown[] = [embeddingStr, limit];
let paramIndex = 3;
if (project) {
const projectKey = await getProjectKey(project);
whereClause += ` AND project = $${paramIndex}`;
params.push(projectKey);
}
const results = await query<SimilarTask>(
`SELECT id, title, type, status, priority,
1 - (embedding <=> $1) as similarity
FROM tasks
${whereClause}
ORDER BY embedding <=> $1
LIMIT $2`,
params
);
if (results.length === 0) {
return 'No similar tasks found';
}
const lines = results.map(t => {
const pct = Math.round(t.similarity * 100);
const statusIcon = t.status === 'completed' ? '[x]' : t.status === 'in_progress' ? '[>]' : '[ ]';
return `${statusIcon} ${pct}% ${t.id}: ${t.title} [${t.type}] [${t.priority}]`;
});
return `Similar tasks for "${searchQuery}":\n\n${lines.join('\n')}`;
}
/**
* Get related tasks for current work context
* Returns markdown suitable for injection into delegations
*/
export async function taskContext(args: TaskContextArgs): Promise<string> {
const { description, project, limit = 3 } = args;
// Generate embedding for the description
const embedding = await getEmbedding(description);
if (!embedding) {
return '';
}
const embeddingStr = formatEmbedding(embedding);
let whereClause = 'WHERE embedding IS NOT NULL AND status != \'completed\'';
const params: unknown[] = [embeddingStr, limit];
let paramIndex = 3;
if (project) {
const projectKey = await getProjectKey(project);
whereClause += ` AND project = $${paramIndex}`;
params.push(projectKey);
}
const results = await query<SimilarTask>(
`SELECT id, title, type, status, priority,
1 - (embedding <=> $1) as similarity
FROM tasks
${whereClause}
ORDER BY embedding <=> $1
LIMIT $2`,
params
);
if (results.length === 0) {
return '';
}
// Format as markdown for delegation context
let output = '## Related Tasks\n\n';
for (const t of results) {
const pct = Math.round(t.similarity * 100);
output += `- **${t.id}**: ${t.title} (${pct}% match, ${t.priority}, ${t.status})\n`;
}
return output;
}

View File

@@ -3,6 +3,7 @@
import { query, queryOne, execute } from '../db.js';
import { getEmbedding, formatEmbedding } from '../embeddings.js';
import { getSessionId } from './session-id.js';
// ============================================================================
// SESSION NOTES
@@ -32,7 +33,8 @@ interface SessionNote {
* Auto-generates embedding for semantic search
*/
export async function sessionNoteAdd(args: SessionNoteAddArgs): Promise<string> {
const { session_id, note_type, content } = args;
const { session_id: providedSessionId, note_type, content } = args;
const session_id = providedSessionId || getSessionId();
// Generate embedding for semantic search
const embedding = await getEmbedding(content);

26
src/tools/session-id.ts Normal file
View File

@@ -0,0 +1,26 @@
/**
* Shared utility: get current session ID from environment or cache file.
* Extracted from crud.ts during task-mcp → session-mcp fork (CF-762).
*/
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
/**
 * Resolve the current session ID.
 *
 * Resolution order:
 *   1. CLAUDE_SESSION_ID environment variable (explicit override).
 *   2. Cache file ~/.cache/session-memory/current_session (written by hooks).
 *   3. Generated fallback `session_YYYYMMDD_HHMMSS` (UTC).
 *
 * @returns A non-empty session ID string; never throws.
 */
export function getSessionId(): string {
  if (process.env.CLAUDE_SESSION_ID) {
    return process.env.CLAUDE_SESSION_ID;
  }

  const cacheFile = path.join(os.homedir(), '.cache', 'session-memory', 'current_session');
  try {
    const sessionId = fs.readFileSync(cacheFile, 'utf-8').trim();
    if (sessionId) return sessionId;
  } catch {
    // File doesn't exist or can't be read — fall through to generated ID.
  }

  // FIX: the previous `toISOString().replace(/[-:T]/g, '').slice(0, 15)`
  // produced a trailing '.' (e.g. "session_20260208184606.") because the ISO
  // string's millisecond dot fell inside the slice. Build the timestamp
  // explicitly as session_YYYYMMDD_HHMMSS, matching the session_<date>_*
  // naming used elsewhere.
  const iso = new Date().toISOString(); // e.g. "2026-02-08T18:46:06.123Z"
  const datePart = iso.slice(0, 10).replace(/-/g, '');  // YYYYMMDD
  const timePart = iso.slice(11, 19).replace(/:/g, ''); // HHMMSS
  return `session_${datePart}_${timePart}`;
}

View File

@@ -1,7 +1,9 @@
// Session management operations for database-driven session tracking
// Sessions auto-create CF Jira issues and post output on close (CF-762)
import { query, queryOne, execute } from '../db.js';
import { getEmbedding, formatEmbedding } from '../embeddings.js';
import { createSessionIssue, addComment, transitionToDone, updateIssueDescription } from '../services/jira.js';
interface SessionStartArgs {
session_id?: string;
@@ -9,6 +11,7 @@ interface SessionStartArgs {
working_directory?: string;
git_branch?: string;
initial_prompt?: string;
jira_issue_key?: string;
}
interface SessionUpdateArgs {
@@ -52,23 +55,25 @@ interface Session {
token_count: number;
tools_used: string[] | null;
status: string;
jira_issue_key: string | null;
created_at: string;
}
/**
* Start a new session with metadata tracking
* Returns session_id and session_number
* Start a new session with metadata tracking.
* Auto-creates a CF Jira issue for session tracking.
* Returns session_id, session_number, and Jira issue key.
*/
export async function sessionStart(args: SessionStartArgs): Promise<string> {
const { session_id, project, working_directory, git_branch, initial_prompt } = args;
const { session_id, project, working_directory, git_branch, initial_prompt, jira_issue_key } = args;
// Generate session ID if not provided (fallback, should come from session-memory)
const id = session_id || `session_${Date.now()}_${Math.random().toString(36).substring(7)}`;
await execute(
`INSERT INTO sessions (id, project, started_at, working_directory, git_branch, initial_prompt, status)
VALUES ($1, $2, NOW(), $3, $4, $5, 'active')`,
[id, project, working_directory || null, git_branch || null, initial_prompt || null]
`INSERT INTO sessions (id, project, started_at, working_directory, git_branch, initial_prompt, jira_issue_key, status)
VALUES ($1, $2, NOW(), $3, $4, $5, $6, 'active')`,
[id, project, working_directory || null, git_branch || null, initial_prompt || null, jira_issue_key || null]
);
// Get the assigned session_number
@@ -79,7 +84,32 @@ export async function sessionStart(args: SessionStartArgs): Promise<string> {
const session_number = result?.session_number || null;
return `Session started: ${id} (${project} #${session_number})`;
// Auto-create CF Jira issue for session tracking (non-blocking)
let sessionJiraKey: string | null = jira_issue_key || null;
if (!sessionJiraKey) {
try {
const jiraResult = await createSessionIssue({
sessionNumber: session_number,
project,
parentIssueKey: jira_issue_key || undefined,
branch: git_branch || undefined,
workingDirectory: working_directory || undefined,
});
if (jiraResult) {
sessionJiraKey = jiraResult.key;
// Store the auto-created Jira issue key
await execute(
`UPDATE sessions SET jira_issue_key = $1 WHERE id = $2`,
[sessionJiraKey, id]
);
}
} catch (err) {
console.error('session-mcp: Failed to create session Jira issue:', err);
}
}
const jiraInfo = sessionJiraKey ? ` [${sessionJiraKey}]` : '';
return `Session started: ${id} (${project} #${session_number})${jiraInfo}`;
}
/**
@@ -121,7 +151,8 @@ export async function sessionUpdate(args: SessionUpdateArgs): Promise<string> {
}
/**
* End session and generate summary with embedding
* End session and generate summary with embedding.
* Posts full session output as Jira comment and transitions session issue to Done.
*/
export async function sessionEnd(args: SessionEndArgs): Promise<string> {
const { session_id, summary, status = 'completed' } = args;
@@ -154,8 +185,8 @@ export async function sessionEnd(args: SessionEndArgs): Promise<string> {
}
// Get session details
const session = await queryOne<Session>(
`SELECT id, project, session_number, duration_minutes
const session = await queryOne<Session & { jira_issue_key: string | null }>(
`SELECT id, project, session_number, duration_minutes, jira_issue_key
FROM sessions WHERE id = $1`,
[session_id]
);
@@ -164,7 +195,100 @@ export async function sessionEnd(args: SessionEndArgs): Promise<string> {
return `Session ended: ${session_id}`;
}
return `Session ended: ${session.project} #${session.session_number} (${session.duration_minutes || 0}m)`;
// Post session output to Jira and close the session issue (non-blocking)
let jiraStatus = '';
if (session.jira_issue_key) {
try {
// Collect session output for Jira comment
const sessionOutput = await buildSessionOutput(session_id, session, summary);
// Post as comment
const commented = await addComment(session.jira_issue_key, sessionOutput);
// Update issue description with final summary
const descriptionUpdate = [
`## Session ${session.project} #${session.session_number}`,
`**Duration:** ${session.duration_minutes || 0} minutes`,
`**Status:** ${status}`,
`**Session ID:** ${session_id}`,
'',
`## Summary`,
summary,
].join('\n');
await updateIssueDescription(session.jira_issue_key, descriptionUpdate);
// Transition to Done
const transitioned = await transitionToDone(session.jira_issue_key);
jiraStatus = commented && transitioned
? ` [${session.jira_issue_key} → Done]`
: commented
? ` [${session.jira_issue_key} commented]`
: ` [${session.jira_issue_key} Jira update partial]`;
} catch (err) {
console.error('session-mcp: Failed to update session Jira issue:', err);
jiraStatus = ` [${session.jira_issue_key} Jira update failed]`;
}
}
return `Session ended: ${session.project} #${session.session_number} (${session.duration_minutes || 0}m)${jiraStatus}`;
}
/**
 * Build full session output markdown for Jira comment.
 *
 * Assembles: a session heading and duration, the closing summary, the
 * session's notes grouped by note_type (one bulleted section per type),
 * and the list of recorded commits (newest first).
 */
async function buildSessionOutput(
  session_id: string,
  session: { project: string | null; session_number: number | null; duration_minutes: number | null },
  summary: string
): Promise<string> {
  const out: string[] = [
    `# Session ${session.project} #${session.session_number}`,
    `Duration: ${session.duration_minutes || 0} minutes`,
    '',
    `## Summary`,
    summary,
    '',
  ];

  // Session notes, grouped by type in first-seen order.
  const notes = await query<{ note_type: string; content: string }>(
    `SELECT note_type, content FROM session_notes WHERE session_id = $1 ORDER BY created_at`,
    [session_id]
  );
  const byType = new Map<string, string[]>();
  for (const note of notes) {
    const bucket = byType.get(note.note_type);
    if (bucket) {
      bucket.push(note.content);
    } else {
      byType.set(note.note_type, [note.content]);
    }
  }
  for (const [noteType, contents] of byType) {
    // Title-case the type for the heading, e.g. "decision_made" → "Decision Made".
    const heading = noteType.replace(/_/g, ' ').replace(/\b\w/g, c => c.toUpperCase());
    out.push(`## ${heading}`);
    for (const content of contents) {
      out.push(`- ${content}`);
    }
    out.push('');
  }

  // Commits recorded against the session, newest first.
  const commits = await query<{ commit_sha: string; repo: string; commit_message: string | null }>(
    `SELECT commit_sha, repo, commit_message FROM session_commits WHERE session_id = $1 ORDER BY committed_at DESC`,
    [session_id]
  );
  if (commits.length > 0) {
    out.push(`## Commits (${commits.length})`);
    for (const commit of commits) {
      // Only the first line of a multi-line commit message is shown.
      const firstLine = commit.commit_message ? commit.commit_message.split('\n')[0] : 'No message';
      out.push(`- ${commit.commit_sha.substring(0, 7)} (${commit.repo}): ${firstLine}`);
    }
    out.push('');
  }

  return out.join('\n');
}
/**

View File

@@ -1,306 +0,0 @@
// Version management operations for task-mcp
import { query, queryOne, execute, getProjectKey } from '../db.js';
import type { Version, Task } from '../types.js';
interface VersionAddArgs {
project: string;
version: string;
build_number?: number;
status?: string;
release_notes?: string;
}
interface VersionListArgs {
project?: string;
status?: string;
limit?: number;
}
interface VersionUpdateArgs {
id: string;
status?: string;
git_tag?: string;
git_sha?: string;
release_notes?: string;
release_date?: string;
}
/**
 * Generate a version ID of the form "<PROJECT>-v<version>".
 * A single leading "v" on the version string is stripped, so "v1.2"
 * and "1.2" both yield "<PROJECT>-v1.2".
 */
function generateVersionId(projectKey: string, version: string): string {
  const normalized = version.startsWith('v') ? version.slice(1) : version;
  return `${projectKey}-v${normalized}`;
}
/**
 * Create a new version.
 *
 * Resolves the project alias to its canonical key, derives the version ID,
 * and inserts a row unless one with that ID already exists (idempotent).
 */
export async function versionAdd(args: VersionAddArgs): Promise<string> {
  const { project, version, build_number, status = 'planned', release_notes } = args;

  // Canonical project key (accepts project aliases).
  const projectKey = await getProjectKey(project);
  const versionId = generateVersionId(projectKey, version);

  // Duplicate guard: bail out rather than violate the primary key.
  const existing = await queryOne<{ id: string }>(`SELECT id FROM versions WHERE id = $1`, [versionId]);
  if (existing) {
    return `Version already exists: ${versionId}`;
  }

  await execute(
    `INSERT INTO versions (id, project, version, build_number, status, release_notes)
     VALUES ($1, $2, $3, $4, $5, $6)`,
    [versionId, projectKey, version, build_number || null, status, release_notes || null]
  );

  const buildSuffix = build_number ? `\n Build: ${build_number}` : '';
  return `Created version: ${versionId}\n Version: ${version}\n Project: ${projectKey}\n Status: ${status}${buildSuffix}`;
}
/**
 * List versions with optional filters.
 *
 * Output is one line per version ordered in_progress → planned → released →
 * other (newest first within each group), with task completion counts from
 * the tasks joined on version_id.
 *
 * @param args.project Optional project name/alias to filter by.
 * @param args.status  Optional version status to filter by.
 * @param args.limit   Maximum rows returned (default 20).
 */
export async function versionList(args: VersionListArgs): Promise<string> {
  const { project, status, limit = 20 } = args;

  // Build the WHERE clause dynamically with positional parameters.
  let whereClause = 'WHERE 1=1';
  const params: unknown[] = [];
  let paramIndex = 1;

  if (project) {
    const projectKey = await getProjectKey(project);
    whereClause += ` AND v.project = $${paramIndex++}`;
    params.push(projectKey);
  }
  if (status) {
    whereClause += ` AND v.status = $${paramIndex++}`;
    params.push(status);
  }
  params.push(limit);

  // release_date is formatted to 'YYYY-MM-DD' text in SQL, so override its
  // type on the row shape once here instead of the previous per-row
  // `as unknown as { release_date: string }` double-casts.
  type VersionRow = Omit<Version, 'release_date'> & {
    release_date: string | null;
    task_count: number;
    open_count: number;
  };

  const versions = await query<VersionRow>(
    `SELECT v.id, v.version, v.status, v.project, v.build_number, v.git_tag,
            to_char(v.release_date, 'YYYY-MM-DD') as release_date,
            COUNT(t.id) as task_count,
            COUNT(t.id) FILTER (WHERE t.status != 'completed') as open_count
     FROM versions v
     LEFT JOIN tasks t ON t.version_id = v.id
     ${whereClause}
     GROUP BY v.id, v.version, v.status, v.project, v.build_number, v.git_tag, v.release_date, v.created_at
     ORDER BY
       CASE v.status WHEN 'in_progress' THEN 0 WHEN 'planned' THEN 1 WHEN 'released' THEN 2 ELSE 3 END,
       v.created_at DESC
     LIMIT $${paramIndex}`,
    params
  );

  if (versions.length === 0) {
    return `No versions found${project ? ` for project ${project}` : ''}`;
  }

  const lines = versions.map(v => {
    const statusIcon = v.status === 'released' ? '[R]' : v.status === 'in_progress' ? '[>]' : v.status === 'archived' ? '[A]' : '[ ]';
    const progress = v.task_count > 0 ? ` (${v.task_count - v.open_count}/${v.task_count} tasks)` : '';
    const tag = v.git_tag ? ` [${v.git_tag}]` : '';
    const date = v.release_date ? ` - ${v.release_date}` : '';
    return `${statusIcon} ${v.id}: ${v.version}${tag}${progress}${date}`;
  });

  return `Versions${project ? ` (${project})` : ''}:\n\n${lines.join('\n')}`;
}
/**
 * Show version details with assigned tasks and targeting epics.
 *
 * Renders a markdown report: version metadata, release notes, the tasks
 * assigned to the version (with a done/total count), and any epics whose
 * target_version_id points at it.
 */
export async function versionShow(id: string): Promise<string> {
  // created/released are pre-formatted to text in SQL; git_tag/git_sha are
  // declared on the row type here instead of the previous per-field
  // `as unknown as` double-casts.
  const version = await queryOne<Version & { created: string; released: string | null; git_tag: string | null; git_sha: string | null }>(
    `SELECT id, project, version, build_number, status, release_notes, git_tag, git_sha,
            to_char(created_at, 'YYYY-MM-DD HH24:MI') as created,
            to_char(release_date, 'YYYY-MM-DD') as released
     FROM versions WHERE id = $1`,
    [id]
  );
  if (!version) {
    return `Version not found: ${id}`;
  }

  let output = `# ${version.id}\n\n`;
  output += `**Version:** ${version.version}\n`;
  output += `**Project:** ${version.project}\n`;
  output += `**Status:** ${version.status}\n`;
  if (version.build_number) {
    output += `**Build:** ${version.build_number}\n`;
  }
  if (version.git_tag) {
    output += `**Git Tag:** ${version.git_tag}\n`;
  }
  if (version.git_sha) {
    output += `**Git SHA:** ${version.git_sha}\n`;
  }
  output += `**Created:** ${version.created}\n`;
  if (version.released) {
    output += `**Released:** ${version.released}\n`;
  }
  if (version.release_notes) {
    output += `\n**Release Notes:**\n${version.release_notes}\n`;
  }

  // Tasks assigned to this version, active work first, then by priority.
  const tasks = await query<Task>(
    `SELECT id, title, status, priority, type
     FROM tasks
     WHERE version_id = $1
     ORDER BY
       CASE status WHEN 'in_progress' THEN 0 WHEN 'open' THEN 1 WHEN 'blocked' THEN 2 ELSE 3 END,
       CASE priority WHEN 'P0' THEN 0 WHEN 'P1' THEN 1 WHEN 'P2' THEN 2 ELSE 3 END`,
    [id]
  );
  if (tasks.length > 0) {
    const done = tasks.filter(t => t.status === 'completed').length;
    output += `\n**Tasks:** (${done}/${tasks.length} done)\n`;
    for (const t of tasks) {
      const statusIcon = t.status === 'completed' ? '[x]' : t.status === 'in_progress' ? '[>]' : t.status === 'blocked' ? '[!]' : '[ ]';
      output += ` ${statusIcon} ${t.priority} ${t.id}: ${t.title}\n`;
    }
  } else {
    output += `\n**Tasks:** None assigned\n`;
  }

  // Epics targeting this version.
  const epics = await query<{ id: string; title: string; status: string }>(
    `SELECT id, title, status FROM epics WHERE target_version_id = $1`,
    [id]
  );
  if (epics.length > 0) {
    output += `\n**Epics:**\n`;
    for (const e of epics) {
      const statusIcon = e.status === 'completed' ? '[x]' : e.status === 'in_progress' ? '[>]' : '[ ]';
      output += ` ${statusIcon} ${e.id}: ${e.title}\n`;
    }
  }

  return output;
}
/**
 * Update a version's mutable fields.
 *
 * Only fields present in args are written. status/release_date use
 * truthiness (cannot be cleared); git_tag/git_sha/release_notes accept
 * explicit empty values via the `!== undefined` checks.
 */
export async function versionUpdate(args: VersionUpdateArgs): Promise<string> {
  const { id, status, git_tag, git_sha, release_notes, release_date } = args;

  // Accumulate SET fragments; placeholders are numbered as values are added.
  const setClauses: string[] = [];
  const values: unknown[] = [];
  const addSet = (column: string, value: unknown): void => {
    setClauses.push(`${column} = $${values.length + 1}`);
    values.push(value);
  };

  if (status) addSet('status', status);
  if (git_tag !== undefined) addSet('git_tag', git_tag);
  if (git_sha !== undefined) addSet('git_sha', git_sha);
  if (release_notes !== undefined) addSet('release_notes', release_notes);
  if (release_date) addSet('release_date', release_date);

  if (setClauses.length === 0) {
    return 'No updates specified';
  }

  // The WHERE placeholder comes after all SET values.
  values.push(id);
  const affected = await execute(
    `UPDATE versions SET ${setClauses.join(', ')} WHERE id = $${values.length}`,
    values
  );

  return affected === 0 ? `Version not found: ${id}` : `Updated: ${id}`;
}
/**
 * Mark a version as released.
 *
 * Sets status to 'released' and stamps release_date; optionally records the
 * git tag. No-op with a message if the version is missing or already released.
 */
export async function versionRelease(args: { id: string; git_tag?: string }): Promise<string> {
  const { id, git_tag } = args;

  // Confirm the version exists and is not already released.
  const version = await queryOne<{ id: string; status: string; version: string }>(
    `SELECT id, status, version FROM versions WHERE id = $1`,
    [id]
  );
  if (!version) {
    return `Version not found: ${id}`;
  }
  if (version.status === 'released') {
    return `Version already released: ${id}`;
  }

  const setClauses = ['status = $1', 'release_date = NOW()'];
  const values: unknown[] = ['released'];
  if (git_tag) {
    setClauses.push(`git_tag = $${values.length + 1}`);
    values.push(git_tag);
  }
  values.push(id);
  await execute(
    `UPDATE versions SET ${setClauses.join(', ')} WHERE id = $${values.length}`,
    values
  );

  const tagNote = git_tag ? ` tagged as ${git_tag}` : '';
  return `Released: ${id} (${version.version})${tagNote}`;
}
/**
 * Assign a task to a version.
 *
 * Verifies the version exists first, then points the task's version_id at it
 * and bumps its updated_at timestamp.
 */
export async function versionAssignTask(args: { task_id: string; version_id: string }): Promise<string> {
  const { task_id, version_id } = args;

  // Refuse to dangle a task at a nonexistent version.
  const exists = await queryOne<{ id: string }>(`SELECT id FROM versions WHERE id = $1`, [version_id]);
  if (!exists) {
    return `Version not found: ${version_id}`;
  }

  const affected = await execute(
    `UPDATE tasks SET version_id = $1, updated_at = NOW() WHERE id = $2`,
    [version_id, task_id]
  );
  return affected === 0 ? `Task not found: ${task_id}` : `Assigned ${task_id} to version ${version_id}`;
}

View File

@@ -10,6 +10,7 @@ export interface Task {
priority: 'P0' | 'P1' | 'P2' | 'P3';
version_id?: string;
epic_id?: string;
planning_mode_required?: boolean | null;
created_at: Date;
updated_at: Date;
completed_at?: Date;

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash
# Wrapper script for task-mcp with hardcoded env vars
export DB_HOST="infra.agiliton.internal"
export DB_HOST="postgres.agiliton.internal"
export DB_PORT="5432"
export DB_NAME="agiliton"
export DB_USER="agiliton"