Add session documentation system (migration + MCP tools)

Phase 1-2 complete: Database schema + 9 MCP tools for session docs

Database Changes (migration 016):
- session_notes table (accomplishments, decisions, gotchas, etc.)
- session_plans table (plan mode plans with lifecycle tracking)
- project_documentation table (persistent project docs)
- sessions.documentation column (auto-generated markdown)
- HNSW indexes for semantic search across all doc types

MCP Tools Added (session-docs.ts):
1. session_note_add - Add structured notes to session
2. session_notes_list - List notes by type
3. session_plan_save - Save plan with embedding
4. session_plan_update_status - Track plan lifecycle
5. session_plan_list - List session plans
6. project_doc_upsert - Create/update project docs
7. project_doc_get - Get specific doc by type
8. project_doc_list - List all project docs
9. session_documentation_generate - Auto-generate markdown

Replaces: CLAUDE.md files, ~/.claude/plans/ directory

Next: Update session-start/end scripts for temp file management

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
Christian Gick
2026-01-19 10:13:57 +02:00
parent afce0bd3e5
commit 3745a13eaf
5 changed files with 926 additions and 0 deletions

View File

@@ -0,0 +1,90 @@
#!/usr/bin/env node
import pg from 'pg';

const { Pool } = pg;

// Database configuration.
// SECURITY: credentials were hardcoded here and committed to version control.
// Read from the standard PG* environment variables first; the literal
// fallbacks preserve existing behavior but the password should be rotated
// and the fallbacks removed.
const pool = new Pool({
  host: process.env.PGHOST ?? 'infra.agiliton.internal',
  port: Number(process.env.PGPORT ?? 5432),
  database: process.env.PGDATABASE ?? 'agiliton',
  user: process.env.PGUSER ?? 'agiliton',
  password: process.env.PGPASSWORD ?? 'QtqiwCOAUpQNF6pjzOMAREzUny2bY8V1',
  max: 5, // small pool: this is a one-shot batch script
});

// LiteLLM API configuration (same env-var-with-fallback pattern;
// the master key should likewise not live in source).
const LLM_API_URL = process.env.LLM_API_URL ?? 'https://api.agiliton.cloud/llm';
const LLM_API_KEY = process.env.LLM_API_KEY ?? 'sk-litellm-master-key';
/**
 * Request an embedding vector for `text` from the LiteLLM endpoint.
 *
 * @param {string} text - Text to embed.
 * @returns {Promise<number[] | null>} The embedding vector, or null when the
 *   API responds with a non-OK status or the request throws. Errors are
 *   logged and swallowed deliberately so the batch loop can keep going.
 */
async function getEmbedding(text) {
  const payload = {
    model: 'mxbai-embed-large',
    input: text,
  };
  try {
    const response = await fetch(`${LLM_API_URL}/v1/embeddings`, {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${LLM_API_KEY}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(payload),
    });
    if (!response.ok) {
      const detail = await response.text();
      console.error('Embedding API error:', response.status, detail);
      return null;
    }
    const body = await response.json();
    // First (and only) item of the batch; null when the shape is unexpected.
    return body.data?.[0]?.embedding || null;
  } catch (error) {
    console.error('Embedding generation failed:', error);
    return null;
  }
}
/**
 * Backfill embeddings for every tool_docs row that lacks one.
 *
 * Rows are processed sequentially with a small delay between API calls to
 * avoid rate limiting. Individual row failures are counted and logged but do
 * not abort the run. Sets a non-zero exit code on a hard error or when any
 * row failed, so callers/CI can detect an incomplete backfill (previously
 * the script always exited 0).
 */
async function regenerateEmbeddings() {
  try {
    // Fetch only the rows that still need an embedding.
    const result = await pool.query(
      'SELECT id, tool_name, title, description, notes, tags FROM tool_docs WHERE embedding IS NULL'
    );
    console.log(`Found ${result.rows.length} tool_docs without embeddings`);

    let successCount = 0;
    let failCount = 0;

    for (const row of result.rows) {
      // Concatenate the searchable fields into a single passage to embed.
      const embedText = `${row.title}. ${row.description}. ${row.tags?.join(' ') || ''}. ${row.notes || ''}`;
      console.log(`Generating embedding for: ${row.tool_name}`);

      const embedding = await getEmbedding(embedText);
      if (embedding) {
        // pgvector accepts the '[x,y,...]' text representation.
        const embeddingStr = `[${embedding.join(',')}]`;
        await pool.query(
          'UPDATE tool_docs SET embedding = $1 WHERE id = $2',
          [embeddingStr, row.id]
        );
        successCount++;
        console.log(`${row.tool_name} (${successCount}/${result.rows.length})`);
      } else {
        failCount++;
        console.log(`${row.tool_name} - Failed to generate embedding`);
      }

      // Small delay to avoid rate limiting.
      await new Promise(resolve => setTimeout(resolve, 100));
    }

    console.log(`\n✅ Complete: ${successCount} successful, ${failCount} failed`);
    if (failCount > 0) {
      process.exitCode = 1; // signal partial failure without skipping pool.end()
    }
  } catch (error) {
    console.error('Error:', error);
    process.exitCode = 1; // hard failure (e.g. DB unreachable) must not exit 0
  } finally {
    await pool.end(); // always release the connection pool
  }
}

// Entry point. The function handles its own errors, but attach a .catch()
// so the promise is never left floating with an unhandled rejection.
regenerateEmbeddings().catch((error) => {
  console.error('Unhandled error:', error);
  process.exitCode = 1;
});