fix: Copy confluence-collab package instead of symlink for Docker build
Symlinks don't resolve on the remote VM during Docker build context transfer. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1 +0,0 @@
|
|||||||
/Users/christian.gick/Development/Infrastructure/mcp-servers/confluence-collab
|
|
||||||
24
confluence-collab/pyproject.toml
Normal file
24
confluence-collab/pyproject.toml
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "confluence-collab"
version = "0.1.0"
description = "Section-based Confluence page editing with conflict retry"
requires-python = ">=3.11"
dependencies = [
    "httpx>=0.27",
    "beautifulsoup4>=4.12",
    "lxml>=5.0",
    "mcp>=1.0",
]

[project.optional-dependencies]
dev = ["pytest", "pytest-asyncio", "respx"]

[project.scripts]
confluence-collab = "confluence_collab.cli:main"

[tool.pytest.ini_options]
asyncio_mode = "auto"
|
||||||
16
confluence-collab/src/confluence_collab/__init__.py
Normal file
16
confluence-collab/src/confluence_collab/__init__.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
"""Confluence section-based collaborative editing library."""
|
||||||
|
|
||||||
|
from confluence_collab.parser import Section, parse_sections, find_section, replace_section_content
|
||||||
|
from confluence_collab.editor import section_list, section_get, section_update, section_append, section_delete
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"Section",
|
||||||
|
"parse_sections",
|
||||||
|
"find_section",
|
||||||
|
"replace_section_content",
|
||||||
|
"section_list",
|
||||||
|
"section_get",
|
||||||
|
"section_update",
|
||||||
|
"section_append",
|
||||||
|
"section_delete",
|
||||||
|
]
|
||||||
4
confluence-collab/src/confluence_collab/__main__.py
Normal file
4
confluence-collab/src/confluence_collab/__main__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
"""Allow running as: python -m confluence_collab <command>"""
|
||||||
|
from confluence_collab.cli import main
|
||||||
|
|
||||||
|
main()
|
||||||
103
confluence-collab/src/confluence_collab/cli.py
Normal file
103
confluence-collab/src/confluence_collab/cli.py
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
"""CLI for manual testing of confluence-collab operations."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from confluence_collab.client import Auth
|
||||||
|
from confluence_collab.editor import section_list, section_get, section_update, section_append, section_delete
|
||||||
|
|
||||||
|
|
||||||
|
async def cmd_list(args: argparse.Namespace) -> None:
    """Print the page's heading outline, indented by heading level."""
    sections = await section_list(args.page_id, Auth.from_env())
    for sec in sections:
        prefix = "  " * (sec.level - 1)
        print(f"{prefix}h{sec.level}: {sec.heading}")
|
||||||
|
|
||||||
|
|
||||||
|
async def cmd_get(args: argparse.Namespace) -> None:
    """Print a section's HTML content; exit 1 when the section is absent."""
    content = await section_get(args.page_id, args.heading, Auth.from_env())
    if content is None:
        print(f"Section '{args.heading}' not found", file=sys.stderr)
        sys.exit(1)
    print(content)
|
||||||
|
|
||||||
|
|
||||||
|
async def cmd_update(args: argparse.Namespace) -> None:
    """Replace a section's content; '-' reads the new body from stdin."""
    body = sys.stdin.read() if args.body == "-" else args.body
    result = await section_update(args.page_id, args.heading, body, Auth.from_env())
    report = {
        "ok": result.ok,
        "message": result.message,
        "version": result.version,
        "retries": result.retries,
    }
    print(json.dumps(report))
    if not result.ok:
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
async def cmd_append(args: argparse.Namespace) -> None:
    """Append HTML to a section; '-' reads the body from stdin."""
    body = sys.stdin.read() if args.body == "-" else args.body
    result = await section_append(args.page_id, args.heading, body, Auth.from_env())
    report = {
        "ok": result.ok,
        "message": result.message,
        "version": result.version,
    }
    print(json.dumps(report))
    if not result.ok:
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
async def cmd_delete(args: argparse.Namespace) -> None:
    """Delete a whole section (heading plus content) from the page."""
    result = await section_delete(args.page_id, args.heading, Auth.from_env())
    report = {
        "ok": result.ok,
        "message": result.message,
        "version": result.version,
    }
    print(json.dumps(report))
    if not result.ok:
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Parse CLI arguments and dispatch to the matching async command.

    The five subcommands share the same flag vocabulary, so subparser
    construction is factored into a small local helper instead of five
    near-identical stanzas.
    """
    parser = argparse.ArgumentParser(prog="confluence-collab", description="Section-based Confluence editing")
    sub = parser.add_subparsers(dest="command", required=True)

    def add_command(name: str, help_text: str, *, heading: bool = False, body: bool = False) -> None:
        # Every command takes --page-id; most also take --heading, and the
        # write commands additionally take --body ('-' means read stdin).
        p = sub.add_parser(name, help=help_text)
        p.add_argument("--page-id", required=True)
        if heading:
            p.add_argument("--heading", required=True)
        if body:
            p.add_argument("--body", required=True, help="HTML content or '-' for stdin")

    add_command("list-sections", "List page sections")
    add_command("get-section", "Get section content", heading=True)
    add_command("update-section", "Update section content", heading=True, body=True)
    add_command("append-section", "Append to section", heading=True, body=True)
    add_command("delete-section", "Delete a section", heading=True)

    args = parser.parse_args()

    # Dispatch table: subcommand name -> async handler coroutine function.
    handlers = {
        "list-sections": cmd_list,
        "get-section": cmd_get,
        "update-section": cmd_update,
        "append-section": cmd_append,
        "delete-section": cmd_delete,
    }
    asyncio.run(handlers[args.command](args))


if __name__ == "__main__":
    main()
|
||||||
93
confluence-collab/src/confluence_collab/client.py
Normal file
93
confluence-collab/src/confluence_collab/client.py
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
"""Confluence REST API v1 client using httpx."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class PageData:
    """Snapshot of a Confluence page as returned by the REST API."""

    page_id: str    # content id, normalized to str by the client
    title: str      # current page title (required again on PUT)
    body_html: str  # body in Confluence "storage" representation
    version: int    # current version number; PUT must send version + 1
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Auth:
    """Confluence authentication credentials for basic auth."""

    base_url: str   # e.g. https://example.atlassian.net/wiki
    username: str   # account email / username
    api_token: str  # API token used as the basic-auth password

    @classmethod
    def from_env(cls) -> Auth:
        """Build credentials from environment variables.

        CONFLUENCE_USERNAME wins over the legacy CONFLUENCE_USER name;
        missing variables default to empty strings rather than raising.
        """
        env = os.environ
        return cls(
            base_url=env.get("CONFLUENCE_URL", ""),
            username=env.get("CONFLUENCE_USERNAME", env.get("CONFLUENCE_USER", "")),
            api_token=env.get("CONFLUENCE_API_TOKEN", ""),
        )

    @property
    def httpx_auth(self) -> tuple[str, str]:
        """(username, token) pair in the shape httpx expects for basic auth."""
        return (self.username, self.api_token)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_page(page_id: str, auth: Auth) -> PageData:
    """Fetch a Confluence page with body.storage and version.

    Raises httpx.HTTPStatusError for any non-2xx response.
    """
    async with httpx.AsyncClient(timeout=15.0) as client:
        resp = await client.get(
            f"{auth.base_url}/rest/api/content/{page_id}",
            params={"expand": "body.storage,version,title"},
            auth=auth.httpx_auth,
        )
        resp.raise_for_status()
        data = resp.json()

    return PageData(
        page_id=str(data["id"]),
        title=data["title"],
        body_html=data["body"]["storage"]["value"],
        version=data["version"]["number"],
    )
|
||||||
|
|
||||||
|
|
||||||
|
async def put_page(
    page_id: str,
    title: str,
    body_html: str,
    version: int,
    auth: Auth,
) -> PageData:
    """Update a Confluence page with version increment.

    Raises httpx.HTTPStatusError on 409 (version conflict) or other errors.
    """
    payload = {
        "version": {"number": version},
        "title": title,
        "type": "page",
        "body": {
            "storage": {
                "value": body_html,
                "representation": "storage",
            }
        },
    }

    async with httpx.AsyncClient(timeout=15.0) as client:
        resp = await client.put(
            f"{auth.base_url}/rest/api/content/{page_id}",
            json=payload,
            auth=auth.httpx_auth,
        )
        resp.raise_for_status()
        data = resp.json()

    return PageData(
        page_id=str(data["id"]),
        title=data["title"],
        body_html=data["body"]["storage"]["value"],
        version=data["version"]["number"],
    )
|
||||||
159
confluence-collab/src/confluence_collab/editor.py
Normal file
159
confluence-collab/src/confluence_collab/editor.py
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
"""Section-level CRUD operations with 409 conflict retry."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
from confluence_collab.client import Auth, get_page, put_page
|
||||||
|
from confluence_collab.parser import Section, parse_sections, find_section, replace_section_content
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Result:
    """Outcome of a section operation."""

    ok: bool          # True when the operation succeeded
    message: str      # human-readable summary
    version: int = 0  # resulting page version (0 on failure)
    retries: int = 0  # number of 409-conflict retries performed
|
||||||
|
|
||||||
|
|
||||||
|
async def section_list(page_id: str, auth: Auth) -> list[Section]:
    """List all sections (headings) on a page."""
    current = await get_page(page_id, auth)
    return parse_sections(current.body_html)
|
||||||
|
|
||||||
|
|
||||||
|
async def section_get(page_id: str, heading: str, auth: Auth) -> str | None:
    """Return the HTML content of *heading*'s section, or None if absent."""
    page = await get_page(page_id, auth)
    match = find_section(parse_sections(page.body_html), heading)
    return None if match is None else match.content_html
|
||||||
|
|
||||||
|
|
||||||
|
async def section_update(
    page_id: str,
    heading: str,
    new_content: str,
    auth: Auth,
    *,
    max_retries: int = 3,
) -> Result:
    """Update a section's content with 409 conflict retry.

    Each attempt re-fetches the page, splices the new content into the
    target section, and PUTs with version+1.  A 409 means a concurrent
    save happened; back off exponentially and try again, up to
    *max_retries* extra attempts.
    """
    for attempt in range(max_retries + 1):
        page = await get_page(page_id, auth)
        target = find_section(parse_sections(page.body_html), heading)
        if target is None:
            return Result(ok=False, message=f"Section '{heading}' not found")

        new_body = replace_section_content(page.body_html, target, new_content)

        try:
            updated = await put_page(page_id, page.title, new_body, page.version + 1, auth)
        except httpx.HTTPStatusError as exc:
            if exc.response.status_code == 409 and attempt < max_retries:
                # Exponential backoff: 0.2s, 0.4s, 0.8s, ...
                await asyncio.sleep(0.1 * (2 ** (attempt + 1)))
                continue
            return Result(
                ok=False,
                message=f"Failed after {attempt} retries: {exc}",
                retries=attempt,
            )

        return Result(
            ok=True,
            message=f"Section '{heading}' updated",
            version=updated.version,
            retries=attempt,
        )
|
||||||
|
|
||||||
|
|
||||||
|
async def section_append(
    page_id: str,
    heading: str,
    append_content: str,
    auth: Auth,
    *,
    max_retries: int = 3,
) -> Result:
    """Append content to the end of a section, retrying on 409 conflicts."""
    for attempt in range(max_retries + 1):
        page = await get_page(page_id, auth)
        target = find_section(parse_sections(page.body_html), heading)
        if target is None:
            return Result(ok=False, message=f"Section '{heading}' not found")

        # Existing section content followed by the appended fragment.
        merged = target.content_html + append_content
        new_body = replace_section_content(page.body_html, target, merged)

        try:
            updated = await put_page(page_id, page.title, new_body, page.version + 1, auth)
        except httpx.HTTPStatusError as exc:
            if exc.response.status_code == 409 and attempt < max_retries:
                await asyncio.sleep(0.1 * (2 ** (attempt + 1)))
                continue
            return Result(
                ok=False,
                message=f"Failed after {attempt} retries: {exc}",
                retries=attempt,
            )

        return Result(
            ok=True,
            message=f"Content appended to '{heading}'",
            version=updated.version,
            retries=attempt,
        )
|
||||||
|
|
||||||
|
|
||||||
|
async def section_delete(
    page_id: str,
    heading: str,
    auth: Auth,
    *,
    max_retries: int = 3,
) -> Result:
    """Delete a section (heading + content), retrying on 409 conflicts."""
    for attempt in range(max_retries + 1):
        page = await get_page(page_id, auth)
        target = find_section(parse_sections(page.body_html), heading)
        if target is None:
            return Result(ok=False, message=f"Section '{heading}' not found")

        # Splice out everything from the heading tag through the section end.
        body = page.body_html
        new_body = body[:target.start_offset] + body[target.end_offset:]

        try:
            updated = await put_page(page_id, page.title, new_body, page.version + 1, auth)
        except httpx.HTTPStatusError as exc:
            if exc.response.status_code == 409 and attempt < max_retries:
                await asyncio.sleep(0.1 * (2 ** (attempt + 1)))
                continue
            return Result(
                ok=False,
                message=f"Failed after {attempt} retries: {exc}",
                retries=attempt,
            )

        return Result(
            ok=True,
            message=f"Section '{heading}' deleted",
            version=updated.version,
            retries=attempt,
        )
|
||||||
135
confluence-collab/src/confluence_collab/parser.py
Normal file
135
confluence-collab/src/confluence_collab/parser.py
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
"""Section parsing for Confluence storage-format HTML using BeautifulSoup."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup, Tag
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Section:
    """A heading-delimited section of a Confluence page."""

    heading: str       # plain text of the heading
    level: int         # heading level, 1-6
    content_html: str  # HTML between this heading and the next same-or-higher-level heading
    start_offset: int  # character offset of the heading tag start in original HTML
    end_offset: int    # character offset where this section's content ends
|
||||||
|
|
||||||
|
|
||||||
|
def parse_sections(html: str) -> list[Section]:
    """Parse Confluence storage-format HTML into heading-delimited sections.

    Each section spans from a heading tag (h1-h6) to the next heading of the
    same or higher level (lower number), or end of document.  Offsets are
    reported against the ORIGINAL html string so callers can splice it.

    Fixes over the naive approach:
    - a moving search cursor, so pages with repeated identical headings map
      each heading to its own occurrence instead of all hitting the first;
    - when the serialized tag is not found verbatim (lxml may normalize the
      markup) the regex fallback supplies both start AND end offsets, so the
      content start is not mis-computed from ``len(str(tag))``.
    """
    soup = BeautifulSoup(html, "lxml")
    body = soup.body if soup.body else soup

    heading_pattern = re.compile(r"^h([1-6])$")
    # (start, content_start, level, text) per heading, in document order.
    found: list[tuple[int, int, int, str]] = []

    cursor = 0  # search position in the original html; advances past each heading
    for tag in body.find_all(heading_pattern):
        level = int(tag.name[1])
        text = tag.get_text(strip=True)
        tag_str = str(tag)
        pos = html.find(tag_str, cursor)
        if pos != -1:
            head_end = pos + len(tag_str)
        else:
            # Fallback: loose regex search for a same-level heading wrapping
            # the text, starting from the cursor to respect document order.
            m = re.compile(
                rf"<h{level}[^>]*>.*?{re.escape(text)}.*?</h{level}>",
                re.IGNORECASE | re.DOTALL,
            ).search(html, cursor)
            if not m:
                continue  # heading not locatable in the raw html; skip it
            pos, head_end = m.start(), m.end()
        cursor = head_end
        found.append((pos, head_end, level, text))

    sections: list[Section] = []
    for i, (pos, content_start, level, text) in enumerate(found):
        # Section ends at the next same-or-higher-level heading, else EOF.
        end = len(html)
        for next_pos, _, next_level, _ in found[i + 1:]:
            if next_level <= level:
                end = next_pos
                break
        sections.append(Section(
            heading=text,
            level=level,
            content_html=html[content_start:end],
            start_offset=pos,
            end_offset=end,
        ))

    return sections
|
||||||
|
|
||||||
|
|
||||||
|
def find_section(
    sections: list[Section],
    heading: str,
    *,
    fuzzy: bool = True,
) -> Section | None:
    """Find a section by heading text.

    If fuzzy=True, matches case-insensitively on stripped text, falling
    back to a substring match when no exact match exists.  With
    fuzzy=False only an exact string match is accepted.
    """
    def norm(text: str) -> str:
        return text.strip().lower() if fuzzy else text

    wanted = norm(heading)

    exact = next((sec for sec in sections if norm(sec.heading) == wanted), None)
    if exact is not None or not fuzzy:
        return exact

    # Fuzzy fallback: needle contained anywhere in the normalized heading.
    return next(
        (sec for sec in sections if wanted in sec.heading.strip().lower()),
        None,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def replace_section_content(html: str, section: Section, new_content: str) -> str:
    """Replace the content of a section in the original HTML.

    Preserves the heading tag itself, replaces only the content between
    this heading's close tag and the next section's start.

    Raises ValueError if the heading cannot be re-located at the
    section's recorded offset.
    """
    # Re-locate the full <hN>...</hN> element starting at the recorded offset.
    heading_re = re.compile(
        rf"<h{section.level}[^>]*>.*?{re.escape(section.heading)}.*?</h{section.level}>",
        re.IGNORECASE | re.DOTALL,
    )
    match = heading_re.search(html, section.start_offset)
    if match is None:
        raise ValueError(f"Cannot find heading '{section.heading}' at expected offset")

    before = html[:match.end()]          # everything up to and including the heading
    after = html[section.end_offset:]    # everything from the next section onward
    return before + new_content + after
|
||||||
94
confluence-collab/src/confluence_collab/proxy.py
Normal file
94
confluence-collab/src/confluence_collab/proxy.py
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
"""Composite MCP server: proxies mcp-atlassian + adds section tools.
|
||||||
|
|
||||||
|
Spawns mcp-atlassian as a subprocess (stdio), proxies all its tools, and
|
||||||
|
registers the confluence_section_* tools from this package. Claude Code
|
||||||
|
sees a single MCP server with all tools under one prefix.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from mcp.server.fastmcp import FastMCP
|
||||||
|
from mcp.client.session import ClientSession
|
||||||
|
from mcp.client.stdio import StdioServerParameters, stdio_client
|
||||||
|
|
||||||
|
from confluence_collab.server import (
|
||||||
|
confluence_section_list,
|
||||||
|
confluence_section_get,
|
||||||
|
confluence_section_update,
|
||||||
|
confluence_section_append,
|
||||||
|
confluence_section_delete,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger = logging.getLogger("confluence-collab-proxy")
|
||||||
|
|
||||||
|
mcp = FastMCP("atlassian-with-sections")

# Re-register the section tools on this composite server instance: the
# @mcp.tool decorations in server.py bound them to a *different* FastMCP,
# so they must be attached to this one explicitly.
for _section_tool in (
    confluence_section_list,
    confluence_section_get,
    confluence_section_update,
    confluence_section_append,
    confluence_section_delete,
):
    mcp.tool()(_section_tool)
|
||||||
|
|
||||||
|
|
||||||
|
async def _proxy_upstream_tools() -> None:
    """Connect to the mcp-atlassian subprocess and proxy its tools."""
    server_params = StdioServerParameters(
        command="uvx",
        args=["--python", "3.13", "mcp-atlassian"],
        env=dict(os.environ),
    )

    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()

            upstream = await session.list_tools()
            logger.info("Proxying %d upstream tools from mcp-atlassian", len(upstream.tools))

            for tool in upstream.tools:
                _register_proxy_tool(tool, session)

            # Block forever: the stdio session must stay open while serving.
            await asyncio.Event().wait()
|
||||||
|
|
||||||
|
|
||||||
|
def _register_proxy_tool(tool, session: ClientSession) -> None:
    """Register a proxied tool from the upstream MCP server."""

    async def forward(**kwargs):
        # Delegate the call to the upstream server, then flatten its
        # content items into one newline-joined text blob.
        outcome = await session.call_tool(tool.name, kwargs)
        parts = [item.text for item in outcome.content if hasattr(item, "text")]
        return "\n".join(parts) if parts else ""

    forward.__name__ = tool.name
    forward.__doc__ = tool.description or ""

    # NOTE(review): the upstream tool's parameter schema is not forwarded;
    # FastMCP will introspect only the bare **kwargs signature — confirm
    # this is acceptable for clients that rely on schemas.
    mcp.tool(name=tool.name, description=tool.description or "")(forward)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Run the composite MCP server.

    Note: The proxy approach requires running the upstream mcp-atlassian
    as a subprocess. For simpler deployment, use the standalone server.py
    which only provides section tools, and keep mcp-atlassian separate.
    """
    # Only the locally registered section tools are served here;
    # _proxy_upstream_tools is not invoked by this entry point.
    mcp.run(transport="stdio")


if __name__ == "__main__":
    main()
|
||||||
113
confluence-collab/src/confluence_collab/server.py
Normal file
113
confluence-collab/src/confluence_collab/server.py
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
"""MCP server exposing section-based Confluence editing tools."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
from mcp.server.fastmcp import FastMCP
|
||||||
|
|
||||||
|
from confluence_collab.client import Auth
|
||||||
|
from confluence_collab.editor import section_list, section_get, section_update, section_append, section_delete
|
||||||
|
|
||||||
|
mcp = FastMCP("confluence-collab")


def _get_auth() -> Auth:
    # Resolve credentials per call so environment changes are picked up.
    return Auth.from_env()
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def confluence_section_list(page_id: str) -> str:
    """List all sections (headings) on a Confluence page.

    Returns a JSON array of {heading, level} objects showing the page structure.
    """
    sections = await section_list(page_id, _get_auth())
    outline = [{"heading": sec.heading, "level": sec.level} for sec in sections]
    return json.dumps(outline, indent=2)
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def confluence_section_get(page_id: str, heading: str) -> str:
    """Get the HTML content of a specific section by heading text.

    Uses fuzzy matching (case-insensitive, partial match).
    """
    content = await section_get(page_id, heading, _get_auth())
    if content is not None:
        return content
    return f"Section '{heading}' not found on page {page_id}"
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def confluence_section_update(page_id: str, heading: str, body: str) -> str:
    """Update the content of a section identified by heading.

    Replaces only the target section, preserving the rest of the page.
    Handles version conflicts with automatic retry (exponential backoff).

    Args:
        page_id: Confluence page ID
        heading: Section heading text to find (fuzzy matched)
        body: New HTML content for the section (replaces existing content between headings)
    """
    result = await section_update(page_id, heading, body, _get_auth())
    report = {
        "status": "ok" if result.ok else "error",
        "message": result.message,
        "version": result.version,
        "retries": result.retries,
    }
    return json.dumps(report)
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def confluence_section_append(page_id: str, heading: str, body: str) -> str:
    """Append HTML content to the end of a section.

    Adds content after the existing section content, before the next heading.

    Args:
        page_id: Confluence page ID
        heading: Section heading text to find (fuzzy matched)
        body: HTML content to append
    """
    result = await section_append(page_id, heading, body, _get_auth())
    report = {
        "status": "ok" if result.ok else "error",
        "message": result.message,
        "version": result.version,
    }
    return json.dumps(report)
|
||||||
|
|
||||||
|
|
||||||
|
@mcp.tool()
async def confluence_section_delete(page_id: str, heading: str) -> str:
    """Delete an entire section (heading + content) from a page.

    Removes the heading tag and all content up to the next same-or-higher-level heading.

    Args:
        page_id: Confluence page ID
        heading: Section heading text to find (fuzzy matched)
    """
    result = await section_delete(page_id, heading, _get_auth())
    report = {
        "status": "ok" if result.ok else "error",
        "message": result.message,
        "version": result.version,
    }
    return json.dumps(report)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Run the MCP server over the stdio transport."""
    mcp.run(transport="stdio")


if __name__ == "__main__":
    main()
|
||||||
Reference in New Issue
Block a user