fix(MAT-273): remove Skyvern (archived) + fix CI test failures
Some checks failed
Build & Deploy / test (push) Successful in 10s
Tests / test (push) Successful in 10s
Build & Deploy / build-and-deploy (push) Failing after 11m26s

- Remove Skyvern service + DB from docker-compose.yml
- Remove cron/browser_executor.py and pipelines/steps/skyvern.py
- Remove browser_scrape from cron executor dispatch
- Update tests to reflect Skyvern removal
- Fix test_needs_query_rewrite false positive ('das' is a valid trigger)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Christian Gick
2026-04-16 13:23:41 +03:00
parent 6d79b184b9
commit 0c0a424004
8 changed files with 6 additions and 413 deletions

View File

@@ -1,192 +0,0 @@
"""Browser scrape executor — dispatches jobs to Skyvern API."""
import asyncio
import json
import logging
import os
import httpx
# Module-level logger for the browser-scrape executor.
logger = logging.getLogger(__name__)
# Skyvern connection settings, overridable via environment; the default base
# URL targets the docker-compose service name.
SKYVERN_BASE_URL = os.environ.get("SKYVERN_BASE_URL", "http://skyvern:8000")
# Empty key is treated as "not configured" and aborts jobs before dispatch.
SKYVERN_API_KEY = os.environ.get("SKYVERN_API_KEY", "")
POLL_INTERVAL = 5 # seconds between task-status polls
MAX_POLL_TIME = 300 # 5 minutes — give up polling after this
async def _create_task(url: str, goal: str, extraction_goal: str = "",
                       extraction_schema: dict | None = None,
                       credential_id: str | None = None, totp_identifier: str | None = None) -> str:
    """Submit a task to the Skyvern API and return the new task_id.

    Builds the request payload from the required navigation/extraction
    goals, attaching the schema and credential fields only when they are
    truthy. Raises ``httpx.HTTPStatusError`` on a non-2xx response.
    """
    payload: dict = {
        "url": url,
        "navigation_goal": goal,
        "data_extraction_goal": extraction_goal or goal,
    }
    # Optional fields are omitted entirely when unset/falsy.
    for key, value in (
        ("extracted_information_schema", extraction_schema),
        ("credential_id", credential_id),
        ("totp_identifier", totp_identifier),
    ):
        if value:
            payload[key] = value
    headers = {
        "Content-Type": "application/json",
        "x-api-key": SKYVERN_API_KEY,
    }
    async with httpx.AsyncClient(timeout=60.0) as client:
        response = await client.post(
            f"{SKYVERN_BASE_URL}/api/v1/tasks",
            headers=headers,
            json=payload,
        )
        response.raise_for_status()
        return response.json()["task_id"]
async def _poll_task(run_id: str) -> dict:
    """Poll the Skyvern task endpoint until it reaches a terminal state.

    Checks the task status every POLL_INTERVAL seconds and returns the
    final task payload. If the task has not reached a terminal state
    within MAX_POLL_TIME seconds, returns a synthetic timed_out result.
    """
    terminal_states = ("completed", "failed", "terminated", "timed_out")
    async with httpx.AsyncClient(timeout=60.0) as client:
        # Same budget as the original elapsed-counter loop: one request per
        # POLL_INTERVAL until MAX_POLL_TIME is exhausted.
        for _ in range(MAX_POLL_TIME // POLL_INTERVAL):
            response = await client.get(
                f"{SKYVERN_BASE_URL}/api/v1/tasks/{run_id}",
                headers={"x-api-key": SKYVERN_API_KEY},
            )
            response.raise_for_status()
            payload = response.json()
            if payload.get("status", "") in terminal_states:
                return payload
            await asyncio.sleep(POLL_INTERVAL)
    return {"status": "timed_out", "error": f"Polling exceeded {MAX_POLL_TIME}s"}
def _format_extraction(data: dict) -> str:
    """Render a Skyvern result payload as Matrix-friendly markdown.

    Prefers a list of dict records (rendered as a markdown bullet list
    with title/link when recognizable fields exist); otherwise falls back
    to pretty-printed JSON, or plain ``str()`` for scalars.
    """
    payload = data.get("extracted_information") or data.get("extracted_data")
    if not payload:
        return "No data extracted."
    # Locate a list of records: either the payload itself, or the first
    # non-empty list value inside a dict (e.g. {"news": [...]}).
    records = None
    if isinstance(payload, list):
        records = payload
    elif isinstance(payload, dict):
        records = next(
            (v for v in payload.values() if isinstance(v, list) and v),
            None,
        )
    if records and isinstance(records[0], dict):
        # Fields already rendered as title/link are excluded from details.
        rendered_keys = {"title", "name", "headline", "link", "url", "href"}
        bullets = []
        for record in records:
            heading = record.get("title") or record.get("name") or record.get("headline") or ""
            href = record.get("link") or record.get("url") or record.get("href") or ""
            extras = " · ".join(
                str(v) for k, v in record.items()
                if k not in rendered_keys and v
            )
            if heading and href:
                bullet = f"- [{heading}]({href})"
            elif heading:
                bullet = f"- {heading}"
            else:
                # No recognizable title — dump the whole record compactly.
                bullet = f"- {json.dumps(record, ensure_ascii=False)}"
            if extras:
                bullet += f" \n  {extras}"
            bullets.append(bullet)
        return "\n".join(bullets)
    # Fallback: compact JSON for structured payloads, str() otherwise.
    if isinstance(payload, (dict, list)):
        return json.dumps(payload, indent=2, ensure_ascii=False)
    return str(payload)
async def execute_browser_scrape(job: dict, send_text, **_kwargs) -> dict:
    """Execute a browser-based scraping job via Skyvern.

    Args:
        job: Job dict; reads "targetRoom", "name", "config" (url, goal/query,
            extractionGoal, extractionSchema) and optional "browserProfile"
            (status, credentialId, totpIdentifier).
        send_text: async callable (room_id, text) used to deliver results
            and error notices to the target Matrix room.

    Returns:
        {"status": "success"} on completion, otherwise
        {"status": "error", "error": <message>}.
    """
    target_room = job["targetRoom"]
    config = job.get("config", {})
    url = config.get("url", "")
    goal = config.get("goal", config.get("query", f"Scrape content from {url}"))
    extraction_goal = config.get("extractionGoal", "") or goal
    extraction_schema = config.get("extractionSchema")
    browser_profile = job.get("browserProfile")
    # Guard clauses: a target URL and an API key are both mandatory.
    if not url:
        await send_text(target_room, f"**{job['name']}**: No URL configured.")
        return {"status": "error", "error": "No URL configured"}
    if not SKYVERN_API_KEY:
        await send_text(
            target_room,
            f"**{job['name']}**: Browser automation not configured (missing API key).",
        )
        return {"status": "error", "error": "SKYVERN_API_KEY not set"}
    # Map browser profile fields to Skyvern credential; refuse to run with
    # an expired credential rather than failing mid-task.
    credential_id = None
    totp_identifier = None
    if browser_profile:
        if browser_profile.get("status") == "expired":
            await send_text(
                target_room,
                f"**{job['name']}**: Browser credential expired. "
                f"Update at https://matrixhost.eu/settings/automations",
            )
            return {"status": "error", "error": "Browser credential expired"}
        credential_id = browser_profile.get("credentialId")
        totp_identifier = browser_profile.get("totpIdentifier")
    try:
        run_id = await _create_task(
            url=url,
            goal=goal,
            extraction_goal=extraction_goal,
            extraction_schema=extraction_schema,
            credential_id=credential_id,
            totp_identifier=totp_identifier,
        )
        logger.info("Skyvern task created: %s for job %s", run_id, job["name"])
        result = await _poll_task(run_id)
        status = result.get("status", "unknown")
        if status == "completed":
            extracted = _format_extraction(result)
            msg = f"**{job['name']}** — {url}\n\n{extracted}"
            # Truncate if too long for Matrix.
            if len(msg) > 4000:
                msg = msg[:3950] + "\n\n_(truncated)_"
            await send_text(target_room, msg)
            return {"status": "success"}
        else:
            error = result.get("error") or result.get("failure_reason") or status
            # Fix: status and error were concatenated with no separator
            # ("Browser task failedTimeout"); use an em-dash like the generic
            # exception handler below for a readable message.
            await send_text(
                target_room,
                f"**{job['name']}**: Browser task {status} — {error}",
            )
            return {"status": "error", "error": str(error)}
    except httpx.HTTPStatusError as exc:
        error_msg = f"Skyvern API error: {exc.response.status_code}"
        logger.error("Browser executor failed: %s", error_msg, exc_info=True)
        await send_text(target_room, f"**{job['name']}**: {error_msg}")
        return {"status": "error", "error": error_msg}
    except Exception as exc:
        error_msg = str(exc)
        logger.error("Browser executor failed: %s", error_msg, exc_info=True)
        await send_text(target_room, f"**{job['name']}**: Browser task failed — {error_msg}")
        return {"status": "error", "error": error_msg}

View File

@@ -3,14 +3,12 @@
import logging
from .brave_search import execute_brave_search
from .browser_executor import execute_browser_scrape
from .reminder import execute_reminder
logger = logging.getLogger(__name__)
# Dispatch table: cron job "type" string → async executor coroutine.
EXECUTORS = {
    "brave_search": execute_brave_search,
    "browser_scrape": execute_browser_scrape,
    "reminder": execute_reminder,
}