feat: add pipeline engine with approval flow and file triggers

Sequential step executor (script, claude_prompt, approval, api_call,
template, skyvern placeholder), reaction-based approvals, file upload
trigger matching, portal API state sync.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Christian Gick
2026-03-18 17:06:07 +02:00
parent f4feb3bfe1
commit bd8d96335e
12 changed files with 755 additions and 1 deletions

View File

@@ -0,0 +1,32 @@
"""Claude prompt step — call LLM via LiteLLM proxy."""
import logging
logger = logging.getLogger(__name__)
async def execute_claude_prompt(
    config: dict,
    llm=None,
    default_model: str = "claude-haiku",
    escalation_model: str = "claude-sonnet",
    **_kwargs,
) -> str:
    """Send a prompt to Claude through the LiteLLM proxy and return the text.

    Args:
        config: Step configuration. Required key ``prompt`` (non-empty string).
            Optional keys: ``model`` — ``"escalation"`` routes to the
            escalation model, anything else (including ``"default"``) uses the
            default model; ``max_tokens`` — completion token cap
            (defaults to 4096, the previously hard-coded value).
        llm: Async OpenAI-compatible client exposing
            ``chat.completions.create``. Required despite the ``None`` default.
        default_model: Model name used unless escalation is requested.
        escalation_model: Model name used when ``config["model"] == "escalation"``.
        **_kwargs: Ignored; absorbs extra step-executor arguments.

    Returns:
        The assistant message content, normalized to ``""`` when the API
        returns ``None`` content.

    Raises:
        RuntimeError: If no LLM client was supplied.
        ValueError: If ``config`` has no non-empty ``prompt``.
    """
    # `is None` rather than truthiness: a client object whose __bool__ is
    # falsy (e.g. some mocks) must still be accepted.
    if llm is None:
        raise RuntimeError("LLM client not configured")
    prompt = config.get("prompt", "")
    if not prompt:
        raise ValueError("claude_prompt step requires 'prompt' field")
    model_name = config.get("model", "default")
    model = escalation_model if model_name == "escalation" else default_model
    response = await llm.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
        # Previously hard-coded to 4096; now overridable per step.
        max_tokens=config.get("max_tokens", 4096),
    )
    # content may legitimately be None (e.g. empty completion); return "".
    return response.choices[0].message.content or ""