"""Matrix AI voice agent (CF-1147).

Bot @ai:agiliton.eu accepts room invites and dispatches a LiveKit agent.
The agent joins the call with an STT (Groq Whisper) -> LLM (Sonnet) ->
TTS (ElevenLabs) pipeline, all routed through LiteLLM.
"""
import os
|
|
import logging
|
|
|
|
from livekit.agents import Agent, AgentSession, AgentServer, JobContext, JobProcess, cli
|
|
from livekit.plugins import openai as lk_openai, elevenlabs, silero
|
|
|
|
logger = logging.getLogger("matrix-ai-agent")
|
|
|
|
LITELLM_URL = os.environ["LITELLM_BASE_URL"]
|
|
LITELLM_KEY = os.environ.get("LITELLM_API_KEY", "not-needed")
|
|
|
|
SYSTEM_PROMPT = """You are a helpful voice assistant in a Matrix call.
|
|
Rules:
|
|
- Keep answers SHORT — 1-3 sentences max
|
|
- Be direct, no filler words
|
|
- If the user wants more detail, they will ask
|
|
- Speak naturally as in a conversation"""
|
|
|
|
server = AgentServer()


def prewarm(proc: JobProcess):
|
|
proc.userdata["vad"] = silero.VAD.load()
|
|
|
|
|
|
server.setup_fnc = prewarm


@server.rtc_session(agent_name="matrix-ai")
|
|
async def entrypoint(ctx: JobContext):
|
|
model = os.environ.get("LITELLM_MODEL", "claude-sonnet")
|
|
voice_id = os.environ.get("ELEVENLABS_VOICE_ID", "21m00Tcm4TlvDq8ikWAM")
|
|
|
|
session = AgentSession(
|
|
stt=lk_openai.STT(
|
|
base_url=LITELLM_URL,
|
|
api_key=LITELLM_KEY,
|
|
model="whisper",
|
|
),
|
|
llm=lk_openai.LLM(
|
|
base_url=LITELLM_URL,
|
|
api_key=LITELLM_KEY,
|
|
model=model,
|
|
),
|
|
tts=elevenlabs.TTS(
|
|
voice=voice_id,
|
|
model="eleven_multilingual_v2",
|
|
),
|
|
vad=ctx.proc.userdata["vad"],
|
|
)
|
|
|
|
agent = Agent(instructions=SYSTEM_PROMPT)
|
|
await session.start(agent=agent, room=ctx.room)
|
|
await session.generate_reply(instructions="Greet the user briefly.")


if __name__ == "__main__":
|
|
cli.run_app(server)