From fecf99ef602c85cc2768b7199ea24a3ff9a13ed6 Mon Sep 17 00:00:00 2001
From: Christian Gick
Date: Sun, 1 Mar 2026 18:27:43 +0200
Subject: [PATCH] chore(MAT-13): Switch chunk summarization from claude-haiku
 to gemini-flash

Reduces cost for conversation chunk summarization in live indexing.

Co-Authored-By: Claude Opus 4.6
---
 bot.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bot.py b/bot.py
index b6de7d4..65955df 100644
--- a/bot.py
+++ b/bot.py
@@ -1231,7 +1231,7 @@ class Bot:
         chunk_text = f"User: {user_message}\nAssistant: {ai_reply}"
         try:
             resp = await self.llm.chat.completions.create(
-                model="claude-haiku",
+                model="gemini-flash",
                 messages=[
                     {"role": "system", "content": (
                         "Summarize this conversation exchange in 1-2 sentences for search indexing. "