feat(05-02): implement /open command handler

Add /open command handler that queries all project models in parallel.
Creates Discussion and Round records, persists Message for each response.
Shows typing indicator and formats output with model names.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Mikkel Georgsen 2026-01-16 19:37:08 +00:00
parent 81b5bfff35
commit cef1898352
2 changed files with 83 additions and 1 deletions

View file

@ -7,7 +7,7 @@ project management, discussion commands, and export functionality.
from telegram.ext import Application, CommandHandler

from moai.bot.handlers.commands import help_command, start_command
-from moai.bot.handlers.discussion import ask_command
+from moai.bot.handlers.discussion import ask_command, open_command
from moai.bot.handlers.projects import project_command, projects_command
from moai.bot.handlers.status import status_command
@ -31,3 +31,4 @@ def register_handlers(app: Application) -> None:
    # Discussion / Q&A
    app.add_handler(CommandHandler("ask", ask_command))
app.add_handler(CommandHandler("open", open_command))

View file

@ -5,6 +5,9 @@ from telegram.ext import ContextTypes
from moai.bot.handlers.projects import get_selected_project
from moai.core.ai_client import MODEL_MAP, get_ai_client
from moai.core.models import DiscussionType, RoundType
from moai.core.orchestrator import query_models_parallel
from moai.core.services.discussion import create_discussion, create_message, create_round
async def ask_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
@ -56,3 +59,81 @@ async def ask_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> Non
        )
    except Exception as e:
        await update.message.reply_text(f"Error: {e}")
async def open_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
    """Handle /open <question> - fan the question out to every project model.

    Requires a selected project that has at least one configured model.
    Persists a Discussion with OPEN type and a single PARALLEL round, then
    stores one Message per model response and replies with all answers.

    Examples:
        /open What is Python?
        /open How should we approach this problem?
    """
    args = context.args or []
    if not args:
        # No question supplied - show usage and bail out.
        await update.message.reply_text(
            "Usage: /open <question>\n\nExample: /open What are the pros and cons of microservices?"
        )
        return
    question = " ".join(args)

    # A project must be selected before we know which models to query.
    project = await get_selected_project(context)
    if project is None:
        await update.message.reply_text("No project selected. Use /project select <name> first.")
        return

    # The selected project must also have at least one model configured.
    if not project.models:
        await update.message.reply_text(
            "No models configured for this project.\n"
            "Use /project models claude,gpt,gemini to set models."
        )
        return

    # Let the user know work is in progress while the models respond.
    await update.message.chat.send_action("typing")

    try:
        # Record the discussion and its single parallel round up front.
        discussion = await create_discussion(
            project_id=project.id,
            question=question,
            discussion_type=DiscussionType.OPEN,
        )
        parallel_round = await create_round(
            discussion_id=discussion.id,
            round_number=1,
            round_type=RoundType.PARALLEL,
        )

        # Send the same question to every configured model at once.
        answers = await query_models_parallel(
            models=project.models,
            question=question,
            project_name=project.name,
        )

        # Persist each answer and accumulate the outgoing reply as we go.
        reply_parts = [f"*Question:* {question}\n"]
        for model, response in answers.items():
            await create_message(
                round_id=parallel_round.id,
                model=model,
                content=response,
            )
            reply_parts.append(f"*{model.title()}:*\n{response}\n")

        # One combined message with every model's answer, Markdown-formatted.
        await update.message.reply_text(
            "\n".join(reply_parts),
            parse_mode="Markdown",
        )
    except Exception as e:
        # Boundary handler: surface DB/AI failures to the chat instead of crashing.
        await update.message.reply_text(f"Error: {e}")