feat(04-02): implement /ask command for single model queries
Create discussion.py with ask_command handler that:
- Validates model name against MODEL_MAP
- Shows usage when called without arguments
- Sends typing indicator while waiting for AI
- Returns formatted response with model name
- Includes optional project context if project is selected
Register CommandHandler("ask", ask_command) in handlers/__init__.py.
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
821b419271
commit
32983c9301
2 changed files with 62 additions and 0 deletions
|
|
@ -7,6 +7,7 @@ project management, discussion commands, and export functionality.
|
|||
from telegram.ext import Application, CommandHandler
|
||||
|
||||
from moai.bot.handlers.commands import help_command, start_command
|
||||
from moai.bot.handlers.discussion import ask_command
|
||||
from moai.bot.handlers.projects import project_command, projects_command
|
||||
from moai.bot.handlers.status import status_command
|
||||
|
||||
|
|
@ -27,3 +28,6 @@ def register_handlers(app: Application) -> None:
|
|||
# Project management
|
||||
app.add_handler(CommandHandler("projects", projects_command))
|
||||
app.add_handler(CommandHandler("project", project_command))
|
||||
|
||||
# Discussion / Q&A
|
||||
app.add_handler(CommandHandler("ask", ask_command))
|
||||
|
|
|
|||
58
src/moai/bot/handlers/discussion.py
Normal file
58
src/moai/bot/handlers/discussion.py
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
"""Discussion handlers for MoAI bot."""
|
||||
|
||||
from telegram import Update
|
||||
from telegram.ext import ContextTypes
|
||||
|
||||
from moai.bot.handlers.projects import get_selected_project
|
||||
from moai.core.ai_client import MODEL_MAP, get_ai_client
|
||||
|
||||
|
||||
async def ask_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
    """Handle /ask <model> <question> command.

    Queries a single AI model and replies with its answer, prefixed with the
    model's name. If a project is currently selected, its name is prepended
    to the system prompt as lightweight context.

    Args:
        update: Incoming Telegram update carrying the user's message.
        context: Handler context; ``context.args`` holds the words after /ask.

    Examples:
        /ask claude What is Python?
        /ask gpt Explain async/await
    """
    message = update.message
    if message is None:
        # Nothing to reply to (e.g. an edited-message or channel-post update).
        return

    args = context.args or []

    if len(args) < 2:
        available = ", ".join(MODEL_MAP.keys())
        await message.reply_text(
            f"Usage: /ask <model> <question>\n"
            f"Available models: {available}\n\n"
            f"Example: /ask claude What is Python?"
        )
        return

    model_name = args[0].lower()
    question = " ".join(args[1:])

    # Validate model
    if model_name not in MODEL_MAP:
        available = ", ".join(MODEL_MAP.keys())
        await message.reply_text(f"Unknown model: {model_name}\nAvailable: {available}")
        return

    # Get project context if available (optional for /ask)
    project = await get_selected_project(context)
    project_context = f"Project: {project.name}\n" if project else ""

    # Send "typing" indicator while waiting for AI
    await message.chat.send_action("typing")

    try:
        client = get_ai_client()
        response = await client.complete(
            model=model_name,
            messages=[{"role": "user", "content": question}],
            system_prompt=f"{project_context}You are a helpful AI assistant.",
        )
    except Exception as e:  # Boundary handler: surface the failure to the user.
        await message.reply_text(f"Error: {e}")
        return

    # Format response with model name. AI output frequently contains
    # characters (unbalanced *, _, `) that break Telegram's Markdown parser;
    # previously that raised inside the same try and the user got a parse
    # error instead of the answer. Fall back to plain text so the answer is
    # always delivered.
    try:
        await message.reply_text(
            f"*{model_name.title()}:*\n\n{response}",
            parse_mode="Markdown",
        )
    except Exception:
        await message.reply_text(f"{model_name.title()}:\n\n{response}")
|
||||
Loading…
Add table
Reference in a new issue