async def query_model_direct(
    model: str,
    message: str,
    discussion: Discussion | None,
    project_name: str,
) -> str:
    """Query a single model directly with optional discussion context.

    Used for @mention messages where the user addresses a specific model.
    If a discussion is provided, its full prior context is included so the
    model can reference earlier responses.

    Args:
        model: Model short name (e.g., "claude", "gpt", "gemini").
        message: The direct message to the model.
        discussion: Optional Discussion object for context (with
            eager-loaded rounds/messages so build_context needs no I/O).
        project_name: Project name interpolated into the system prompt.

    Returns:
        The model's response text, or a "[Error: ...]" string if the
        query fails — callers receive a displayable string either way.
    """
    client = get_ai_client()

    # System prompt marks this as a direct (1:1) message rather than a
    # regular discussion round, so the model knows it was singled out.
    system_prompt = f"""You are participating in a discussion about: {project_name}

This is a direct message to you specifically. The user has chosen to address you
directly for your unique perspective.

Respond helpfully and concisely."""

    # With a discussion, prepend the full prior transcript; the "[Direct
    # to you]" prefix distinguishes this message from round prompts.
    if discussion is not None:
        messages = build_context(discussion)
        messages.append({"role": "user", "content": f"[Direct to you]: {message}"})
    else:
        messages = [{"role": "user", "content": message}]

    try:
        response = await client.complete(
            model=model,
            messages=messages,
            system_prompt=system_prompt,
        )
        logger.info("Direct query to %s successful", model)
        return response
    except Exception as e:
        # Deliberate best-effort boundary: any provider failure is surfaced
        # to the user as an error string instead of propagating.
        # logger.exception (not .error) so the traceback is preserved.
        logger.exception("Direct query to %s failed: %s", model, e)
        return f"[Error: {e}]"