# wd666/orchestrator/research_manager.py
# Last modified: 2026-01-07 11:02:05 +08:00 (52 lines, 2.2 KiB, Python)
from typing import List, Dict, Generator
from dataclasses import dataclass
from agents.research_agent import ResearchAgent
from utils.llm_client import LLMClient
import config
@dataclass
class ResearchConfig:
    """Configuration for one research run.

    Bundles the topic/context with the model identifier used by each of the
    three workflow stages (planning, researching, writing), so every stage
    can be backed by a different LLM.
    """

    topic: str
    context: str = ""
    # Per-stage model identifiers; defaults pick a reasonable model per role.
    planner_model: str = "gpt-4o"
    researcher_model: str = "gemini-1.5-pro"
    writer_model: str = "claude-3-5-sonnet-20241022"
class ResearchManager:
    """Manages the Deep Research workflow.

    Coordinates three role-specific agents (planner, researcher, writer),
    each backed by its own LLM client so every stage of the workflow can
    use a different model. Agents are created lazily via create_agents().
    """

    def __init__(self, api_key: str, base_url: str = None, provider: str = "aihubmix"):
        """Store connection settings; no agents are created yet.

        Args:
            api_key: API key forwarded to every LLM client.
            base_url: Optional custom API endpoint; None uses the provider default.
            provider: Backend provider identifier (default "aihubmix").
        """
        self.api_key = api_key
        self.base_url = base_url
        self.provider = provider
        # Maps role name ("planner" / "researcher" / "writer") -> ResearchAgent,
        # populated by create_agents().
        self.agents: Dict[str, ResearchAgent] = {}

    def _get_client(self, model: str) -> LLMClient:
        """Build an LLMClient for *model* using the stored credentials."""
        return LLMClient(
            provider=self.provider,
            api_key=self.api_key,
            base_url=self.base_url,
            model=model,
        )

    def create_agents(self, config: ResearchConfig):
        """Initialize agents with the per-role models named in *config*."""
        self.agents["planner"] = ResearchAgent("planner", self._get_client(config.planner_model))
        self.agents["researcher"] = ResearchAgent("researcher", self._get_client(config.researcher_model))
        self.agents["writer"] = ResearchAgent("writer", self._get_client(config.writer_model))

    def generate_plan(self, topic: str, context: str) -> Generator[str, None, None]:
        """Step 1: Generate Research Plan.

        Streams the planner agent's output chunk by chunk.
        """
        prompt = f"Please create a comprehensive research plan for the topic: '{topic}'.\nBreak it down into 3-5 distinct, actionable steps."
        yield from self.agents["planner"].generate(prompt, context)

    def execute_step(self, step: str, previous_findings: str) -> Generator[str, None, None]:
        """Step 2: Execute a single research step.

        Previous findings are embedded in the prompt so the researcher can
        build on earlier steps; streams the researcher agent's output.
        """
        prompt = f"Execute this research step: '{step}'.\nPrevious findings: {previous_findings}"
        yield from self.agents["researcher"].generate(prompt)

    def generate_report(self, topic: str, all_findings: str) -> Generator[str, None, None]:
        """Step 3: Generate Final Report.

        Streams the writer agent's synthesis of the accumulated findings.
        """
        prompt = f"Write a final comprehensive report on '{topic}' based on these findings:\n{all_findings}"
        yield from self.agents["writer"].generate(prompt)