"""qResearch v3.0: Dual-Agent Research System"""
import os
import gradio as gr
from dotenv import load_dotenv
from smolagents import CodeAgent, HfApiModel, Tool
# Initialize environment
load_dotenv(override=True)
class DuckDuckGoSearchTool(Tool):
    """Web search tool for primary research"""
    name = "web_search"
    description = "Performs privacy-focused web searches using DuckDuckGo"
    inputs = {
        "query": {"type": "string", "description": "Research query"},
        "max_results": {"type": "integer", "description": "Number of results (3-10)", "default": 5}
    }
    output_type = "string"

    def forward(self, query: str, max_results: int = 5) -> str:
        from duckduckgo_search import DDGS
        with DDGS() as ddgs:
            results = list(ddgs.text(query, max_results=max_results))
            return "\n".join([f"{idx+1}. {r['title']} ({r['href']}): {r['body']}"
                              for idx, r in enumerate(results)])
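
# Illustrative only: forward() returns one numbered line per hit, following the
# f-string above, i.e. "<n>. <title> (<url>): <body snippet>", for example:
#   1. Example Title (https://example.com/page): short snippet from the page...
# (titles/URLs here are placeholders, not real search results)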
class ResearchSystem:
    def __init__(self):
        # Shared model configuration
        self.model = HfApiModel(
            model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
            custom_role_conversions={"tool-call": "assistant", "tool-response": "user"},
            use_auth_token=False,
            trust_remote_code=True
        )

        # Initialize Researcher Agent
        self.researcher = CodeAgent(
            tools=[DuckDuckGoSearchTool()],
            model=self.model,
            system_prompt="""You are a Senior Research Analyst. Your tasks:
            1. Conduct comprehensive web research using available tools
            2. Synthesize findings into a detailed draft report
            3. Include all relevant sources and data points
            4. Maintain raw factual accuracy without formatting"""
        )

        # Initialize Formatter Agent
        self.formatter = CodeAgent(
            tools=[],
            model=self.model,
            system_prompt="""You are an MLA Formatting Specialist. Your tasks:
            1. Receive raw research content
            2. Apply proper MLA 9th edition formatting
            3. Verify citation integrity
            4. Structure content with:
               - Header block
               - Title capitalization
               - In-text citations
               - Works Cited section
            5. Ensure academic tone and clarity"""
        )
    def _process_research(self, query: str) -> str:
        """Execute two-stage research process"""
        # Stage 1: Initial research
        raw_response = self.researcher.run(
            task=f"Conduct research about: {query}",
            temperature=0.7
        )

        # Stage 2: MLA formatting
        formatted_response = self.formatter.run(
            task=f"Format this research into MLA:\n{raw_response}",
            temperature=0.3
        )
        return formatted_response
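
    # Hand-off sketch (descriptive only): the formatter agent receives the
    # researcher's draft verbatim inside its task string, i.e.
    # "Format this research into MLA:\n<draft report with sources>",
    # and its return value is used unchanged as the final answer.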
    def create_interface(self):
        with gr.Blocks(theme=gr.themes.Soft(), title="qResearch v3") as interface:
            gr.Markdown("# qResearch Dual-Agent System\n*Research → Analysis → MLA Formatting*")

            with gr.Row():
                with gr.Column(scale=2):
                    # type="messages" lets the Chatbot accept the gr.ChatMessage
                    # objects returned by _handle_query. Gradio's avatar_images
                    # expects a (user, assistant) pair of image paths rather than
                    # a per-agent dict, so the agent labels are carried in the
                    # message metadata instead (see _handle_query).
                    chat_history = gr.Chatbot(
                        label="Agent Communication",
                        type="messages",
                        height=500
                    )
                with gr.Column(scale=1):
                    research_steps = gr.JSON(
                        label="Processing Steps",
                        value={"Current Stage": "Awaiting Input"}
                    )

            with gr.Row():
                input_box = gr.Textbox(
                    placeholder="Enter research topic...",
                    lines=2,
                    label="Research Query"
                )
                submit_btn = gr.Button("Start Research", variant="primary")

            submit_btn.click(
                self._handle_query,
                inputs=[input_box],
                outputs=[chat_history, research_steps]
            )
        return interface
    def _handle_query(self, query: str):
        try:
            # Chat messages use the standard "assistant" role; the agent name
            # is surfaced through the metadata title.
            researcher_msg = gr.ChatMessage(
                role="assistant",
                metadata={"title": "🔍 Researcher"},
                content=f"Beginning research: {query}"
            )

            # Run the two-stage research pipeline
            formatted_response = self._process_research(query)

            formatter_msg = gr.ChatMessage(
                role="assistant",
                metadata={"title": "✒️ Formatter"},
                content=formatted_response
            )

            return [
                researcher_msg,
                formatter_msg
            ], {
                "Completed Stages": ["Research Collection", "MLA Formatting"],
                "Current Status": "Research Complete"
            }
        except Exception as e:
            error_msg = gr.ChatMessage(
                role="assistant",
                metadata={"title": "System"},
                content=f"Error: {str(e)}"
            )
            return [error_msg], {"Error": str(e)}
if __name__ == "__main__":
    research_system = ResearchSystem()
    interface = research_system.create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True
    )
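
# Headless usage sketch (illustrative; assumes the HF Inference API is reachable
# for Qwen/Qwen2.5-Coder-32B-Instruct and that duckduckgo_search is installed):
#
#   system = ResearchSystem()
#   report = system._process_research("history of the transistor")
#   print(report)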