Quazim0t0 committed
Commit a07c74e · verified
1 Parent(s): 4ec2867

Update app.py

Files changed (1)
  1. app.py +226 -23
app.py CHANGED
@@ -1,77 +1,280 @@
-"""qResearch: Dual-Agent Research System"""
+"""qResearch: Dual-Agent Research System
+
+A research system that combines web search, analysis, and proper MLA formatting
+using a dual-agent approach with specialized roles.
+"""
 
 import os
 import gradio as gr
 from smolagents import CodeAgent, HfApiModel, tool
+from typing import Dict, List, Optional, Tuple, Union
+import json
 
+# Advanced Research Tools
 @tool
 def web_search(query: str, max_results: int = 5) -> str:
-    """Performs web searches using DuckDuckGo
+    """Performs comprehensive web searches using DuckDuckGo
 
     Args:
        query: Search query string to look up
        max_results: Number of results (1-10) to return
 
     Returns:
-        str: Formatted search results as a string
+        str: Formatted search results as a string with titles, snippets and URLs
     """
     from duckduckgo_search import DDGS
     with DDGS() as ddgs:
         results = ddgs.text(query, max_results=max_results)
-        return "\n".join([f"{i+1}. {r['title']}: {r['body']} ({r['href']})"
-                          for i, r in enumerate(results)])
+
+    if not results:
+        return "No results found for the given query."
+
+    formatted_results = []
+    for i, r in enumerate(results, 1):
+        formatted_results.append(
+            f"{i}. {r['title']}\n"
+            f"   Summary: {r['body']}\n"
+            f"   Source: {r['href']}\n"
+        )
+    return "\n".join(formatted_results)
+
+@tool
+def analyze_content(text: str, analysis_type: str = "general") -> str:
+    """Analyzes content for various aspects like key points, themes, or citations
+
+    Args:
+        text: The content to analyze
+        analysis_type: Type of analysis ("general", "academic", "citations")
+
+    Returns:
+        str: Analysis results in a structured format
+    """
+    points = []
+
+    if "academic" in analysis_type.lower():
+        # Academic analysis focusing on scholarly aspects
+        points.extend([
+            "Key Arguments:",
+            "- Main thesis and supporting evidence",
+            "- Methodological approach",
+            "- Scholarly context and significance",
+            "\nAcademic Sources:",
+            "- Peer-reviewed citations",
+            "- Research foundations",
+        ])
+    elif "citations" in analysis_type.lower():
+        # Citation analysis
+        points.extend([
+            "Citation Analysis:",
+            "- Author credentials",
+            "- Publication details",
+            "- Citation format and standards",
+            "\nBibliographic Information:",
+            "- Primary sources",
+            "- Secondary references",
+        ])
+    else:
+        # General content analysis
+        points.extend([
+            "Content Overview:",
+            "- Main topics and themes",
+            "- Key findings and insights",
+            "- Supporting evidence",
+            "\nRelevance and Context:",
+            "- Topic significance",
+            "- Current applications",
+        ])
+
+    return "\n".join(points)
 
 class ResearchSystem:
     def __init__(self):
-        # Initialize model FIRST
+        # System configuration
+        self.config = {
+            "max_research_depth": 3,
+            "min_sources": 2,
+            "format_style": "MLA",
+            "cache_results": True
+        }
+
+        # Initialize model with role specialization
         self.model = HfApiModel(
             model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
             custom_role_conversions={
                 "tool-call": "assistant",
-                "tool-response": "user"
+                "tool-response": "user",
+                "researcher": "assistant",
+                "formatter": "assistant"
             }
         )
 
-        # Initialize agents AFTER model
+        # Research agent with enhanced capabilities
         self.researcher = CodeAgent(
-            tools=[web_search],
+            tools=[web_search, analyze_content],
             model=self.model
         )
 
+        # Formatting agent specializing in academic standards
         self.formatter = CodeAgent(
             tools=[],
             model=self.model
         )
+
+        # Result cache for performance
+        self.cache: Dict[str, Tuple[str, str]] = {}
 
     def create_interface(self):
-        with gr.Blocks(title="qResearch") as interface:
-            gr.Markdown("# qResearch\n*Research Analysis → MLA Formatting*")
+        """Creates an enhanced Gradio interface with advanced features"""
+        with gr.Blocks(title="qResearch", theme=gr.themes.Soft()) as interface:
+            gr.Markdown(
+                "# qResearch Pro\n"
+                "*Advanced Research → Intelligent Analysis → Professional MLA Formatting*\n"
+                "---"
+            )
 
             with gr.Row():
-                chat = gr.Chatbot(label="Research Process", height=500)
-                input_box = gr.Textbox(label="Enter Query", placeholder="Research topic...")
-                submit_btn = gr.Button("Start Research", variant="primary")
+                with gr.Column(scale=3):
+                    chat = gr.Chatbot(
+                        label="Research Process",
+                        height=600,
+                        show_label=True
+                    )
+
+                with gr.Column(scale=1):
+                    with gr.Group():
+                        input_box = gr.Textbox(
+                            label="Research Query",
+                            placeholder="Enter your research topic...",
+                            lines=3
+                        )
+
+                        with gr.Row():
+                            submit_btn = gr.Button(
+                                "Start Research",
+                                variant="primary"
+                            )
+                            clear_btn = gr.Button(
+                                "Clear",
+                                variant="secondary"
+                            )
+
+                    with gr.Accordion("Advanced Options", open=False):
+                        depth = gr.Slider(
+                            minimum=1,
+                            maximum=5,
+                            value=3,
+                            step=1,
+                            label="Research Depth"
+                        )
+                        sources = gr.Slider(
+                            minimum=1,
+                            maximum=10,
+                            value=5,
+                            step=1,
+                            label="Number of Sources"
+                        )
 
+            # Event handlers
             submit_btn.click(
                 self.process_query,
-                inputs=[input_box],
+                inputs=[input_box, depth, sources],
                 outputs=[chat]
             )
+
+            clear_btn.click(
+                lambda: None,
+                inputs=[],
+                outputs=[chat],
+                _js="() => []"  # Clear chat
+            )
+
+            # Help information
+            with gr.Accordion("Usage Guide", open=False):
+                gr.Markdown("""
+                ### How to Use qResearch Pro
+                1. Enter your research topic or question in the query box
+                2. Adjust research depth and source count if needed
+                3. Click 'Start Research' to begin the process
+                4. Review the results in three stages:
+                   - Initial research findings
+                   - Analysis and synthesis
+                   - Properly formatted MLA document
+                """)
 
         return interface
 
-    def process_query(self, query: str):
+    def process_query(self,
+                      query: str,
+                      depth: int = 3,
+                      num_sources: int = 5) -> List[List[str]]:
+        """
+        Processes a research query with enhanced capabilities
+
+        Args:
+            query: The research query
+            depth: Desired research depth (1-5)
+            num_sources: Number of sources to include (1-10)
+
+        Returns:
+            List of message pairs for the chatbot interface
+        """
         try:
-            raw_research = self.researcher.run(query)
-            formatted = self.formatter.run(f"Format this research:\n{raw_research}")
+            # Check cache first
+            if query in self.cache and self.config["cache_results"]:
+                raw_research, formatted = self.cache[query]
+                return [
+                    [query, None],
+                    ["Researcher (Cached)", raw_research],
+                    ["Formatter (Cached)", formatted]
+                ]
+
+            # Configure research parameters
+            research_prompt = (
+                f"Research Query: {query}\n"
+                f"Required Depth: {depth}\n"
+                f"Minimum Sources: {num_sources}\n"
+                "Please provide comprehensive research with:\n"
+                "1. Multiple reliable sources\n"
+                "2. Key findings and analysis\n"
+                "3. Supporting evidence and context"
+            )
+
+            # Conduct research
+            raw_research = self.researcher.run(research_prompt)
+
+            # Format results
+            format_prompt = (
+                "Format the following research in MLA style with:\n"
+                "1. Proper citations\n"
+                "2. Academic formatting\n"
+                "3. Bibliography\n\n"
+                f"Content to format:\n{raw_research}"
+            )
+            formatted = self.formatter.run(format_prompt)
+
+            # Cache results
+            if self.config["cache_results"]:
+                self.cache[query] = (raw_research, formatted)
+
+            # Return results in chatbot format
             return [
-                [query, None],  # User message
-                ["Researcher", raw_research],  # Research results
-                ["Formatter", formatted]  # Formatted results
+                [query, None],
+                ["📚 Research Findings", raw_research],
+                ["📝 MLA Formatted", formatted]
             ]
+
         except Exception as e:
-            return [[None, f"Error: {str(e)}"]]
+            error_msg = (
+                f"Error during research process: {str(e)}\n"
+                "Please try again or refine your query."
+            )
+            return [[None, error_msg]]
 
 if __name__ == "__main__":
     system = ResearchSystem()
-    system.create_interface().launch(server_port=7860)
+    system.create_interface().launch(
+        server_port=7860,
+        share=True,        # Enable public link
+        show_api=False,    # Hide API docs
+        enable_queue=True  # Enable request queuing
+    )
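
For reviewers who want to exercise the two tools this commit introduces without launching the Gradio app, a minimal smoke test might look like the sketch below. It is an illustrative example, not part of the diff: it assumes the file is saved as app.py on the import path, that duckduckgo_search is installed, and that smolagents @tool objects can be called directly like plain functions (the case in recent smolagents releases).

# Hypothetical smoke test for the tools added in this commit (not part of the diff).
# Assumes app.py is importable from the current directory and duckduckgo_search is installed.
from app import web_search, analyze_content

# web_search is a smolagents tool object; calling it forwards to the wrapped function
# and performs a live DuckDuckGo query.
print(web_search(query="MLA citation guidelines", max_results=3))

# analyze_content only assembles a static checklist, so it needs no network access.
print(analyze_content(text="Sample research notes on MLA style.", analysis_type="academic"))

Because process_query now takes depth and num_sources from the new sliders, any direct call to it outside the UI would need those arguments as well (they default to 3 and 5).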