wt002 committed on
Commit 671879b · verified · 1 Parent(s): fa302d4

Update app.py

Files changed (1)
  1. app.py +2 -150
app.py CHANGED
@@ -11,7 +11,7 @@ import wikipediaapi
 #from typing import Optional
 #from termax.agent import Agent # Assuming you're using a specific agent framework
 #from termax.tool import google_search # Assuming this is your search tool import
-from langchain_google_genai import ChatGoogleGenerativeAI
+#from langchain_google_genai import ChatGoogleGenerativeAI
 from langchain.agents import AgentExecutor, create_tool_calling_agent
 from langchain.tools import Tool
 from langchain import hub
@@ -55,155 +55,7 @@ class BasicAgent:
         response = requests.get(url)
         return response.text
 
-
-    def search_tool(self, prompt: str) -> str:
-        model = genai.GenerativeModel('gemini-pro')
-        response = model.generate_content(
-            f"Search the web for: {prompt}. Summarize the results."
-        )
-        return response.text
-
-    def search_tool(self, prompt: str) -> str:
-        # Initialize Gemini model
-        llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash") # or "gemini-pro"
-
-        # Define Google Search tool (replace with actual search API)
-        def google_search(query: str) -> str:
-            # Use SerpAPI, Google Custom Search, etc.
-            return f"Search results for: {query}"
-
-        tools = [
-            Tool(
-                name="google_search",
-                func=google_search,
-                description="Useful for searching the web."
-            )
-        ]
-
-        # Create agent
-        agent = create_tool_calling_agent(llm, tools, prompt_template=hub.pull("hwchase17/openai-tools-agent"))
-        agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
-
-        # Run the agent
-        response = agent_executor.invoke({"input": prompt})
-        return response["output"]
-
-
-    def search_tool(self, prompt: str) -> str:
-        """
-        Searches for information using a Gemini-powered agent with Google Search capability.
-
-        Args:
-            prompt: The user's query to search for
-
-        Returns:
-            str: The response from the search agent
-        """
-        root_agent = Agent(
-            name="search_assistant",
-            model="gemini-2.0-flash", # Or your preferred Gemini model
-            instruction="You are a helpful assistant. Answer user questions using Google Search when needed.",
-            description="An assistant that can search the web.",
-            tools=[google_search]
-        )
-
-        # Actually invoke the agent with the prompt
-        response = root_agent.run(prompt)
-        return response
-
-    def answer_question(self, prompt: str) -> str:
-        from google.genarativeai import genai
-
-        client = genai.Client(api_key=os.getenv("GEMINI_API_KEY"))
-
-        response = client.models.generate_content(
-            model="gemini-2.0-flash",
-            contents=prompt,
-        )
-        return response.text
-
-
-    def generate_response(self, prompt: str) -> str:
-        """Get response from HuggingFace model"""
-        try:
-            response = requests.post(
-                self.api_url,
-                headers=self.headers,
-                json={"inputs": prompt}
-            )
-            response.raise_for_status()
-            return response.json()[0]['generated_text']
-        except Exception as e:
-            return f"Error generating response: {str(e)}"
-
-    def web_search(self, query: str) -> List[Dict]:
-        """Search using SearxNG (meta-search engine)"""
-        params = {
-            "q": query,
-            "format": "json",
-            "engines": "google,bing,duckduckgo"
-        }
-        try:
-            response = requests.get(self.searx_url, params=params)
-            response.raise_for_status()
-            return response.json().get("results", [])
-        except requests.RequestException:
-            return []
-
-    def wikipedia_search(self, query: str) -> str:
-        """Get Wikipedia summary"""
-        page = self.wiki.page(query)
-        return page.summary if page.exists() else "No Wikipedia page found"
-
-
-    def _process_pdf(self, file_path: str) -> str:
-        """Fallback PDF text extraction (works for simple PDFs)."""
-        try:
-            # Read raw binary data and decode text (very basic)
-            with open(file_path, 'rb') as f:
-                text = f.read().decode('latin-1') # Try UTF-8 if this fails
-            return text if text.strip() else "PDF text not extractable (install pdfminer.six for better results)"
-        except Exception as e:
-            return f"PDF read error: {str(e)}"
-
-
-    def __call__(self, query: str) -> str:
-        """Handle queries (text, search, or file processing)"""
-        print(f"Processing query: {query[:50]}...")
-
-        # If it's a file path, process it
-        if os.path.exists(query):
-            return self.process_document(query)
-
-        # If it's a Wikipedia-style query (e.g., "wikipedia:Python")
-        if query.lower().startswith("wikipedia:"):
-            topic = query.split(":")[1].strip()
-            return self.wikipedia_search(topic)
-
-        # If it's a web search (e.g., "search:best LLMs 2024")
-        if query.lower().startswith("search:"):
-            search_query = query.split(":")[1].strip()
-            results = self.web_search(search_query)
-            return "\n".join([f"{r['title']}: {r['url']}" for r in results])
-
-        # Default: Use HuggingFace for text generation
-        return self.generate_response(query)
-
-# Example Usage
-if __name__ == "__main__":
-    agent = BasicAgent()
-
-    # Test Wikipedia search
-    print(agent("wikipedia:Python"))
-
-    # Test web search (requires SearxNG instance)
-    # print(agent("search:best programming languages 2024"))
-
-    # Test text generation
-    print(agent("Explain quantum computing in simple terms"))
-
-    # Test file processing (example: PDF)
-    # print(agent("/path/to/document.pdf"))
+
 
 
 def run_and_submit_all( profile: gr.OAuthProfile | None):
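Note on the removed LangChain variant: the second `search_tool` definition passed `prompt_template=` to `create_tool_calling_agent`, but that helper takes the prompt as its third argument, `prompt`. A minimal sketch of how that variant could be wired up is shown here for reference only; it assumes the `langchain`, `langchainhub`, and `langchain-google-genai` packages are installed, that a `GOOGLE_API_KEY` environment variable is set, and it keeps the placeholder `google_search` stub from the removed code rather than a real search backend.

# Editorial sketch, not part of the commit: the removed LangChain-based helper,
# adjusted so create_tool_calling_agent receives the prompt under its real
# parameter name. Package availability and GOOGLE_API_KEY are assumptions.
from langchain import hub
from langchain.agents import AgentExecutor, create_tool_calling_agent
from langchain.tools import Tool
from langchain_google_genai import ChatGoogleGenerativeAI


def search_tool(prompt: str) -> str:
    # Gemini chat model; reads GOOGLE_API_KEY from the environment.
    llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash")

    # Placeholder search function, as in the removed code; swap in SerpAPI,
    # Google Custom Search, etc. for real results.
    def google_search(query: str) -> str:
        return f"Search results for: {query}"

    tools = [
        Tool(
            name="google_search",
            func=google_search,
            description="Useful for searching the web.",
        )
    ]

    # create_tool_calling_agent takes (llm, tools, prompt); the prompt is a
    # ChatPromptTemplate, here pulled from the LangChain hub.
    agent = create_tool_calling_agent(llm, tools, hub.pull("hwchase17/openai-tools-agent"))
    executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

    return executor.invoke({"input": prompt})["output"]

Because app.py defined `search_tool` three times, only the last definition was actually bound on the class, which is presumably part of why this commit drops all of them.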
 
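Note on the removed `answer_question` helper: `from google.genarativeai import genai` is not an importable path in either Google SDK, while the calls it makes (`genai.Client(...)`, `client.models.generate_content(...)`) match the newer google-genai package, whose import is `from google import genai`. A sketch with only the import corrected, assuming that package and the `GEMINI_API_KEY` environment variable carried over from the removed code:

# Editorial sketch, not part of the commit: the removed answer_question helper
# with the import corrected to the google-genai SDK spelling. The package and
# the GEMINI_API_KEY environment variable are assumptions from the removed code.
import os

from google import genai


def answer_question(prompt: str) -> str:
    # Client-based API of the google-genai SDK.
    client = genai.Client(api_key=os.getenv("GEMINI_API_KEY"))
    response = client.models.generate_content(
        model="gemini-2.0-flash",
        contents=prompt,
    )
    return response.text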