wt002 committed on
Commit
92047ee
·
verified ·
1 Parent(s): 76730ef

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -67
app.py CHANGED
@@ -3,14 +3,13 @@ from dotenv import load_dotenv
3
  import gradio as gr
4
  import requests
5
 
6
- from typing import List, Dict, Union
7
- import pandas as pd
8
- import wikipediaapi
9
  import requests
10
- from bs4 import BeautifulSoup
11
- import urllib.parse
12
- import random
13
- from typing import List, Dict
14
 
15
  load_dotenv()
16
 
@@ -23,66 +22,15 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
23
 
24
class BasicAgent:
    """Minimal web-search agent: scrapes Google result pages and pretty-prints hits.

    NOTE(review): scraping Google HTML is brittle (selectors change without
    notice) and may violate terms of service — confirm this is intended.
    """

    def __init__(self):
        # Spoof a desktop browser so Google serves the regular HTML results page.
        self.headers = {
            'User-Agent': self._get_random_user_agent(),
            'Accept-Language': 'en-US,en;q=0.5',
        }

    def _get_random_user_agent(self) -> str:
        """Return one of a few common desktop-browser User-Agent strings."""
        browsers = [
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15'
        ]
        return random.choice(browsers)

    def search(self, query: str, num_results: int = 3) -> List[Dict]:
        """Run a Google search for *query*; return up to *num_results* parsed hits.

        Best-effort: any network/HTTP/parse failure is printed and an empty
        list is returned instead of raising.
        """
        encoded_query = urllib.parse.quote_plus(query)
        # Request a couple of extra results since some entries may be discarded
        # during parsing (missing title or link).
        url = f"https://www.google.com/search?q={encoded_query}&num={num_results + 2}"

        try:
            response = requests.get(url, headers=self.headers, timeout=10)
            response.raise_for_status()
            return self._parse_results(response.text, num_results)
        except Exception as e:
            print(f"Search failed: {str(e)}")
            return []

    def _parse_results(self, html: str, max_results: int) -> List[Dict]:
        """Parse Google's result HTML into dicts: position, title, link, snippet.

        NOTE(review): selectors ('.tF2Cxc', '.g', '.LC20lb', '.VwiC3b', ...)
        track Google's current markup and may break silently.
        """
        soup = BeautifulSoup(html, 'html.parser')
        results = []

        for i, result in enumerate(soup.select('.tF2Cxc, .g')[:max_results]):
            title = result.select_one('h3, .LC20lb')
            # BUGFIX: original did result.find('a')['href'] unconditionally,
            # which raises TypeError when a result block contains no anchor.
            # Extract defensively so the guard below can skip such entries.
            anchor = result.find('a')
            link = anchor.get('href') if anchor is not None else None
            snippet = result.select_one('.IsZvec, .VwiC3b')

            if title and link:
                results.append({
                    'position': i + 1,
                    'title': title.get_text(),
                    # Relative hrefs (e.g. "/url?q=...") are made absolute.
                    'link': link if link.startswith('http') else f"https://www.google.com{link}",
                    'snippet': snippet.get_text() if snippet else None
                })
        return results

    def pretty_print(self, results: List[Dict]) -> str:
        """Format parsed results as a numbered, human-readable list."""
        output = []
        for res in results:
            output.append(
                f"{res['position']}. {res['title']}\n"
                f" 🔗 {res['link']}\n"
                f" 📝 {res['snippet'] or 'No description available'}\n"
            )
        return "\n".join(output)

    def __call__(self, query: str) -> str:
        """Added this to make the agent callable"""
        return self.pretty_print(self.search(query))
84
-
85
-
86
 
87
 
88
  def run_and_submit_all( profile: gr.OAuthProfile | None):
 
3
  import gradio as gr
4
  import requests
5
 
6
+ import os
7
+ import inspect
8
+ import gradio as gr
9
  import requests
10
+ import pandas as pd
11
+ from langchain_core.messages import HumanMessage
12
+ from agent import build_graph
 
13
 
14
  load_dotenv()
15
 
 
22
 
23
class BasicAgent:
    """Thin wrapper that routes a question through the pre-built agent graph."""

    def __init__(self):
        print("BasicAgent initialized.")
        # Build the LangGraph workflow once; it is reused for every question.
        self.graph = build_graph()

    def __call__(self, question: str) -> str:
        """Invoke the graph on *question* and return the final message's content."""
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        state = {"messages": [HumanMessage(content=question)]}
        outcome = self.graph.invoke(state)
        # The last message in the resulting conversation holds the final answer.
        return outcome['messages'][-1].content
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
34
 
35
 
36
  def run_and_submit_all( profile: gr.OAuthProfile | None):