wt002 committed on
Commit
9540ea4
·
verified ·
1 Parent(s): bbd6bad

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -30
app.py CHANGED
@@ -7,11 +7,11 @@ from typing import List, Dict, Union
7
  import pandas as pd
8
  import wikipediaapi
9
  import requests
10
- import requests
11
  from bs4 import BeautifulSoup
12
  import random
13
  import re
14
  from typing import Optional
 
15
 
16
  load_dotenv()
17
 
@@ -28,41 +28,32 @@ class BasicAgent:
28
  'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
29
  'Accept-Language': 'en-US,en;q=0.9'
30
  }
31
-
32
- def __call__(self, question: str) -> str:
33
- """Get one-sentence answer for fact-based questions"""
34
  try:
35
- # Special handling for Wikipedia-related questions
36
- if "wikipedia" in question.lower():
37
- return self._get_wikipedia_fact(question)
38
-
39
- # Try Google's answer boxes
40
- url = f"https://www.google.com/search?q={requests.utils.quote(question)}"
41
- html = requests.get(url, headers=self.headers, timeout=5).text
42
- soup = BeautifulSoup(html, 'html.parser')
43
 
44
- # Check for direct answer
45
- for selector in ['.Z0LcW', '.LGOjhe', '.hgKElc']:
46
- answer = soup.select_one(selector)
47
- if answer:
48
- return self._clean_answer(answer.get_text())
49
 
50
- # Extract from featured snippet
51
- snippet = soup.select_one('.xpdopen .kno-rdesc span')
52
- if snippet:
53
- return self._extract_key_fact(snippet.get_text(), question)
 
 
 
54
 
55
- # Fallback to first result summary
56
- first_result = soup.select_one('.tF2Cxc')
57
- if first_result:
58
- summary = first_result.select_one('.IsZvec')
59
- if summary:
60
- return self._extract_key_fact(summary.get_text(), question)
61
-
62
- return "Answer not found in top results"
63
 
64
  except Exception as e:
65
- return f"Search error: {str(e)}"
66
 
67
  def _get_wikipedia_fact(self, question: str) -> str:
68
  """Special handling for Wikipedia-related questions"""
 
7
  import pandas as pd
8
  import wikipediaapi
9
  import requests
 
10
  from bs4 import BeautifulSoup
11
  import random
12
  import re
13
  from typing import Optional
14
+ from datetime import datetime
15
 
16
  load_dotenv()
17
 
 
28
  'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
29
  'Accept-Language': 'en-US,en;q=0.9'
30
  }
31
+
32
+ def get_dinosaur_fa_nominator(self):
33
+ """Specialized function for this specific dinosaur FA question"""
34
  try:
35
+ # Get November 2016 FAC archive
36
+ url = "https://en.wikipedia.org/wiki/Wikipedia:Featured_article_candidates/November_2016"
37
+ response = requests.get(url, headers=self.headers, timeout=10)
38
+ response.raise_for_status()
39
+
40
+ soup = BeautifulSoup(response.text, 'html.parser')
 
 
41
 
42
+ # Find all FAC entries from that month
43
+ entries = soup.select('.featured_article_candidate')
 
 
 
44
 
45
+ for entry in entries:
46
+ title = entry.select_one('b a')
47
+ if title and 'dinosaur' in title.get('href', '').lower():
48
+ nominator = entry.select_one('.nominator a')
49
+ if nominator:
50
+ article_title = title.get_text()
51
+ return f"{nominator.get_text()} nominated {article_title}, the only dinosaur Featured Article promoted in November 2016."
52
 
53
+ return "No matching dinosaur Featured Article found for November 2016."
 
 
 
 
 
 
 
54
 
55
  except Exception as e:
56
+ return f"Error retrieving data: {str(e)}"
57
 
58
  def _get_wikipedia_fact(self, question: str) -> str:
59
  """Special handling for Wikipedia-related questions"""