jjvelezo committed
Commit 49b6791 · verified · 1 Parent(s): b86a6cd

Update agent.py

Files changed (1)
  1. agent.py +95 -63
agent.py CHANGED
@@ -3,18 +3,101 @@ import requests
 import urllib.parse
 from bs4 import BeautifulSoup
 
+class BaseModel:
+    def answer(self, prompt: str) -> str:
+        raise NotImplementedError("Model must implement the answer method.")
+
+class HfApiModel(BaseModel):
+    def __init__(self, model_name: str, api_token: str):
+        self.model_name = model_name
+        self.api_token = api_token
+
+    def answer(self, prompt: str) -> str:
+        url = f"https://api-inference.huggingface.co/models/{self.model_name}"
+        headers = {
+            "Authorization": f"Bearer {self.api_token}",
+            "Content-Type": "application/json"
+        }
+        payload = {
+            "inputs": prompt,
+            "parameters": {
+                "max_new_tokens": 200,
+                "temperature": 0.0
+            }
+        }
+
+        try:
+            response = requests.post(url, headers=headers, json=payload, timeout=30)
+            response.raise_for_status()
+            output = response.json()
+            if isinstance(output, list) and "generated_text" in output[0]:
+                return output[0]["generated_text"].strip()[:200]
+            return "No response generated."
+        except Exception as e:
+            return f"Error from Hugging Face API: {e}"
+
+class LiteLLMModel(BaseModel):
+    def __init__(self, endpoint_url: str):
+        self.url = endpoint_url
+
+    def answer(self, prompt: str) -> str:
+        try:
+            response = requests.post(self.url, json={"input": prompt}, timeout=30)
+            response.raise_for_status()
+            return response.json().get("output", "No output.")
+        except Exception as e:
+            return f"LiteLLM error: {e}"
+
+class OpenAIServerModel(BaseModel):
+    def __init__(self, api_key: str, model: str = "gpt-3.5-turbo"):
+        self.api_key = api_key
+        self.model = model
+
+    def answer(self, prompt: str) -> str:
+        try:
+            response = requests.post(
+                "https://api.openai.com/v1/chat/completions",
+                headers={
+                    "Authorization": f"Bearer {self.api_key}",
+                    "Content-Type": "application/json"
+                },
+                json={
+                    "model": self.model,
+                    "messages": [{"role": "user", "content": prompt}],
+                    "max_tokens": 200,
+                    "temperature": 0.0
+                },
+                timeout=30
+            )
+            response.raise_for_status()
+            data = response.json()
+            return data["choices"][0]["message"]["content"].strip()[:200]
+        except Exception as e:
+            return f"OpenAI error: {e}"
+
 class DuckDuckGoAgent:
     def __init__(self):
         print("DuckDuckGoAgent initialized.")
-        self.headers = {
-            "User-Agent": "Mozilla/5.0"
-        }
+        self.headers = {"User-Agent": "Mozilla/5.0"}
+        self.hf_api_key = os.getenv("HF_API_TOKEN")
+        self.model_type = os.getenv("MODEL_TYPE", "huggingface")
+        self.model_name = os.getenv("MODEL_NAME", "mistralai/Mistral-7B-Instruct-v0.1")
+        self.model_url = os.getenv("MODEL_URL")  # For LiteLLM
+        self.openai_key = os.getenv("OPENAI_API_KEY")
+
+        self.llm = self._init_model()
+
+    def _init_model(self) -> BaseModel:
+        if self.model_type == "openai" and self.openai_key:
+            return OpenAIServerModel(api_key=self.openai_key)
+        elif self.model_type == "litellm" and self.model_url:
+            return LiteLLMModel(endpoint_url=self.model_url)
+        elif self.model_type == "huggingface" and self.hf_api_key:
+            return HfApiModel(model_name=self.model_name, api_token=self.hf_api_key)
+        else:
+            raise ValueError("No valid model configuration found.")
 
     def get_duckduckgo_answer(self, query: str) -> str:
-        """
-        Attempt to get an answer from the DuckDuckGo API.
-        If no abstract text is found, fall back to scraping.
-        """
         search_query = urllib.parse.quote(query)
         url = f"https://api.duckduckgo.com/?q={search_query}&format=json&no_html=1&skip_disambig=1"
 
@@ -22,23 +105,15 @@ class DuckDuckGoAgent:
             response = requests.get(url, timeout=10)
             if response.status_code == 200:
                 data = response.json()
-                # If AbstractText exists and is non-empty, return it
                 if 'AbstractText' in data and data['AbstractText']:
                     return data['AbstractText'][:200]
-                else:
-                    print("No abstract found, falling back to scraping.")
-                    return self.scrape_duckduckgo(query)
-            else:
-                print(f"DuckDuckGo API failed with status: {response.status_code}")
                 return self.scrape_duckduckgo(query)
+            return self.scrape_duckduckgo(query)
         except Exception as e:
-            print(f"Error contacting DuckDuckGo API: {e}")
+            print(f"Error with DuckDuckGo API: {e}")
             return self.scrape_duckduckgo(query)
 
     def scrape_duckduckgo(self, query: str) -> str:
-        """
-        Fallback to scraping DuckDuckGo search results if API fails or no abstract found.
-        """
         print("Using fallback: scraping HTML results.")
         try:
             response = requests.post(
@@ -53,55 +128,12 @@ class DuckDuckGoAgent:
                 text = s.get_text().strip()
                 if text:
                     return text[:200]
-            print("No useful snippets found, falling back to Hugging Face LLM.")
-            return self.call_huggingface_llm(query)
-        except Exception as e:
-            print(f"Error scraping DuckDuckGo: {e}")
-            return self.call_huggingface_llm(query)
-
-    def call_huggingface_llm(self, prompt: str) -> str:
-        """
-        Fallback to Hugging Face LLM if DuckDuckGo API and scraping both fail.
-        """
-        hf_api_key = os.getenv("HF_API_TOKEN")
-        model = "mistralai/Mistral-7B-Instruct-v0.1"
-
-        if not hf_api_key:
-            return "Error: Hugging Face API Token is not configured."
-
-        url = f"https://api-inference.huggingface.co/models/{model}"
-        headers = {
-            "Authorization": f"Bearer {hf_api_key}",
-            "Content-Type": "application/json"
-        }
-
-        payload = {
-            "inputs": prompt,
-            "parameters": {
-                "max_new_tokens": 200,
-                "temperature": 0.7
-            }
-        }
-
-        try:
-            response = requests.post(url, headers=headers, json=payload, timeout=30)
-            response.raise_for_status()
-            output = response.json()
-            if isinstance(output, list) and "generated_text" in output[0]:
-                return output[0]["generated_text"].strip()[:200]
-            elif isinstance(output, dict) and "error" in output:
-                return f"HF LLM error: {output['error']}"
-            else:
-                return "No response generated from Hugging Face LLM."
+            return self.llm.answer(query)
         except Exception as e:
-            print(f"Error contacting Hugging Face LLM: {e}")
-            return "Error contacting Hugging Face model."
+            print(f"Scraping error: {e}")
+            return self.llm.answer(query)
 
     def __call__(self, question: str) -> str:
-        """
-        Main entry point for the agent to process a question.
-        It will first attempt DuckDuckGo, then fall back to scraping or Hugging Face LLM.
-        """
         print(f"Agent received question: {question[:50]}...")
         answer = self.get_duckduckgo_answer(question)
         print(f"Agent returning answer: {answer}")
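For context, the sketch below shows how the updated agent might be configured and called after this change; it is not part of the commit. It assumes agent.py from this commit is importable as `agent`, that the environment variables match the names read in DuckDuckGoAgent.__init__ (MODEL_TYPE, MODEL_NAME, MODEL_URL, HF_API_TOKEN, OPENAI_API_KEY), and it uses placeholder values for the token and the question.

# Usage sketch (illustrative only, not part of the commit).
import os

os.environ["MODEL_TYPE"] = "huggingface"   # or "litellm" / "openai"
os.environ["HF_API_TOKEN"] = "hf_xxx"      # placeholder token, required for the "huggingface" backend

from agent import DuckDuckGoAgent

ddg_agent = DuckDuckGoAgent()              # _init_model() raises ValueError if no backend is configured
ddg_agent("What is the capital of France?")  # DuckDuckGo API first, then HTML scraping, then self.llm.answer()

With MODEL_TYPE left at its default, the agent keeps the same fallback chain as before, but the final step now goes through the pluggable self.llm backend instead of the removed call_huggingface_llm method.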