wt002 committed
Commit 5261680 · verified · 1 Parent(s): 728e81f

Update app.py

Files changed (1)
  1. app.py +12 -15
app.py CHANGED
@@ -7,6 +7,9 @@ from typing import List, Dict, Union
 import requests
 import wikipediaapi
 import pandas as pd
+from transformers import HuggingFaceAgent
+from transformers import pipeline # or HfAgent if you want the higher-level agent
+
 
 load_dotenv()
 
@@ -16,30 +19,24 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
 
 # --- Basic Agent Definition ---
+
 class BasicAgent:
     def __init__(self, model="google/gemma-7b"):
-        self.api_url = f"https://api-inference.huggingface.co/models/{model}"
-        self.headers = {"Authorization": f"Bearer {os.getenv('HF_API_KEY')}"}
+        # Using pipeline for text generation
+        self.agent = pipeline("text-generation", model=model)
         print("BasicAgent initialized.")
 
-    #usage
-    #agent = HuggingFaceAgent("google/gemma-7b") # Same architecture as Gemini
-    #print(agent.generate("Explain quantum computing"))
-
-
     def __call__(self, question: str) -> str:
         print(f"Agent received question (first 50 chars): {question[:50]}...")
-        fixed_answer = self.agent.generate(question)
-        print(f"Agent returning answer: {fixed_answer}")
-        return fixed_answer
-
+        answer = self.agent(question, max_length=100)[0]['generated_text']
+        print(f"Agent returning answer (first 50 chars): {answer[:50]}...")
+        return answer
 
-    # to check
     def generate_response(self, prompt: str) -> str:
-        """Get response from Gema"""
+        """Get response from model"""
         try:
-            response = self.model.generate_content(prompt)
-            return response.text
+            response = self.agent(prompt, max_length=100)[0]['generated_text']
+            return response
         except Exception as e:
             return f"Error generating response: {str(e)}"
 
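
For reference, below is a minimal, self-contained sketch of BasicAgent as it stands after this commit, written so it runs on its own. Two details are adjustments relative to the diff, not part of it: the added `from transformers import HuggingFaceAgent` line is unused and does not match a class that transformers exports (the agent class there is `HfAgent`), so it is omitted, and `__call__` delegates to `generate_response` so the pipeline call lives in one place. The `dotenv` import and the use of `max_new_tokens` (instead of `max_length`, which also counts prompt tokens) are likewise assumptions.

# Sketch of the updated agent, assuming only transformers.pipeline is needed.
from dotenv import load_dotenv  # assumed; the diff only shows load_dotenv()
from transformers import pipeline

load_dotenv()


class BasicAgent:
    def __init__(self, model="google/gemma-7b"):
        # Local text-generation pipeline instead of the removed Inference API calls
        self.agent = pipeline("text-generation", model=model)
        print("BasicAgent initialized.")

    def __call__(self, question: str) -> str:
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        answer = self.generate_response(question)
        print(f"Agent returning answer (first 50 chars): {answer[:50]}...")
        return answer

    def generate_response(self, prompt: str) -> str:
        """Get a completion from the underlying model."""
        try:
            # max_new_tokens bounds only the newly generated tokens; the commit
            # itself passes max_length=100, which counts the prompt as well
            output = self.agent(prompt, max_new_tokens=100)
            return output[0]["generated_text"]
        except Exception as e:
            return f"Error generating response: {str(e)}"


# Example usage (google/gemma-7b is a gated model, so it may need a prior
# `huggingface-cli login`, and the weights are downloaded locally):
# agent = BasicAgent()
# print(agent("Explain quantum computing"))

The practical effect of the commit is to move generation from the hosted Inference API (the removed api-inference.huggingface.co calls) to a locally loaded model: HF_API_KEY is no longer read, but the Space now has to load and hold the model weights itself.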