wt002 committed on
Commit
7c3b427
·
verified ·
1 Parent(s): 5261680

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -9
app.py CHANGED
@@ -9,7 +9,7 @@ import wikipediaapi
9
  import pandas as pd
10
  from transformers import HuggingFaceAgent
11
  from transformers import pipeline # or HfAgent if you want the higher-level agent
12
-
13
 
14
  load_dotenv()
15
 
@@ -22,20 +22,23 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
22
 
23
class BasicAgent:
    """Minimal question-answering agent backed by a local text-generation pipeline.

    The pipeline's ``generated_text`` echoes the prompt, so both public entry
    points route through ``_generate``, which strips the echoed prompt and
    returns only the model's continuation.
    """

    def __init__(self, model="google/gemma-7b"):
        # Using pipeline for text generation; heavy — downloads/loads the
        # model when the agent is constructed.
        self.agent = pipeline("text-generation", model=model)
        print("BasicAgent initialized.")

    def _generate(self, prompt: str) -> str:
        """Run one generation and return only the newly generated text.

        Uses ``max_new_tokens`` rather than ``max_length``: ``max_length``
        counts prompt tokens too, so a long prompt left no room for an
        answer in the original code.
        """
        text = self.agent(prompt, max_new_tokens=100)[0]['generated_text']
        # generated_text includes the prompt; drop that echoed prefix.
        if text.startswith(prompt):
            text = text[len(prompt):]
        return text.strip()

    def __call__(self, question: str) -> str:
        """Answer *question* and return the generated text (prompt removed)."""
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        answer = self._generate(question)
        print(f"Agent returning answer (first 50 chars): {answer[:50]}...")
        return answer

    def generate_response(self, prompt: str) -> str:
        """Get response from model; on failure return an error string instead of raising."""
        try:
            return self._generate(prompt)
        except Exception as e:
            return f"Error generating response: {str(e)}"
 
9
  import pandas as pd
10
  from transformers import HuggingFaceAgent
11
  from transformers import pipeline # or HfAgent if you want the higher-level agent
12
+ from huggingface_hub import HfAgent
13
 
14
  load_dotenv()
15
 
 
22
 
23
class BasicAgent:
    """Agent that delegates questions to a remote Hugging Face inference endpoint.

    NOTE(review): ``HfAgent`` is imported from ``huggingface_hub`` and called
    with ``endpoint=``/``headers=`` keywords here, but the known
    ``transformers.HfAgent`` constructor takes ``url_endpoint``/``token``
    instead — confirm against the installed library version.
    """

    def __init__(self, model="google/gemma-7b"):
        # Initialize the HF Agent with your model endpoint; the API key is
        # read from the environment so it never appears in source.
        self.agent = HfAgent(
            endpoint=f"https://api-inference.huggingface.co/models/{model}",
            headers={"Authorization": f"Bearer {os.getenv('HF_API_KEY')}"}
        )
        print("BasicAgent initialized.")

    def __call__(self, question: str) -> str:
        """Answer *question* via the full agent workflow (``run``).

        Wrapped in try/except so a transport or endpoint failure yields the
        same error-string contract as ``generate_response`` instead of an
        unhandled exception.
        """
        print(f"Question: {question[:50]}...")
        try:
            # For simple chat instead, use: return self.agent.chat(question)
            return self.agent.run(question)  # full agent workflow
        except Exception as e:
            return f"Error generating response: {str(e)}"

    def generate_response(self, prompt: str) -> str:
        """Get response from model using chat interface."""
        try:
            return self.agent.chat(prompt)
        except Exception as e:
            return f"Error generating response: {str(e)}"