wt002 committed on
Commit
dee0311
·
verified ·
1 Parent(s): 564212d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -14
app.py CHANGED
@@ -7,10 +7,8 @@ from typing import List, Dict, Union
7
  import requests
8
  import wikipediaapi
9
  import pandas as pd
10
- from transformers import pipeline # or HfAgent if you want the higher-level agent
11
- from huggingface_hub import InferenceClient # Updated import
12
- from transformers import AutoTokenizer, AutoModelForCausalLM
13
- import torch
14
 
15
  load_dotenv()
16
 
@@ -23,19 +21,18 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
23
 
24
 
25
class BasicAgent:
    """Question-answering agent backed by a Hugging Face text-generation
    pipeline, with a Wikipedia summary lookup helper.
    """

    def __init__(self, model="google/gemma-7b"):
        # Text-generation pipeline used to answer questions.
        self.pipe = pipeline("text-generation", model=model)
        # Fix: wikipedia_search() reads self.wiki, but it was never created,
        # so any call raised AttributeError.
        # Assumes the English Wikipedia is intended — TODO confirm.
        self.wiki = wikipediaapi.Wikipedia("en")
        print("BasicAgent initialized.")

    def __call__(self, question: str) -> str:
        """Generate an answer for *question* via the pipeline.

        Returns the raw generated text (which includes the prompt, as is
        the pipeline's default behavior).
        """
        print(f"Question: {question[:50]}...")
        response = self.pipe(question, max_new_tokens=100)
        return response[0]['generated_text']

    def wikipedia_search(self, query: str) -> str:
        """Get Wikipedia summary"""
        page = self.wiki.page(query)
        return page.summary if page.exists() else "No Wikipedia page found"
39
 
40
 
41
 
 
7
  import requests
8
  import wikipediaapi
9
  import pandas as pd
10
+ from langchain_core.messages import HumanMessage
11
+ from agent import build_graph
 
 
12
 
13
  load_dotenv()
14
 
 
21
 
22
 
23
class BasicAgent:
    """A langgraph agent."""

    def __init__(self):
        print("BasicAgent initialized.")
        # Compiled langgraph state machine that does the actual work.
        self.graph = build_graph()

    def __call__(self, question: str) -> str:
        """Run *question* through the graph and return the final answer text."""
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        # Wrap the question in a HumanMessage from langchain_core
        initial_state = {"messages": [HumanMessage(content=question)]}
        result_state = self.graph.invoke(initial_state)
        final_message = result_state['messages'][-1]
        # NOTE(review): drops the first 14 characters of the reply —
        # presumably a fixed "FINAL ANSWER: "-style prefix emitted by the
        # graph; confirm against the prompt used in build_graph().
        return final_message.content[14:]
 
 
36
 
37
 
38