from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.messages import SystemMessage, HumanMessage


class GeminiAgent:
    def __init__(self, api_key: str, model_name: str = "gemini-2.0-flash"):
        # Suppress noisy library warnings
        import warnings
        warnings.filterwarnings("ignore", category=UserWarning)
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        warnings.filterwarnings("ignore", message=".*will be deprecated.*")
        warnings.filterwarnings("ignore", message="LangChain.*")

        self.api_key = api_key
        self.model_name = model_name
        # Keep the system prompt separate and send it with each request,
        # rather than passing it to the model constructor.
        self.system_message = SystemMessage(
            content=(
                "You are a helpful AI assistant. For the Wikipedia question, use the "
                "latest 2022 English Wikipedia version as your knowledge source. For the "
                "YouTube video question, analyze the video content carefully and count "
                "the maximum number of different bird species visible simultaneously in "
                "any frame."
            )
        )
        self.agent = self._setup_agent()

    def _setup_agent(self):
        # Initialize the chat model
        return ChatGoogleGenerativeAI(
            model=self.model_name,
            google_api_key=self.api_key,
            temperature=0,            # Lower temperature for focused responses
            max_output_tokens=2000,   # Increased for more detailed responses
            convert_system_message_to_human=True,
        )
    def run(self, query: str) -> str:
        try:
            # Prepend the system prompt so every query carries the same instructions
            messages = [self.system_message, HumanMessage(content=query)]
            response = self.agent.invoke(messages)
            return response.content
        except Exception as e:
            return f"Error: {e}"

    def run_interactive(self):
        print("AI Assistant Ready! (Type 'exit' to quit)")
        while True:
            query = input("You: ").strip()
            if query.lower() == 'exit':
                print("Goodbye!")
                break
            print("Assistant:", self.run(query))