from fastapi import FastAPI
import os
from typing import List, Dict
from dotenv import load_dotenv
import logging
from pathlib import Path
from langchain_community.document_loaders import PyPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import Qdrant as QdrantVectorStore
from langchain_google_genai import GoogleGenerativeAIEmbeddings
from langchain_groq import ChatGroq
from qdrant_client import QdrantClient
from qdrant_client.http.models import Distance, VectorParams
from qdrant_client.models import PointIdsList
from langgraph.graph import MessagesState, StateGraph, END
from langchain_core.messages import SystemMessage, HumanMessage
from langgraph.checkpoint.memory import MemorySaver

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

load_dotenv()
GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
GROQ_API_KEY = os.getenv('GROQ_API_KEY')
if not GOOGLE_API_KEY or not GROQ_API_KEY:
    raise ValueError("API keys not set in environment variables")

app = FastAPI()


class QASystem:
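    """Retrieval-augmented QA service over a directory of PDF manuals.

    PDFs are loaded and chunked, embedded with Google Generative AI
    embeddings into an in-memory Qdrant collection, and answered through a
    two-node LangGraph pipeline backed by a Groq-hosted Llama 3 model.
    """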

    def __init__(self):
        self.vector_store = None
        self.graph = None
        self.memory = None
        self.embeddings = None
        self.client = None
        self.pdf_dir = "pdfss"

    def load_pdf_documents(self):
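        """Load every PDF in the configured directory and split it into overlapping text chunks."""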
        documents = []
        pdf_dir = Path(self.pdf_dir)
        if not pdf_dir.exists():
            raise FileNotFoundError(f"PDF directory not found: {self.pdf_dir}")
        for pdf_path in pdf_dir.glob("*.pdf"):
            try:
                loader = PyPDFLoader(str(pdf_path))
                documents.extend(loader.load())
                logger.info(f"Loaded PDF: {pdf_path}")
            except Exception as e:
                logger.error(f"Error loading PDF {pdf_path}: {str(e)}")
        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=1000,
            chunk_overlap=100
        )
        split_docs = text_splitter.split_documents(documents)
        logger.info(f"Split documents into {len(split_docs)} chunks")
        return split_docs

    def initialize_system(self):
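        """Build the vector store, the LLM, and the LangGraph pipeline; return True on success."""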
        try:
            self.client = QdrantClient(":memory:")
            try:
                self.client.get_collection("pdf_data")
            except Exception:
                # Vector size 768 matches the output dimensionality of
                # Google's models/embedding-001.
                self.client.create_collection(
                    collection_name="pdf_data",
                    vectors_config=VectorParams(size=768, distance=Distance.COSINE),
                )
                logger.info("Created new collection: pdf_data")
            self.embeddings = GoogleGenerativeAIEmbeddings(
                model="models/embedding-001",
                google_api_key=GOOGLE_API_KEY
            )
            self.vector_store = QdrantVectorStore(
                client=self.client,
                collection_name="pdf_data",
                embeddings=self.embeddings,
            )
            documents = self.load_pdf_documents()
            if documents:
                try:
                    # Drop any points already in the collection (only the first
                    # 100 are scrolled; a fresh in-memory collection is empty).
                    points = self.client.scroll(collection_name="pdf_data", limit=100)[0]
                    if points:
                        self.client.delete(
                            collection_name="pdf_data",
                            points_selector=PointIdsList(
                                points=[p.id for p in points]
                            )
                        )
                except Exception as e:
                    logger.error(f"Error clearing vectors: {str(e)}")
                self.vector_store.add_documents(documents)
                logger.info(f"Added {len(documents)} documents to vector store")
            llm = ChatGroq(
                model="llama3-8b-8192",
                api_key=GROQ_API_KEY,
                temperature=0.7
            )
            graph_builder = StateGraph(MessagesState)

            def query_or_respond(state: MessagesState):
                # Use any tool messages as retrieval context; fall back to a
                # generic domain hint when none are present.
                retrieved_docs = [m for m in state["messages"] if m.type == "tool"]
                if retrieved_docs:
                    context = ' '.join(m.content for m in retrieved_docs)
                else:
                    context = "mountain bicycle documentation knowledge"
                system_prompt = (
                    "You are an AI assistant embedded within the Interactive Electronic Technical Manual (IETM) for Mountain Cycles. "
                    "Always provide accurate responses with references to provided data. "
                    "If the user query is not technical-specific, still respond from an IETM perspective."
                    f"\n\nContext:\n{context}"
                )
                messages = [SystemMessage(content=system_prompt)] + state["messages"]
                logger.info(f"Sending to LLM: {[m.content for m in messages]}")  # Debugging log
                response = llm.invoke(messages)
                return {"messages": [response]}

            def generate(state: MessagesState):
                retrieved_docs = [m for m in state["messages"] if m.type == "tool"]
                context = ' '.join(m.content for m in retrieved_docs) if retrieved_docs else "mountain bicycle documentation knowledge"
                system_prompt = (
                    "You are an AI assistant embedded within the Interactive Electronic Technical Manual (IETM) for Mountain Cycles. "
                    "Your responses MUST be accurate and concise (5 sentences max)."
                    f"\n\nContext:\n{context}"
                )
                messages = [SystemMessage(content=system_prompt)] + state["messages"]
                logger.info(f"Sending to LLM: {[m.content for m in messages]}")  # Debugging log
                response = llm.invoke(messages)
                return {"messages": [response]}

            # Linear two-step graph: query_or_respond -> generate -> END.
            graph_builder.add_node("query_or_respond", query_or_respond)
            graph_builder.add_node("generate", generate)
            graph_builder.set_entry_point("query_or_respond")
            graph_builder.add_edge("query_or_respond", "generate")
            graph_builder.add_edge("generate", END)

            # Checkpoint conversation state in memory so threads persist across calls.
            self.memory = MemorySaver()
            self.graph = graph_builder.compile(checkpointer=self.memory)
            return True
        except Exception as e:
            logger.error(f"System initialization error: {str(e)}")
            return False

    def process_query(self, query: str) -> List[Dict[str, str]]:
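        """Stream the query through the graph, collecting each step's latest message."""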
        try:
            responses = []
            for step in self.graph.stream(
                {"messages": [HumanMessage(content=query)]},
                stream_mode="values",
                config={"configurable": {"thread_id": "abc123"}}
            ):
                if step["messages"]:
                    responses.append({
                        'content': step["messages"][-1].content,
                        'type': step["messages"][-1].type
                    })
            return responses
        except Exception as e:
            logger.error(f"Query processing error: {str(e)}")
            return [{'content': f"Query processing error: {str(e)}", 'type': 'error'}]


qa_system = QASystem()
if qa_system.initialize_system():
    logger.info("QA System Initialized Successfully")
else:
    raise RuntimeError("Failed to initialize QA System")


# Route decorator assumed: the original listing defines query_api without one,
# so the GET path "/query" here is illustrative.
@app.get("/query")
async def query_api(query: str):
    responses = qa_system.process_query(query)
    return {"responses": responses}