from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from langchain_groq import ChatGroq
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from typing import Optional
import os

# Initialize FastAPI app
app = FastAPI()

# Create a request model with an optional context field
class SearchQuery(BaseModel):
    query: str
    context: Optional[str] = None  # Optional context field

# Initialize LangChain with Groq; the API key is read from the environment
# rather than hardcoded in the source
llm = ChatGroq(
    temperature=0.7,
    model_name="mixtral-8x7b-32768",
    groq_api_key=os.environ.get("GROQ_API_KEY")  # Set GROQ_API_KEY in your environment
)

# Define the prompt template with cybersecurity expertise
prompt_template = PromptTemplate(
    input_variables=["query", "context"],
    template="""
    Context: You are a cybersecurity expert with extensive experience in all sub-streams of the industry, including but not limited to network security, application security, cloud security, threat intelligence, penetration testing, and incident response. {context}

    Query: {query}

    Please provide a detailed and professional response to the query based on your expertise in cybersecurity and the provided context.
    """
)

chain = LLMChain(llm=llm, prompt=prompt_template)

@app.post("/search")
async def process_search(search_query: SearchQuery):
    try:
        # Set a default context if none was provided
        context = search_query.context or "You are a cybersecurity expert."

        # Process the query using the LangChain chain with the given context
        response = chain.run(query=search_query.query, context=context)

        return {
            "status": "success",
            "response": response
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/")
async def root():
    return {"message": "Search API is running"}
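
As a quick sanity check, here is a minimal client-side sketch for calling the /search endpoint. It assumes the app above is saved as main.py and served locally with `uvicorn main:app --reload`; the example query, context, and local URL are illustrative, while the request fields match the SearchQuery model defined above.

# client_example.py -- illustrative usage sketch, not part of the app itself
import requests

# Hypothetical query and context; adjust to your own use case
payload = {
    "query": "How should I prioritize patching CVEs found in a vulnerability scan?",
    "context": "Mid-sized company with mostly cloud-hosted Linux workloads."
}

# Assumes the API is running locally on the default uvicorn port
resp = requests.post("http://127.0.0.1:8000/search", json=payload, timeout=60)
resp.raise_for_status()

# The endpoint returns {"status": "success", "response": "..."} on success
print(resp.json()["response"])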