import streamlit as st

from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableSequence
from langchain_huggingface import HuggingFacePipeline
from langchain.agents import create_react_agent, AgentExecutor, Tool
from langchain.memory import ConversationBufferMemory

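# Hard-coded demo lead data; a real deployment would pull these from a CRM or database.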
LEADS = [
    {"name": "John Doe", "email": "[email protected]", "company": "TechCorp"},
    {"name": "Jane Smith", "email": "[email protected]", "company": "InnoSoft"},
    {"name": "Bob Johnson", "email": "[email protected]", "company": "DataTech"},
]


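# Cache the model so it is loaded once per session rather than on every Streamlit rerun.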
@st.cache_resource
def load_model():
    model_name = "google/flan-t5-large"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
    pipe = pipeline(
        "text2text-generation",
        model=model,
        tokenizer=tokenizer,
        max_length=512
    )
    return HuggingFacePipeline(pipeline=pipe)


local_llm = load_model()


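# Tool functions the agent can call.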
def search_leads(query):
    return [lead for lead in LEADS if query.lower() in lead['name'].lower()]


def send_email(to_email, subject, body):
    # Demo stub: display the email in the app instead of actually sending it.
    st.write(f"Email sent to: {to_email}")
    st.write(f"Subject: {subject}")
    st.write(f"Body: {body}")
    return "Email sent successfully"


tools = [
    Tool(
        name="Search Leads",
        func=search_leads,
        description="Useful for searching leads by name"
    ),
    Tool(
        name="Send Email",
        # ReAct tools receive a single action-input string, so this wrapper assumes a
        # simple "to_email | subject | body" convention and unpacks it for send_email.
        func=lambda text: send_email(*[part.strip() for part in text.split("|", 2)]),
        description="Useful for sending emails to leads. Input format: to_email | subject | body"
    )
]


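# ReAct prompt. create_react_agent requires {tools}, {tool_names}, and
# {agent_scratchpad} placeholders, so they are included alongside the
# conversation variables.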
prefix = """You are an AI CyberSecurity Program Advisor. Your goal is to engage with leads and get them to book a video call for an in-person sales meeting. You have access to a list of leads and can send emails. |
|
|
|
You have access to the following tools:""" |
|
|
|
suffix = """Begin! |
|
|
|
{chat_history} |
|
Human: {human_input} |
|
AI: Let's approach this step-by-step:""" |
|
|
|
prompt = PromptTemplate( |
|
template=prefix + "{agent_scratchpad}" + suffix, |
|
input_variables=["human_input", "chat_history", "agent_scratchpad"] |
|
) |
|
|
|
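# Wire the prompt, memory, and tools into a ReAct agent around the local model.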
llm_chain = RunnableSequence(prompt, local_llm)  # direct prompt -> model chain (the agent below builds its own)
memory = ConversationBufferMemory(memory_key="chat_history")

agent = create_react_agent(local_llm, tools, prompt)
agent_executor = AgentExecutor.from_agent_and_tools(
    agent=agent,
    tools=tools,
    verbose=True,
    memory=memory,
    handle_parsing_errors=True  # small models often drift from the ReAct output format
)


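# Streamlit UI: look up a lead and let the agent draft the outreach.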
st.title("AI CyberSecurity Program Advisor Demo")

st.write("This demo showcases an AI agent that can engage with leads and attempt to book video calls for in-person sales meetings.")

lead_name = st.text_input("Enter a lead's name to engage with:")

if lead_name:
    lead_info = search_leads(lead_name)
    if not lead_info:
        st.write(f"No lead found with the name {lead_name}")
    else:
        lead = lead_info[0]
        st.write(f"Lead found: {lead['name']} (Email: {lead['email']}, Company: {lead['company']})")

        initial_message = f"Hello {lead['name']}, I'd like to discuss our cybersecurity program with {lead['company']}. Are you available for a quick video call?"

        if st.button("Engage with Lead"):
            with st.spinner("AI is generating a response..."):
                result = agent_executor.invoke({"human_input": initial_message})

            st.write("AI Response:")
            st.write(result["output"])

st.sidebar.title("About")
st.sidebar.info("This is a demo of an AI CyberSecurity Program Advisor using an open-source LLM and LangChain. It's designed to engage with leads and attempt to book video calls for sales meetings.")
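# To try the demo (assuming this file is saved as app.py):
#   streamlit run app.py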