import streamlit as st
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
from langchain_core.prompts import PromptTemplate
from langchain_huggingface import HuggingFacePipeline
from langchain.agents import create_react_agent, AgentExecutor, Tool
from langchain.memory import ConversationBufferMemory
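# NOTE: this sketch assumes the packages below are already installed; the exact
# package names/versions are an assumption, not pinned by this script:
#   pip install streamlit transformers torch langchain langchain-huggingface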
# Mock lead data
LEADS = [
    {"name": "John Doe", "email": "[email protected]", "company": "TechCorp"},
    {"name": "Jane Smith", "email": "[email protected]", "company": "InnoSoft"},
    {"name": "Bob Johnson", "email": "[email protected]", "company": "DataTech"},
]
# Set up the open-source LLM
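# @st.cache_resource keeps a single loaded pipeline per Streamlit process, so
# script reruns do not reload the weights. Note that flan-t5-large is a
# multi-gigabyte download on the first run.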
@st.cache_resource
def load_model():
    model_name = "google/flan-t5-large"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
    pipe = pipeline(
        "text2text-generation",
        model=model,
        tokenizer=tokenizer,
        max_length=512
    )
    return HuggingFacePipeline(pipeline=pipe)

local_llm = load_model()
# Define the tools for the agent
def search_leads(query):
    results = [lead for lead in LEADS if query.lower() in lead['name'].lower()]
    return results

def send_email(email_details):
    # The ReAct agent passes the Action Input as a single string, so this tool
    # takes one argument. Expected format (a convention chosen for this demo):
    # "to_email | subject | body".
    parts = [part.strip() for part in email_details.split("|", 2)]
    to_email, subject, body = (parts + ["", "", ""])[:3]
    # For demo purposes, we just display the email details instead of sending anything.
    st.write(f"Email sent to: {to_email}")
    st.write(f"Subject: {subject}")
    st.write(f"Body: {body}")
    return "Email sent successfully"
tools = [
    Tool(
        name="Search Leads",
        func=search_leads,
        description="Useful for searching leads by name"
    ),
    Tool(
        name="Send Email",
        func=send_email,
        description="Useful for sending emails to leads. Input format: 'to_email | subject | body'"
    )
]
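# The ReAct agent sees each Tool's name and description rendered into the
# {tools} / {tool_names} slots of the prompt below and selects a tool by
# emitting its name on the "Action:" line, so the descriptions are the model's
# only guidance on when to use which tool.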
# Set up the ReAct agent prompt
prompt = PromptTemplate.from_template(
    """You are an AI CyberSecurity Program Advisor. Your goal is to engage with leads and get them to book a video call for an in-person sales meeting. You have access to a list of leads and can send emails.
You have access to the following tools:
{tools}
Use the following format:
Question: the input question you must answer
Thought: you should always think about what to do
Action: the action to take, should be one of [{tool_names}]
Action Input: the input to the action
Observation: the result of the action
... (this Thought/Action/Action Input/Observation can repeat N times)
Thought: I now know the final answer
Final Answer: the final answer to the original input question
Begin!
Question: {input}
Thought: Let's approach this step-by-step:
{agent_scratchpad}"""
)
agent = create_react_agent(
    local_llm,
    tools,
    prompt
)
# Create the agent executor
agent_executor = AgentExecutor.from_agent_and_tools(
    agent=agent,
    tools=tools,
    verbose=True,
    memory=ConversationBufferMemory(),
    handle_parsing_errors=True  # smaller models often break the ReAct format; retry instead of raising
)
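# Example of calling the executor directly (outside Streamlit), with a made-up query:
#   result = agent_executor.invoke({"input": "Find the lead named Jane Smith"})
#   print(result["output"])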
# Streamlit interface
st.title("AI CyberSecurity Program Advisor Demo")
st.write("This demo showcases an AI agent that can engage with leads and attempt to book video calls for in-person sales meetings.")
lead_name = st.text_input("Enter a lead's name to engage with:")
if lead_name:
    lead_info = search_leads(lead_name)
    if not lead_info:
        st.write(f"No lead found with the name {lead_name}")
    else:
        lead = lead_info[0]
        st.write(f"Lead found: {lead['name']} (Email: {lead['email']}, Company: {lead['company']})")
        initial_message = f"Hello {lead['name']}, I'd like to discuss our cybersecurity program with {lead['company']}. Are you available for a quick video call?"
        if st.button("Engage with Lead"):
            with st.spinner("AI is generating a response..."):
                result = agent_executor.invoke({"input": initial_message})
            st.write("AI Response:")
            st.write(result["output"])  # the agent's final answer
st.sidebar.title("About")
st.sidebar.info("This is a demo of an AI CyberSecurity Program Advisor using an open-source LLM and LangChain. It's designed to engage with leads and attempt to book video calls for sales meetings.")
# To run this script, use: streamlit run your_script_name.py