# Author: Cazimir Roman
# Commit 8182eb0: add transformers dependency; change llm to oss
import streamlit as st
from streamlit_chat import message
import os
import uuid
from dataclasses import dataclass
from langchain.agents import load_tools, initialize_agent
from langchain.llms import OpenAI
from transformers import AutoModelForQuestionAnswering, AutoTokenizer
@dataclass
class Message:
    """One chat-history entry: the rendered text plus its author flag."""
    text: str      # body shown in the chat widget
    is_user: bool  # True when the human typed it, False for bot replies
# Set streamlit page configuration
st.set_page_config(page_title="Chat with wikipedia")

# Sidebar: static description of the app, how it works, and credits.
with st.sidebar:
    st.title('Chat GPT using Langchain Agents to talk to Wikipedia')
    st.markdown('''
## About
This app is an OpenAi LLM-powered PDF chatbot built using:
- [Streamlit](https://streamlit.io/)
- [LangChain](https://python.langchain.com/)
- [OpenAI](https://platform.openai.com/docs/models) LLM model
## How it works
- Ask any question. It will try to use wikipedia api to get the result. (For ex: *Who is Madonna?*)
- Ask a follow up question that would involve using some kind of mathematical operation (For ex: *Take Elton Johns age and multiply it with Madonnas age*)
- The langchain agent will try to get the necessary information based on a tree of thought type of prompting
''')
    # FIX: the robot emoji was UTF-8 mojibake ("πŸ€–", a double-encoded 🤖);
    # restore the intended character in this user-facing string.
    st.write('Made with 🤖 by [Cazimir Roman](https://cazimir.dev)')
def load_app():
    """Build the LangChain agent and render the chat UI.

    Creates a zero-shot ReAct agent with the `llm-math` and `wikipedia`
    tools, keeps the conversation in `st.session_state`, and wires a text
    input whose on_change callback feeds new prompts into the agent.
    Assumes OPENAI_API_KEY is already set (main() gates on this).
    """
    ## AGENT setup
    # FIX: the previous revision passed a raw transformers
    # AutoModelForQuestionAnswering instance as the agent LLM.  LangChain's
    # load_tools()/initialize_agent() require a LangChain LLM wrapper (they
    # call it for text completion), so the agent crashed on first use.
    # main() already requires an OpenAI key, so use the OpenAI LLM.
    llm = OpenAI(temperature=0)
    tools = load_tools(["llm-math", "wikipedia"], llm=llm)
    agent = initialize_agent(tools, llm, agent="zero-shot-react-description", verbose=True)
    ## AGENT setup

    # Seed conversation history and the prompt buffer on first run.
    if 'messages' not in st.session_state:
        st.session_state['messages'] = [Message(text="Hey, this is Botty. How can I help?", is_user=False)]
    if 'current_prompt' not in st.session_state:
        st.session_state['current_prompt'] = ""  # Store the latest user input

    def submit():
        # Copy the widget value into current_prompt, then clear the widget
        # so the same prompt is not re-submitted on the next rerun.
        st.session_state.current_prompt = st.session_state.prompt_input
        st.session_state.prompt_input = ""

    # A non-empty current_prompt means the user just submitted a question.
    if st.session_state.current_prompt != "":
        user_query = st.session_state.current_prompt
        st.session_state.messages.append(Message(text=user_query, is_user=True))
        with st.spinner('AI is thinking...'):
            response = agent.run(user_query)
            st.session_state.messages.append(Message(text=response, is_user=False))

    # Render newest-first; random uuid keys keep streamlit widget keys unique.
    # (Iterating an empty history is a no-op, so no length guard is needed.)
    for message_item in reversed(st.session_state['messages']):
        message(message_item.text, key=str(uuid.uuid4()), is_user=message_item.is_user)

    # Create a text input for user
    st.text_input('You: ', key='prompt_input', on_change=submit)
def main():
    """Collect the OpenAI API key and launch the chat app once it is set.

    If OPENAI_API_KEY is already in the environment the app starts
    immediately; otherwise the user pastes a key, which is validated and
    exported to the environment before load_app() runs.
    """
    container = st.container()
    with container:
        open_ai_key = os.getenv("OPENAI_API_KEY")
        # FIX: compare to None with `is`, not `==` (PEP 8).
        api_key = container.text_input(
            "Enter your OpenAI API key",
            type="password",
            value="" if open_ai_key is None else open_ai_key,
        )
        st.write("Get your own Open AI API key from [here](https://platform.openai.com/account/api-keys)")
        submit = container.button("Submit")

        # Key already present in the environment: start without asking.
        if open_ai_key:
            load_app()

        # submit button is pressed
        if submit:
            # FIX: keys are no longer always 51 characters (project keys
            # use the longer `sk-proj-...` format), so the old
            # `len(api_key) == 51` check rejected valid keys.  Validate by
            # the stable `sk-` prefix instead.
            if api_key.startswith("sk-"):
                os.environ["OPENAI_API_KEY"] = api_key
                load_app()
            else:
                st.error("Api key is not correct")


if __name__ == '__main__':
    main()