import streamlit as st
from streamlit_chat import message
import os
import uuid
from dataclasses import dataclass
from langchain.agents import load_tools, initialize_agent
from langchain.llms import OpenAI
# from transformers import AutoModelForQuestionAnswering, AutoTokenizer  # only needed for the
# Hugging Face experiment left commented out in load_app() below

@dataclass
class Message:
    text: str
    is_user: bool

# Set Streamlit page configuration
st.set_page_config(page_title="Chat with Wikipedia")

with st.sidebar:
    st.title('Chat GPT using LangChain Agents to talk to Wikipedia')
    st.markdown('''
    ## About
    This app is an OpenAI LLM-powered Wikipedia chatbot built using:
    - [Streamlit](https://streamlit.io/)
    - [LangChain](https://python.langchain.com/)
    - [OpenAI](https://platform.openai.com/docs/models) LLM model

    ## How it works
    - Ask any question. The agent will try to answer it through the Wikipedia API (for example: *Who is Madonna?*).
    - Ask a follow-up question that involves a mathematical operation (for example: *Take Elton John's age and multiply it by Madonna's age*).
    - The LangChain agent gathers the required facts step by step using ReAct-style (reason-and-act) prompting.
    ''')
    st.write('Made with 🤖 by [Cazimir Roman](https://cazimir.dev)')
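
# The "How it works" steps in the sidebar boil down to a single agent loop.
# A minimal standalone sketch (run outside Streamlit, assuming OPENAI_API_KEY
# is already set in the environment):
#
#   llm = OpenAI(temperature=0)
#   tools = load_tools(["llm-math", "wikipedia"], llm=llm)
#   agent = initialize_agent(tools, llm, agent="zero-shot-react-description", verbose=True)
#   agent.run("Take Elton John's age and multiply it by Madonna's age")
#
# With verbose=True the agent logs each Thought/Action/Observation step; typically the
# wikipedia tool looks up the ages and the llm-math tool computes the product.
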
def load_app():
    ## AGENT setup
    # A Hugging Face extractive QA model is not a LangChain LLM and cannot drive
    # the agent, so the OpenAI completion model is used instead:
    # model_name = "deepset/roberta-base-squad2"
    # tokenizer = AutoTokenizer.from_pretrained(model_name)
    # llm = AutoModelForQuestionAnswering.from_pretrained(model_name)
    llm = OpenAI(temperature=0)

    tools = load_tools(["llm-math", "wikipedia"], llm=llm)
    agent = initialize_agent(tools, llm, agent="zero-shot-react-description", verbose=True)

    # Initialize the chat history with a greeting from the bot
    if 'messages' not in st.session_state:
        st.session_state['messages'] = [Message(text="Hey, this is Botty. How can I help?", is_user=False)]

    if 'current_prompt' not in st.session_state:
        st.session_state['current_prompt'] = ""  # Store the latest user input
    # Define function to submit user input
    def submit():
        # Set current_prompt to the current value of prompt_input
        st.session_state.current_prompt = st.session_state.prompt_input
        # Clear prompt_input
        st.session_state.prompt_input = ""

    # Process the latest user query, if there is one
    if st.session_state.current_prompt != "":
        user_query = st.session_state.current_prompt
        st.session_state.messages.append(Message(text=user_query, is_user=True))

        # Run the agent on the user query
        with st.spinner('AI is thinking...'):
            response = agent.run(user_query)
            st.session_state.messages.append(Message(text=response, is_user=False))

        # Reset the prompt so the same query is not re-run on the next rerun
        st.session_state.current_prompt = ""

    # Render the chat history, newest message first; uuid keys keep widget keys unique
    if len(st.session_state['messages']) != 0:
        for message_item in reversed(st.session_state['messages']):
            message(message_item.text, key=str(uuid.uuid4()), is_user=message_item.is_user)

    # Create a text input for the user
    st.text_input('You: ', key='prompt_input', on_change=submit)
def main():
    container = st.container()

    with container:
        open_ai_key = os.getenv("OPENAI_API_KEY")

        api_key = container.text_input("Enter your OpenAI API key", type="password",
                                       value="" if open_ai_key is None else open_ai_key)
        st.write("Get your own OpenAI API key from [here](https://platform.openai.com/account/api-keys)")
        submit = container.button("Submit")

        # A key is already available in the environment: load the app directly
        if open_ai_key:
            load_app()

        # Submit button is pressed
        if submit:
            # Rough sanity check on the API key length before storing it
            if len(api_key) == 51:
                os.environ["OPENAI_API_KEY"] = api_key
                load_app()
            else:
                st.error("API key is not valid")


if __name__ == '__main__':
    main()
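
# Local run (a sketch; package names inferred from the imports above, and the
# Wikipedia tool additionally needs the `wikipedia` package):
#   pip install streamlit streamlit-chat langchain openai wikipedia
#   streamlit run app.py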