from data_sources import process_data_upload
import gradio as gr
from haystack.dataclasses import ChatMessage
from haystack.components.generators.chat import OpenAIChatGenerator
import os
from getpass import getpass
from dotenv import load_dotenv
# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
load_dotenv()

# Fall back to an interactive prompt when the key is not already set.
if "OPENAI_API_KEY" not in os.environ:
    os.environ["OPENAI_API_KEY"] = getpass("Enter OpenAI API key:")

# Chat model used for every conversation turn in chatbot_with_fc.
chat_generator = OpenAIChatGenerator(model="gpt-4o")

# Most recent model response; rebound locally inside chatbot_with_fc.
response = None

# Conversation history seeded with the system prompt.
# NOTE(review): module-level, so it is shared across all sessions/users —
# confirm whether per-session history is intended.
messages = [
    ChatMessage.from_system(
        "You are a helpful and knowledgeable agent who has access to an SQL database which has a table called 'data_source'"
    )
]
def chatbot_with_fc(message, history, session_hash):
    """Chat handler with OpenAI tool/function calling.

    Args:
        message: The user's latest chat message text.
        history: Chat history supplied by gr.ChatInterface (unused; the
            module-level ``messages`` list is used instead).
        session_hash: Per-session identifier used to scope the DB and tools.

    Returns:
        The text of the model's final (non-tool-call) reply.
    """
    # Imported lazily to avoid circular imports at module load time.
    from functions import sqlite_query_func
    from pipelines import rag_pipeline_func
    import tools

    # Map tool names (as the model emits them) to their implementations.
    available_functions = {
        "sql_query_func": sqlite_query_func,
        "rag_pipeline_func": rag_pipeline_func,
    }

    # NOTE(review): `messages` is module-global, so conversation history is
    # shared across sessions — confirm whether per-session state is intended.
    messages.append(ChatMessage.from_user(message))
    response = chat_generator.run(
        messages=messages,
        generation_kwargs={"tools": tools.tools_call(session_hash)},
    )

    while True:
        reply = response["replies"][0]
        # Bug fix: the original `if response and A or B` parsed as
        # `(response and A) or B`, so a falsy `response` would still
        # evaluate B and raise. Parenthesized to give the intended grouping.
        if response and (
            reply.meta["finish_reason"] == "tool_calls" or reply.tool_calls
        ):
            # Execute every tool call the model requested, feeding each
            # result back into the conversation.
            for function_call in reply.tool_calls:
                messages.append(ChatMessage.from_assistant(tool_calls=[function_call]))
                # Parse the tool name and its arguments.
                function_name = function_call.tool_name
                function_args = function_call.arguments
                # Find the corresponding function and call it with the given arguments.
                function_to_call = available_functions[function_name]
                function_response = function_to_call(**function_args, session_hash=session_hash)
                # Append the tool result so the model sees it on the next turn.
                messages.append(
                    ChatMessage.from_tool(tool_result=function_response['reply'], origin=function_call)
                )
            # Ask the model again, now with the tool results in context.
            response = chat_generator.run(
                messages=messages,
                generation_kwargs={"tools": tools.tools_call(session_hash)},
            )
        else:
            # Regular conversation turn: record the reply and stop looping.
            messages.append(reply)
            break

    return response["replies"][0].text
def delete_db(req: gr.Request):
    """Remove this session's SQLite database file, if one was created."""
    session_db = f'data_source_{req.session_hash}.db'
    # EAFP: attempt the delete and ignore a file that was never created.
    try:
        os.remove(session_db)
    except FileNotFoundError:
        pass
def run_example(input):
    """Identity passthrough: hand the example file straight to the output component."""
    return input
def example_display(input):
    """Toggle visibility of the example button.

    The button is visible only while no file has been loaded and is hidden
    once any file is present.

    Args:
        input: Current value of the file component, or None when empty.

    Returns:
        A gr.update(...) payload setting the ``visible`` property.
    """
    # Fix: compare to None with `is`, not `==` (PEP 8; `==` can be hijacked
    # by a custom __eq__). The branchy flag assignment collapses to one
    # boolean expression.
    return gr.update(visible=input is None)
# Minimal styling: taller file-drop area and a capped-width example button.
css= ".file_marker .large{min-height:50px !important;} .example_btn{max-width:300px;}"

with gr.Blocks(css=css) as demo:
    # Static page header and description.
    title = gr.HTML("<h1 style='text-align:center;'>Virtual Data Analyst</h1>")
    description = gr.HTML("<p style='text-align:center;'>Upload a CSV file and chat with our virtual data analyst to get insights on your data set</p>")

    # Hidden file component holding the bundled sample CSV; the button below
    # copies it into the visible upload slot via run_example.
    example_file = gr.File(visible=False, value="samples/bank_marketing_campaign.csv")
    example_btn = gr.Button(value="Try Me: bank_marketing_campaign.csv", elem_classes="example_btn", size="md", variant="primary")

    # Visible upload slot; restricted to CSV files.
    file_output = gr.File(label="CSV File", show_label=True, elem_classes="file_marker", file_types=['.csv'])

    # Clicking the example button loads the sample file into the upload slot.
    example_btn.click(fn=run_example, inputs=example_file, outputs=file_output)
    # Hide the example button once any file is present (example_display).
    file_output.change(fn=example_display, inputs=file_output, outputs=example_btn)

    @gr.render(inputs=file_output)
    def data_options(filename, request: gr.Request):
        # Re-rendered whenever the uploaded file changes; builds the chat UI
        # only once a file is available.
        print(filename)
        if filename:
            # Hidden textbox smuggles the session hash into the chat handler
            # as an additional input.
            parameters = gr.Textbox(visible=False, value=request.session_hash)
            bot = gr.Chatbot(type='messages', label="CSV Chat Window", show_label=True, render=False, visible=True, elem_classes="chatbot")
            chat = gr.ChatInterface(
                fn=chatbot_with_fc,
                type='messages',
                chatbot=bot,
                title="Chat with your data file",
                concurrency_limit=None,
                examples=[
                    ["Describe the dataset"],
                    ["List the columns in the dataset"],
                    ["What could this data be used for?"],
                ],
                additional_inputs=parameters
            )
        # Ingest the file into the per-session SQLite database. process_upload
        # is defined below, but the name is resolved at call time (render
        # time), so the ordering is safe.
        process_upload(filename, request.session_hash)

    def process_upload(upload_value, session_hash):
        # Load the uploaded CSV into this session's 'data_source' table;
        # no-op when there is no file yet.
        if upload_value:
            process_data_upload(upload_value, session_hash)
        return [], []

    # Delete the per-session database file when the browser session ends.
    demo.unload(delete_db)