import gradio as gr
from run_chatbot import process_with_longcepo, SAMBANOVA_MODEL
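
# Assumed contract of the run_chatbot module (defined elsewhere in this repo):
#   process_with_longcepo(system_prompt, initial_query) -> str  : the generated answer
#   SAMBANOVA_MODEL: str                                        : name of the Sambanova-hosted model shown in the UI title
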
def chatbot_interface(system_prompt, context, query):
    """Gradio interface function to interact with the LongCePO chatbot."""
    if not context or not query:
        return "Please provide both context and query."

    # Combine context and query using the expected delimiter
    initial_query = f"{context}<CONTEXT_END>{query}"

    # Use a default system prompt if none is provided
    if not system_prompt:
        system_prompt = "You are a helpful assistant designed to answer questions based on the provided context."

    print(f"Received request:\nSystem Prompt: {system_prompt}\nContext: {context[:100]}...\nQuery: {query}")

    # Call the processing function
    result = process_with_longcepo(system_prompt, initial_query)
    print(f"Returning result: {result[:100]}...")
    return result
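
# Quick sanity check without launching the UI (hypothetical example values):
#   chatbot_interface("", "Alice moved to Paris in 2019.", "When did Alice move to Paris?")
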
# Define Gradio interface components
iface = gr.Interface(
    fn=chatbot_interface,
    inputs=[
        gr.Textbox(label="System Prompt (Optional)", placeholder="Enter system prompt here...", lines=2),
        gr.Textbox(label="Context", placeholder="Enter the long context here...", lines=10),
        gr.Textbox(label="Query", placeholder="Enter your query based on the context here...", lines=2),
    ],
    outputs=gr.Textbox(label="Answer", lines=10),
    title=f"LongCePO Chatbot ({SAMBANOVA_MODEL})",
    description="Enter a long context and a query. The chatbot will use the LongCePO method with the Sambanova backend to generate an answer.",
    allow_flagging="never",
)
# Launch the Gradio app
if __name__ == "__main__":
    print("Launching Gradio interface...")
    # Listen on 0.0.0.0 to make it accessible externally if needed
    iface.launch(server_name="0.0.0.0", server_port=7860)