|
import gradio as gr |
|
from run_chatbot import process_with_longcepo, SAMBANOVA_MODEL |
|
|
|
def chatbot_interface(system_prompt, context, query):
    """Gradio interface function to interact with the LongCePO chatbot.

    Args:
        system_prompt: Optional system prompt; a default is used when blank
            or whitespace-only.
        context: Long-form context the answer should be grounded in.
        query: The user's question about the context.

    Returns:
        The answer produced by the LongCePO pipeline, or a validation
        message when context or query is missing.
    """
    # Normalize inputs: Gradio may hand us None, and whitespace-only text
    # should be treated the same as missing input rather than sent on.
    context = (context or "").strip()
    query = (query or "").strip()

    if not context or not query:
        return "Please provide both context and query."

    # The LongCePO pipeline expects the context and query joined by this
    # sentinel marker in a single string.
    initial_query = f"{context}<CONTEXT_END>{query}"

    # Fall back to a generic system prompt when none (or only whitespace)
    # was supplied.
    system_prompt = (system_prompt or "").strip() or (
        "You are a helpful assistant designed to answer questions based on the provided context."
    )

    print(f"Received request:\nSystem Prompt: {system_prompt}\nContext: {context[:100]}...\nQuery: {query}")

    result = process_with_longcepo(system_prompt, initial_query)

    print(f"Returning result: {result[:100]}...")

    return result
|
|
|
|
|
# Input widgets: an optional system prompt, the long context, and the query.
_system_prompt_input = gr.Textbox(
    label="System Prompt (Optional)",
    placeholder="Enter system prompt here...",
    lines=2,
)
_context_input = gr.Textbox(
    label="Context",
    placeholder="Enter the long context here...",
    lines=10,
)
_query_input = gr.Textbox(
    label="Query",
    placeholder="Enter your query based on the context here...",
    lines=2,
)

# Single-output answer box wired to the chatbot handler above.
iface = gr.Interface(
    fn=chatbot_interface,
    inputs=[_system_prompt_input, _context_input, _query_input],
    outputs=gr.Textbox(label="Answer", lines=10),
    title=f"LongCePO Chatbot ({SAMBANOVA_MODEL})",
    description="Enter a long context and a query. The chatbot will use the LongCePO method with Sambanova backend to generate an answer.",
    allow_flagging="never",
)
|
|
|
|
|
if __name__ == "__main__":

    print("Launching Gradio interface...")

    # Serve the UI on all network interfaces (0.0.0.0) so it is reachable
    # from outside the host (e.g. inside a container), on port 7860 —
    # Gradio's conventional default port.
    iface.launch(server_name="0.0.0.0", server_port=7860)
|
|
|
|