GrammarBlocks / app.py
import gradio as gr
def respond(message, history, system_message, prompt, max_length, min_length, max_new_tokens, num_beams, temperature, top_p):
    # gr.ChatInterface passes (message, history) first, then the additional inputs in order.
    # Placeholder logic: echo the generation settings back until correct_text() is wired in.
    # response = correct_text(message, max_length, max_new_tokens, min_length, num_beams, temperature, top_p)
    # yield response
    return (
        f"System message: {system_message}, Max Length: {max_length}, Min Length: {min_length}, "
        f"Max new tokens: {max_new_tokens}, Num Beams: {num_beams}, Temperature: {temperature}, Top-p: {top_p}"
    )
def set_prompt(prompt_text):
    return gr.update(value=prompt_text)
# Create the Gradio interface
with gr.Blocks() as demo:
    system_message = gr.Textbox(value="You are a friendly Chatbot.", label="System message")
    prompt_box = gr.Textbox(lines=2, placeholder="Enter your prompt here...")
    # Predefined prompts as buttons
    prompt1 = gr.Button("we shood buy an car")
    prompt2 = gr.Button("this is an exampl of bad grammer")
    prompt3 = gr.Button("their are many ways to correct grammar")
    # Clicking a button fills the prompt box with its example sentence.
    prompt1.click(lambda: set_prompt("we shood buy an car"), inputs=None, outputs=prompt_box)
    prompt2.click(lambda: set_prompt("this is an exampl of bad grammer"), inputs=None, outputs=prompt_box)
    prompt3.click(lambda: set_prompt("their are many ways to correct grammar"), inputs=None, outputs=prompt_box)
    max_length = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max Length")
    min_length = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Min Length")
    max_new_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
    num_beams = gr.Slider(minimum=1, maximum=10, value=5, step=1, label="Num Beams")
    temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
    top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)", visible=True)
    show_top_p = gr.Checkbox(value=True, label="Show Top-p Slider")
    # Toggle the Top-p slider's visibility with the checkbox.
    show_top_p.change(lambda show: gr.update(visible=show), inputs=show_top_p, outputs=top_p)
    # gr.ChatInterface calls respond(message, history, *additional_inputs), so the
    # list below must match the respond() signature after (message, history).
    gr.ChatInterface(
        respond,
        additional_inputs=[
            system_message,
            prompt_box,
            max_length,
            min_length,
            max_new_tokens,
            num_beams,
            temperature,
            top_p,
        ],
    )
demo.launch()
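# Usage note: to try the app locally, run `python app.py`, or `gradio app.py`
# for auto-reload while editing; on Hugging Face Spaces it launches as-is.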