Fix for streaming output (adds the missing closing parentheses on the `generate_btn.click(...)` and `input_text.submit(...)` handler registrations)
Browse files
app.py
CHANGED
@@ -77,8 +77,8 @@ with gr.Blocks() as demo:
     model_selector.change(fn=load_model_on_selection, inputs=model_selector, outputs=model_status)

     # Generate with current model
-    generate_btn.click(fn=generate_text, inputs=input_text, outputs=output_text
-    input_text.submit(fn=generate_text, inputs=input_text, outputs=output_text
+    generate_btn.click(fn=generate_text, inputs=input_text, outputs=output_text)
+    input_text.submit(fn=generate_text, inputs=input_text, outputs=output_text)


     load_model_on_selection("meta-llama/Llama-3.2-3B-Instruct")