streaming
app.py CHANGED
@@ -1,12 +1,13 @@
 import gradio as gr
 
-from llama_cpp import Llama
+from llama_cpp import Llama, LlamaTokenizer
 
 llm = Llama.from_pretrained(
     repo_id="ID2223JR/gguf_model",
     filename="unsloth.Q4_K_M.gguf",
 )
 
+
 # Data storage
 ingredients_list = []
 
@@ -24,34 +25,57 @@ def add_ingredient(ingredient, quantity):
 
 # Function to enable/disable add button
 def validate_inputs(ingredient, quantity):
-    if ingredient and quantity > 0:
+    if ingredient and quantity is not None and quantity > 0:
         return gr.update(interactive=True)
     return gr.update(interactive=False)
 
 
-# Function to handle model submission
 def submit_to_model():
     if not ingredients_list:
-        ...
+        yield "Ingredients list is empty! Please add ingredients first."
+        return
 
-    # Join ingredients into a single prompt
     prompt = f"Using the following ingredients, suggest a recipe:\n\n" + "\n".join(
         ingredients_list
     )
 
-    ...
+    try:
+        response = llm.create_chat_completion(
+            messages=[
+                {
+                    "role": "system",
+                    "content": (
+                        "You are a world-renowned chef, celebrated for your expertise..."
+                    ),
+                },
+                {"role": "user", "content": prompt},
+            ],
+            stream=True,  # Enable streaming
+        )
+
+        content = ""
+
+        for partial_response in response:
+            if "choices" in partial_response:
+                if "delta" in partial_response["choices"][0]:
+                    content += partial_response["choices"][0]["delta"].get(
+                        "content", ""
+                    )
+                elif "message" in partial_response["choices"][0]:
+                    content += partial_response["choices"][0]["message"].get(
+                        "content", ""
+                    )
+                else:
+                    content += partial_response["choices"][0].get("text", "")
+                if content:
+                    yield content
+            else:
+                yield "Unexpected response structure."
 
+        ingredients_list.clear()  # Reset list after generation
 
+    except Exception as e:
+        yield f"An error occurred: {str(e)}"
 
 
 # App
@@ -69,7 +93,7 @@ def app():
     with gr.Row():
         submit_button = gr.Button("Submit")
         model_output = gr.Textbox(
-            label="Recipe Suggestion", lines=
+            label="Recipe Suggestion", lines=25, interactive=False
         )
 
     # Validate inputs
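The rewritten submit_to_model is a generator: it accumulates the streamed tokens in content and yields the full text so far on every chunk, which is the pattern Gradio expects for streaming output into a component. In llama-cpp-python, create_chat_completion(stream=True) yields OpenAI-style chunks with the incremental text under choices[0]["delta"]["content"], which is why the loop checks "delta" first and only falls back to "message" / "text". The diff does not show how the generator is attached to the button inside app(), so the snippet below is only a minimal sketch of the usual wiring; the names submit_button, model_output, and submit_to_model come from the visible hunks, everything else is assumed.

# Sketch (assumption): hooking the streaming generator to the Submit button.
# Because submit_to_model is a generator, Gradio re-renders the Textbox with
# each yielded string, so yielding the growing `content` gives a live
# "typing" effect instead of waiting for the full recipe.
submit_button.click(
    fn=submit_to_model,    # generator defined in the hunk above
    inputs=None,           # it reads the module-level ingredients_list
    outputs=model_output,  # Textbox(label="Recipe Suggestion", lines=25)
)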