Spaces: Running on Zero
mjavaid committed · Commit 1b04e5d · 1 Parent(s): 5f75bf4
first commit
app.py
CHANGED
@@ -16,7 +16,6 @@ pipe = pipeline(
     use_auth_token=hf_token
 )
 @spaces.GPU
-
 def generate_response(user_text, user_image, history):
     messages = [
         {
@@ -31,22 +30,15 @@ def generate_response(user_text, user_image, history):
     user_content.append({"type": "text", "text": user_text})
     messages.append({"role": "user", "content": user_content})
 
+    # Call the pipeline with the provided messages.
     output = pipe(text=messages, max_new_tokens=200)
-
+
+    # Attempt to extract the generated content using the expected structure.
+    try:
+        response = output[0][0]["generated_text"][-1]["content"]
+    except (KeyError, IndexError, TypeError):
+        # Fallback: return the raw output as a string.
+        response = str(output)
+
     history.append((user_text, response))
-    return history, history
-
-with gr.Blocks() as demo:
-    gr.Markdown("# Gemma 3 Chat Interface")
-    gr.Markdown(
-        "This interface lets you chat with the Gemma 3 model. "
-        "You can type a message and optionally attach an image."
-    )
-    chatbot = gr.Chatbot(type="messages")
-    with gr.Row():
-        txt = gr.Textbox(show_label=False, placeholder="Type your message here...", container=False)
-        img = gr.Image(type="pil", label="Attach an image (optional)")
-    state = gr.State([])
-
-    txt.submit(generate_response, inputs=[txt, img, state], outputs=[chatbot, state])
-    demo.launch()
+    return history, history
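The added try/except exists because the exact shape of the pipeline output can vary (for example, with the transformers version or with batched inputs), so a failed lookup should not crash the Space. Below is a minimal, self-contained sketch of the same guarded extraction; the nested list/dict shape of fake_output is an assumption that mirrors the indexing output[0][0]["generated_text"][-1]["content"] used in the commit, not something taken from the library documentation.

# Stand-in for `output = pipe(text=messages, max_new_tokens=200)`.
# The nesting below is assumed from the indexing in the commit, not
# verified against a specific transformers release.
fake_output = [[{
    "generated_text": [
        {"role": "user", "content": "Describe the image."},
        {"role": "assistant", "content": "A cat sitting on a windowsill."},
    ]
}]]

def extract_response(output):
    # Same guarded extraction as the new code in generate_response.
    try:
        # Expected path: last chat turn of the first generation.
        return output[0][0]["generated_text"][-1]["content"]
    except (KeyError, IndexError, TypeError):
        # Fallback: stringify whatever came back so the UI still shows something.
        return str(output)

print(extract_response(fake_output))              # "A cat sitting on a windowsill."
print(extract_response({"unexpected": "shape"}))  # falls back to str(...)

Catching (KeyError, IndexError, TypeError) covers the common failure modes of that chained indexing: a missing key, an empty list, or a non-subscriptable element.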