Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -25,6 +25,7 @@ model = Gemma3ForConditionalGeneration.from_pretrained(
 ).to(device).eval()

 @torch.inference_mode()
+@spaces.GPU
 def process(message, history):
     """Generate the model response in streaming mode given message and history
     """
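For context, a minimal sketch of how the decorated handler could sit in app.py on a ZeroGPU Space. Only the decorator placement and the model-loading call shown in the hunk come from the diff; the checkpoint name, imports, generation body, and Gradio wiring below are illustrative assumptions.

# Minimal sketch around the change; assumptions are marked in the comments.
import spaces                      # ZeroGPU helper providing the spaces.GPU decorator
import torch
import gradio as gr
from transformers import AutoProcessor, Gemma3ForConditionalGeneration

device = "cuda"
model_id = "google/gemma-3-4b-it"  # assumed checkpoint; not shown in the diff
processor = AutoProcessor.from_pretrained(model_id)
model = Gemma3ForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
).to(device).eval()

@torch.inference_mode()
@spaces.GPU                        # the added line: requests a ZeroGPU slice for each call
def process(message, history):
    """Generate the model response in streaming mode given message and history
    """
    # Simplified, non-streaming body for illustration; the real app streams tokens.
    inputs = processor(text=message, return_tensors="pt").to(device)
    output_ids = model.generate(**inputs, max_new_tokens=256)
    yield processor.batch_decode(output_ids, skip_special_tokens=True)[0]

demo = gr.ChatInterface(process)
demo.launch()

On ZeroGPU hardware the Space has no GPU attached between requests; spaces.GPU marks process as the function that needs one, so a GPU is allocated for the duration of each call and released afterwards.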