Update app.py
app.py
CHANGED
@@ -8,7 +8,7 @@ import re
 
 import torch
 from transformers import Qwen2VLForConditionalGeneration, GenerationConfig, AutoProcessor
-
+import spaces
 
 
 def extract_answer_content(text: str) -> str:
@@ -150,8 +150,12 @@ def scale_box(box, scale):
     sw, sh = scale
     return [int(box[0]*sw), int(box[1]*sh), int(box[2]*sw), int(box[3]*sh)]
 
-
+@spaces.GPU
 def generate_sgg(image):
+    device = 'cuda' if torch.cuda.is_available() else 'cpu'
+    if next(model.parameters()).device != torch.device(device):
+        model.to(device)  # nn.Module.to() is in-place; no rebinding of the global needed
+
     iw, ih = image.size
     scale_factors = (iw / 1000.0, ih / 1000.0)
 
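For reference, the change follows the standard ZeroGPU pattern for Spaces: load the model once at import time, then decorate each GPU-bound function with @spaces.GPU so a GPU is attached only for the duration of the call. A minimal self-contained sketch of that pattern (the checkpoint name and the elided inference steps are illustrative assumptions, not taken from this Space):

import torch
import spaces
from transformers import Qwen2VLForConditionalGeneration, AutoProcessor

MODEL_ID = "Qwen/Qwen2-VL-2B-Instruct"  # assumption: any Qwen2-VL checkpoint
model = Qwen2VLForConditionalGeneration.from_pretrained(MODEL_ID)  # loaded on CPU at startup
processor = AutoProcessor.from_pretrained(MODEL_ID)

@spaces.GPU  # ZeroGPU attaches a GPU only while this function runs
def generate_sgg(image):
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    if next(model.parameters()).device != torch.device(device):
        model.to(device)  # in-place move of the module-level model
    # ... build inputs with `processor`, call model.generate(...), post-process ...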