app-fast.py CHANGED (+5 -3)
@@ -63,13 +63,15 @@ pipe = HiDreamImagePipeline.from_pretrained(
 pipe.transformer = transformer
 
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=120)
 def generate_image(
     prompt: str, resolution: str, seed: int, progress=gr.Progress(track_tqdm=True)
 ) -> tuple[PIL.Image.Image, int]:
     if seed == -1:
         seed = torch.randint(0, 1_000_000, (1,)).item()
 
+    msg = "ℹ️ This spaces currently crash because of the memory usage. Please help me fix 😅"
+    raise gr.Error(msg, duration=10)
     height, width = tuple(map(int, resolution.replace(" ", "").split("x")))
     generator = torch.Generator("cuda").manual_seed(seed)
 
@@ -87,8 +89,8 @@ def generate_image(
 
 
 # Gradio UI
-with gr.Blocks(title="HiDream Image Generator") as demo:
-    gr.Markdown("## 🌈 HiDream Image Generator")
+with gr.Blocks(title="HiDream Image Generator Fast") as demo:
+    gr.Markdown("## 🌈 HiDream Image Generator Fast")
 
     with gr.Row():
         with gr.Column():