Spaces: Running on Zero

foivospar committed · Commit 2583838
Parent(s): b64e72f

add lcm-lora support
app.py CHANGED

@@ -58,11 +58,11 @@ pipeline = StableDiffusionPipeline.from_pretrained(
     safety_checker=None
 )
 pipeline.scheduler = DPMSolverMultistepScheduler.from_config(pipeline.scheduler.config)
+pipeline = pipeline.to(device)
 
 # load and disable LCM
 pipeline.load_lora_weights("latent-consistency/lcm-lora-sdv1-5")
 pipeline.disable_lora()
-pipeline = pipeline.to(device)
 
 def toggle_lcm_ui(value):
     if value:
@@ -113,9 +113,11 @@ def generate_image(image_path, num_steps, guidance_scale, seed, num_images, use_
     if use_lcm:
         pipeline.scheduler = LCMScheduler.from_config(pipeline.scheduler.config)
         pipeline.enable_lora()
+        pipeline = pipeline.to(device)
     else:
         pipeline.disable_lora()
         pipeline.scheduler = DPMSolverMultistepScheduler.from_config(pipeline.scheduler.config)
+        pipeline = pipeline.to(device)
 
     if image_path is None:
         raise gr.Error(f"Cannot find any input face image! Please upload a face image.")
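
In short, the commit loads the LCM-LoRA weights once at startup (disabled by default), moves the .to(device) call up so the pipeline is placed on the device right after the scheduler is configured, and re-applies .to(device) inside generate_image after each scheduler/LoRA switch. Below is a minimal standalone sketch of the same toggle pattern, assuming a stock runwayml/stable-diffusion-v1-5 checkpoint in place of the Space's own base model; the configure_for_lcm helper is hypothetical and only illustrates the branch from the second hunk.

import torch
from diffusers import (
    StableDiffusionPipeline,
    DPMSolverMultistepScheduler,
    LCMScheduler,
)

device = "cuda" if torch.cuda.is_available() else "cpu"

# Base pipeline; this checkpoint is an assumption, the Space uses its own base model.
pipeline = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
    safety_checker=None,
)
pipeline.scheduler = DPMSolverMultistepScheduler.from_config(pipeline.scheduler.config)
pipeline = pipeline.to(device)

# Load the LCM-LoRA weights once and keep them disabled by default,
# mirroring the startup section of the diff.
pipeline.load_lora_weights("latent-consistency/lcm-lora-sdv1-5")
pipeline.disable_lora()

def configure_for_lcm(use_lcm: bool):
    # Hypothetical helper: same branch as in generate_image in the diff above.
    if use_lcm:
        pipeline.scheduler = LCMScheduler.from_config(pipeline.scheduler.config)
        pipeline.enable_lora()
    else:
        pipeline.disable_lora()
        pipeline.scheduler = DPMSolverMultistepScheduler.from_config(pipeline.scheduler.config)
    return pipeline.to(device)

With the LoRA enabled, LCM sampling runs with far fewer steps (typically 4 to 8) and a low guidance scale, which is presumably what toggle_lcm_ui adjusts in the Gradio controls when the LCM checkbox is toggled.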