Update app.py
app.py CHANGED

@@ -70,7 +70,8 @@ def infer(
     model_id='CompVis/stable-diffusion-v1-4',
     seed=42,
     guidance_scale=7.0,
-    lora_scale=0.5
+    lora_scale=0.5,
+    progress=gr.Progress(track_tqdm=True)
 ):
     generator = torch.Generator(device).manual_seed(seed)
 
@@ -86,7 +87,8 @@ def infer(
     prompt_embeds, negative_prompt_embeds = align_embeddings(prompt_embeds, negative_prompt_embeds)
     print(f"LoRA adapter loaded: {pipe.unet.active_adapters}")
     print(f"LoRA scale applied: {lora_scale}")
-    pipe.fuse_lora(lora_scale=lora_scale)
+    # pipe.fuse_lora(lora_scale=lora_scale)
+    pipe.unet = pipe.unet.merge_and_unload(lora_scale=lora_scale)
 
     params = {
         'prompt_embeds': prompt_embeds,
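For reference, below is a minimal, hypothetical sketch of how the added progress=gr.Progress(track_tqdm=True) parameter behaves in a Gradio event handler. It is not this Space's actual app.py: the demo wiring, the dummy tqdm loop, and the handler signature are assumptions made only to illustrate the progress tracking.

import time

import gradio as gr
from tqdm import tqdm

def infer(prompt, progress=gr.Progress(track_tqdm=True)):
    # With track_tqdm=True, Gradio mirrors any tqdm loop executed inside this
    # handler (for example a diffusers denoising loop) in the app's progress bar.
    for _ in tqdm(range(10), desc="denoising"):
        time.sleep(0.1)
    return f"done: {prompt}"

with gr.Blocks() as demo:
    prompt_box = gr.Textbox(label="Prompt")
    result_box = gr.Textbox(label="Result")
    prompt_box.submit(infer, inputs=prompt_box, outputs=result_box)

demo.launch()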