Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -11,19 +11,21 @@ from diffusers import StableDiffusionPipeline
 
 
 
-# 1. Choose a base model, e.g. SD 1.5
-base_model_id = "runwayml/stable-diffusion-v1-5"
+# # 1. Choose a base model, e.g. SD 1.5
+# base_model_id = "runwayml/stable-diffusion-v1-5"
 
-# 2. Load the base model
-pipe = StableDiffusionPipeline.from_pretrained(
-    base_model_id,
-    torch_dtype=torch.float32
-)
+# # 2. Load the base model
+# pipe = StableDiffusionPipeline.from_pretrained(
+#     base_model_id,
+#     torch_dtype=torch.float32
+# )
 
-# 3. Load the LoRA weights
-lora_model_id = "openfree/flux-chatgpt-ghibli-lora"
-pipe.load_lora_weights(lora_model_id)
+# # 3. Load the LoRA weights
+# lora_model_id = "openfree/flux-chatgpt-ghibli-lora"
+# pipe.load_lora_weights(lora_model_id)
 
+pipe = StableDiffusionImg2ImgPipeline.from_pretrained('black-forest-labs/FLUX.1-dev', torch_dtype=torch.bfloat16)
+pipe.load_lora_weights('openfree/flux-chatgpt-ghibli-lora', weight_name='flux-chatgpt-ghibli-lora.safetensors')
 
 # Move pipeline to GPU if available
 device = "cuda" if torch.cuda.is_available() else "cpu"
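
Note (not part of the commit): StableDiffusionImg2ImgPipeline is built around the SD-family UNet layout, so pointing it at the FLUX.1-dev checkpoint is likely to error at load time; diffusers ships dedicated Flux classes (FluxPipeline, FluxImg2ImgPipeline) for this model family. Below is a minimal sketch of loading the same LoRA on the Flux base, assuming a recent diffusers release with Flux support and access to the gated black-forest-labs/FLUX.1-dev repo; the prompt and sampler settings are illustrative, not taken from app.py.

# Sketch only: same LoRA on the Flux base via the dedicated Flux pipeline class.
import torch
from diffusers import FluxPipeline

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    torch_dtype=torch.bfloat16,
)
pipe.load_lora_weights(
    "openfree/flux-chatgpt-ghibli-lora",
    weight_name="flux-chatgpt-ghibli-lora.safetensors",
)

# Move pipeline to GPU if available (mirrors the unchanged lines after the hunk)
device = "cuda" if torch.cuda.is_available() else "cpu"
pipe = pipe.to(device)

# Illustrative generation call; prompt and settings are placeholders.
image = pipe(
    "a quiet seaside town, Ghibli style",
    num_inference_steps=28,
    guidance_scale=3.5,
).images[0]
image.save("ghibli.png")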