import os

# ✅ Cache models and tokenizers inside persistent storage.
# Set before any Hugging Face libraries are imported so they pick up this path.
os.environ["HF_HOME"] = "/data/.cache/huggingface"

import spaces
import torch
import traceback
import gradio as gr  # ✅ Needed for gr.Error
from diffusers import AutoPipelineForImage2Image

# Load SDXL pipeline with LoRA
pipe = AutoPipelineForImage2Image.from_pretrained(
"stabilityai/stable-diffusion-xl-base-1.0",
torch_dtype=torch.float16,
variant="fp16",
use_safetensors=True,
token=os.getenv("HF_TOKEN") # ✅ Your token from Space secrets
).to("cuda")
pipe.load_lora_weights("theoracle/sdxl-lora-headshot")
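# ZeroGPU: the decorator below requests a GPU for each call; `duration` is the
# expected maximum runtime in seconds used for scheduling the allocation.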
@spaces.GPU(duration=30)
def generate_with_lora(image, prompt, negative_prompt, strength, guidance_scale):
try:
if image is None:
raise ValueError("Uploaded image is None. Please upload a valid image.")
print("[INFO] Received image size:", image.size)
image = image.convert("RGB").resize((1024, 1024)) # ✅ Safer with convert("RGB")
print("[INFO] Starting pipeline with prompt:", prompt)
result = pipe(
prompt=prompt,
negative_prompt=negative_prompt or "",
image=image,
strength=strength,
guidance_scale=guidance_scale,
num_inference_steps=50
).images[0]
print("[INFO] Generation successful.")
return result
except Exception as e:
print("[ERROR] Exception in generate_with_lora:\n", traceback.format_exc())
raise gr.Error(f"Image generation failed: {str(e)}")
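# --- Sketch (assumption): the Space also needs a UI entry point. ---
# A minimal Gradio wiring for generate_with_lora is shown below; the component
# labels and slider ranges are illustrative, not taken from the original file.
demo = gr.Interface(
    fn=generate_with_lora,
    inputs=[
        gr.Image(type="pil", label="Input image"),
        gr.Textbox(label="Prompt"),
        gr.Textbox(label="Negative prompt"),
        gr.Slider(0.1, 1.0, value=0.6, label="Strength"),
        gr.Slider(1.0, 15.0, value=7.5, label="Guidance scale"),
    ],
    outputs=gr.Image(type="pil", label="Result"),
)

if __name__ == "__main__":
    demo.launch()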