import gradio as gr
import torch
from diffusers import AnimateDiffPipeline, MotionAdapter, DDIMScheduler
from diffusers.utils import export_to_gif
# Load the motion adapter
adapter = MotionAdapter.from_pretrained("guoyww/animatediff-motion-adapter-v1-5-2")
# Load the base model
model_id = "SG161222/Realistic_Vision_V5.1_noVAE"
pipe = AnimateDiffPipeline.from_pretrained(model_id, motion_adapter=adapter)
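# AnimateDiffPipeline combines the Stable Diffusion 1.5 UNet with the adapter's motion modules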
# Set up the scheduler
scheduler = DDIMScheduler.from_pretrained(
    model_id, subfolder="scheduler", clip_sample=False, timestep_spacing="linspace", steps_offset=1
)
pipe.scheduler = scheduler
# Enable memory savings
pipe.enable_vae_slicing()
pipe.enable_model_cpu_offload()
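# (offloading keeps only the active sub-model on the accelerator, trading some speed for lower peak memory)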
def generate_animation(prompt):
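    """Run the AnimateDiff pipeline on the prompt and save the result as animation.gif."""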
    output = pipe(
        prompt=prompt,
        negative_prompt="bad quality, worse quality",
        num_frames=16,
        guidance_scale=7.5,
        num_inference_steps=25,
        generator=torch.Generator("cpu").manual_seed(42),
    )
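    # output.frames is a list of frame sequences (one per prompt); take the frames for the single prompt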
    frames = output.frames[0]
    export_to_gif(frames, "animation.gif")
    return "animation.gif"
# Create Gradio interface
demo = gr.Interface(fn=generate_animation, inputs="text", outputs="image")
demo.launch()