cocktailpeanut committed
Commit 706b887 · 1 Parent(s): fc9b498
Files changed (1)
  1. app.py +5 -1
app.py CHANGED
@@ -6,6 +6,7 @@ import numpy as np
 import math
 #import spaces
 import torch
+from PIL import Image
 
 if torch.backends.mps.is_available():
     DEVICE = "mps"
@@ -14,6 +15,8 @@ elif torch.cuda.is_available():
 else:
     DEVICE = "cpu"
 
+print(f"DEVICE={DEVICE}")
+
 #edit_file = hf_hub_download(repo_id="stabilityai/cosxl", filename="cosxl_edit.safetensors")
 #normal_file = hf_hub_download(repo_id="stabilityai/cosxl", filename="cosxl.safetensors")
 edit_file = hf_hub_download(repo_id="cocktailpeanut/c", filename="cosxl_edit.safetensors")
@@ -52,7 +55,8 @@ def run_normal(prompt, negative_prompt="", guidance_scale=7, progress=gr.Progres
 #@spaces.GPU
 def run_edit(image, prompt, resolution, negative_prompt="", guidance_scale=7, progress=gr.Progress(track_tqdm=True)):
     #resolution = 1024
-    image.resize((resolution, resolution))
+    image.thumbnail((resolution, resolution), Image.LANCZOS)
+    #image.resize((resolution, resolution))
     return pipe_edit(prompt=prompt,image=image,height=resolution,width=resolution,negative_prompt=negative_prompt, guidance_scale=guidance_scale,num_inference_steps=20).images[0]
 css = '''
 .gradio-container{
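
For context on the swapped call: PIL's Image.resize() returns a new image rather than modifying the one it is called on, so the removed line discarded its result and left the input untouched, whereas Image.thumbnail() resizes in place and preserves aspect ratio, so neither side exceeds the requested size. A minimal sketch of that difference, using a hypothetical 1600x900 input rather than code from app.py:

from PIL import Image

# Hypothetical stand-in; app.py receives its image from the Gradio UI instead.
image = Image.new("RGB", (1600, 900))
resolution = 1024

# Image.resize() returns a new image; without assigning the result,
# the original is left unchanged (the behavior the commit removes).
resized = image.resize((resolution, resolution))
print(image.size, resized.size)  # (1600, 900) (1024, 1024)

# Image.thumbnail() modifies the image in place and keeps the aspect ratio,
# so both sides end up <= resolution.
image.thumbnail((resolution, resolution), Image.LANCZOS)
print(image.size)  # (1024, 576)

Because thumbnail() preserves aspect ratio, a non-square input ends up smaller than resolution on one side, while pipe_edit is still called with height=resolution and width=resolution.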