macrdel committed on
Commit 1d0707d · 1 Parent(s): 6b75ddc

update app.py

Files changed (1)
  1. app.py +19 -5
app.py CHANGED
@@ -21,10 +21,6 @@ import torch
 device = "cuda" if torch.cuda.is_available() else "cpu"
 torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
 model_repo = "stabilityai/sdxl-turbo"
-pipe = DiffusionPipeline.from_pretrained(
-    model_repo,
-    torch_dtype=torch_dtype
-).to(device)
 
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
@@ -40,6 +36,7 @@ def infer(
     height,
     guidance_scale,
     num_inference_steps,
+    model_repo=model_repo,
     progress=gr.Progress(track_tqdm=True),
 ):
     if randomize_seed:
@@ -47,6 +44,11 @@ def infer(
 
     generator = torch.Generator().manual_seed(seed)
 
+    pipe = DiffusionPipeline.from_pretrained(
+        model_repo,
+        torch_dtype=torch_dtype,
+    ).to(device)
+
     image = pipe(
         prompt=prompt,
         negative_prompt=negative_prompt,
@@ -76,7 +78,18 @@ css = """
 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         gr.Markdown(" # Text2Img Gradio")
-        gr.Markdown(f" ## Model '{model_repo}'")
+        # gr.Markdown(f" ## Model '{model_repo}'")
+
+        model_dropdown = gr.Dropdown(
+            label="Select Model",
+            choices=[
+                "stabilityai/sdxl-turbo",
+                "stabilityai/stable-diffusion-xl-base-1.0",
+                "runwayml/stable-diffusion-v1-5",
+                "SG161222/Realistic_Vision_V5.1_noVAE"
+            ],
+            value="stabilityai/sdxl-turbo",
+        )
 
         with gr.Row():
             prompt = gr.Text(
@@ -159,6 +172,7 @@ with gr.Blocks(css=css) as demo:
             height,
             guidance_scale,
             num_inference_steps,
+            model_dropdown,
         ],
         outputs=[result, seed],
     )
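
Note on the design choice: after this commit, infer() constructs the pipeline on every call, so each generation reloads the selected model's weights. A minimal sketch of one way to keep model switching cheap, assuming the same torch / DiffusionPipeline imports already used in app.py; the get_pipe helper and _pipe_cache dict below are hypothetical and are not part of this commit:

import torch
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

_pipe_cache = {}  # hypothetical cache: model repo id -> loaded pipeline

def get_pipe(model_repo: str):
    # Load each model once and reuse it on later calls instead of
    # running DiffusionPipeline.from_pretrained() inside every infer().
    if model_repo not in _pipe_cache:
        _pipe_cache[model_repo] = DiffusionPipeline.from_pretrained(
            model_repo,
            torch_dtype=torch_dtype,
        ).to(device)
    return _pipe_cache[model_repo]

With such a helper, infer() would call get_pipe(model_repo)(prompt=prompt, ...) rather than building pipe inline.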