QHL067 committed on
Commit
0236b52
·
1 Parent(s): 7a6b9c3

save image

Browse files
Files changed (1) hide show
  1. app.py +9 -6
app.py CHANGED
@@ -16,6 +16,7 @@ import numpy as np
16
  import torch
17
  import torch.nn.functional as F
18
  from torchvision.utils import save_image
 
19
  from huggingface_hub import hf_hub_download
20
 
21
  from absl import logging
@@ -230,10 +231,13 @@ def infer(
230
  else:
231
  image_unprocessed = decode(_z)
232
 
233
- samples = unpreprocess(image_unprocessed).contiguous()[0]
234
 
235
- # return samples, seed
236
- return seed
 
 
 
237
 
238
 
239
  # examples = [
@@ -310,7 +314,7 @@ with gr.Blocks(css=css) as demo:
310
  )
311
  with gr.Row():
312
  num_of_interpolation = gr.Slider(
313
- label="Number of images for interpolation",
314
  minimum=5,
315
  maximum=50,
316
  step=1,
@@ -330,8 +334,7 @@ with gr.Blocks(css=css) as demo:
330
  num_inference_steps,
331
  num_of_interpolation,
332
  ],
333
- # outputs=[result, seed],
334
- outputs=[seed],
335
  )
336
 
337
  if __name__ == "__main__":
 
16
  import torch
17
  import torch.nn.functional as F
18
  from torchvision.utils import save_image
19
+ from torchvision.transforms import ToPILImage
20
  from huggingface_hub import hf_hub_download
21
 
22
  from absl import logging
 
231
  else:
232
  image_unprocessed = decode(_z)
233
 
234
+ samples = unpreprocess(image_unprocessed).contiguous()
235
 
236
+ to_pil = ToPILImage()
237
+
238
  pil_image = to_pil(samples[0])
239
+
240
+ return pil_image, seed
241
 
242
 
243
  # examples = [
 
314
  )
315
  with gr.Row():
316
  num_of_interpolation = gr.Slider(
317
+ label="Number of images for interpolation - More images yield smoother transitions but require more resources and may fail.",
318
  minimum=5,
319
  maximum=50,
320
  step=1,
 
334
  num_inference_steps,
335
  num_of_interpolation,
336
  ],
337
+ outputs=[result, seed],
 
338
  )
339
 
340
  if __name__ == "__main__":