wrl2003 committed on
Commit
d706e6b
·
1 Parent(s): e5fdb98

Update space

Browse files
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -5,7 +5,6 @@ import os
5
  from PIL import Image
6
  import spaces
7
  import torch
8
- from gradio_imageslider import ImageSlider
9
 
10
  css = """
11
  #img-display-container {
@@ -38,7 +37,7 @@ with gr.Blocks(css=css) as demo:
38
  with gr.Row():
39
  left_image = gr.Image(label="Left Image", type='numpy', elem_id='img-display-input')
40
  right_image = gr.Image(label="Right Image", type='numpy', elem_id='img-display-input')
41
- depth_image_slider = ImageSlider(label="Depth Map with Slider View", elem_id='img-display-output', position=0.5,)
42
  # raw_file = gr.File(label="16-bit raw depth (can be considered as disparity)")
43
  submit = gr.Button("Submit")
44
 
@@ -50,20 +49,21 @@ with gr.Blocks(css=css) as demo:
50
  sample['left'] = sample['left'].unsqueeze(0)
51
  sample['right'] = sample['right'].unsqueeze(0)
52
 
53
- model.eval()
54
  for k, v in sample.items():
55
  sample[k] = v.to(0) if torch.is_tensor(v) else v
56
 
57
- model_pred = model(sample)
 
58
 
59
  return [model_pred]
60
 
61
- submit.click(on_submit, inputs=[left_image,right_image], outputs=[depth_image_slider])
62
 
63
  example_files = os.listdir('examples')
64
  example_files.sort()
65
  example_files = [os.path.join('examples', filename) for filename in example_files]
66
- examples = gr.Examples(examples=example_files, inputs=[left_image,right_image], outputs=[depth_image_slider], fn=on_submit, cache_examples=True)
67
 
68
 
69
  if __name__ == '__main__':
 
5
  from PIL import Image
6
  import spaces
7
  import torch
 
8
 
9
  css = """
10
  #img-display-container {
 
37
  with gr.Row():
38
  left_image = gr.Image(label="Left Image", type='numpy', elem_id='img-display-input')
39
  right_image = gr.Image(label="Right Image", type='numpy', elem_id='img-display-input')
40
+ depth_image = gr.Image(label="Depth Image", type='numpy', elem_id='img-display-input')
41
  # raw_file = gr.File(label="16-bit raw depth (can be considered as disparity)")
42
  submit = gr.Button("Submit")
43
 
 
49
  sample['left'] = sample['left'].unsqueeze(0)
50
  sample['right'] = sample['right'].unsqueeze(0)
51
 
52
+ # model.eval()
53
  for k, v in sample.items():
54
  sample[k] = v.to(0) if torch.is_tensor(v) else v
55
 
56
+ # model_pred = model(sample)
57
+ model_pred = None
58
 
59
  return [model_pred]
60
 
61
+ submit.click(on_submit, inputs=[left_image,right_image], outputs=[depth_image])
62
 
63
  example_files = os.listdir('examples')
64
  example_files.sort()
65
  example_files = [os.path.join('examples', filename) for filename in example_files]
66
+ examples = gr.Examples(examples=example_files, inputs=[left_image,right_image], outputs=[depth_image], fn=on_submit, cache_examples=True)
67
 
68
 
69
  if __name__ == '__main__':