ysharma (HF Staff) committed
Commit c5bbce0 · Parent(s): c3ed386
Files changed (1):
  1. app.py +17 -12
app.py CHANGED
@@ -74,7 +74,7 @@ def get_answers_timestamp(question, final_transcript, transcript):
 
     return start_timestamp
 
-def set_example_question(example: list) -> dict:
+def set_example_question(example):
     return gr.Image.update(value=example[0])
 
 
@@ -99,17 +99,10 @@ def display_vid(url, question):
     html_out = "<iframe width='560' height='315' src='https://www.youtube.com/embed/" + video_id + "?start=" + str(ans_timestamp) + "' title='YouTube video player' frameborder='0' allow='accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture' allowfullscreen></iframe>"
     print(f"html output is : {html_out}")
 
-    #"<iframe width='560' height='315' src='https://www.youtube.com/embed/smUHQndcmOY&t?start=215' title='YouTube video player' frameborder='0' allow='accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture' allowfullscreen></iframe>"
-
-    #<iframe width="560" height="315" src="https://www.youtube.com/embed/smUHQndcmOY?start=402" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
-    #vid = YouTubeVideo('smUHQndcmOY&t=425s')
     return html_out
-
-#https://youtu.be/smUHQndcmOY
-def fun(url):
-    return gr.Video(value=url)
-
 
+def set_example_question(example):
+    return gr.Radio.update(value=example[0])
 
 
 demo = gr.Blocks()
 
@@ -125,12 +118,24 @@ with demo:
     output_vid = gr.HTML(label="Video will play at the answer timestamp")
 
     with gr.Row():
-        gr.Radio(["Does video talk about different modalities", "Can the model do classification", "Does the model pushes state of the art in image classification", "Is deepmind copying openai", "Is flamingo good enough", "Has flamingo passed andre karpathy challenge yet?", "Are there cool examples from flamingo in the video?", "Does the video talk about cat?", "Any funny examples in video?"], label= "Choose a sample Question")
+        example_question = gr.Dataset(
+            samples=[
+                ["Does video talk about different modalities"],
+                ["Can the model do classification"],
+                ["Does the model pushes state of the art in image classification"],
+                ["Is deepmind copying openai"],
+                ["Is flamingo good enough"],
+                ["Has flamingo passed andre karpathy challnge yet?"],
+                ["Are there cool examples from flamingo in the video?"],
+                ["Does the video talk about cat?"],
+                ["Any funny examples in video?"]])
+        #gr.Radio(["Does video talk about different modalities", "Can the model do classification", "Does the model pushes state of the art in image classification", "Is deepmind copying openai", "Is flamingo good enough", "Has flamingo passed andre karpathy challenge yet?", "Are there cool examples from flamingo in the video?", "Does the video talk about cat?", "Any funny examples in video?"], label= "Choose a sample Question")
     #paths = sorted(pathlib.Path('images').glob('*.jpg'))
     #example_images = gr.Dataset(components=[input_image],
     #                            samples=[[path.as_posix()]
     #                                     for path in paths])
-
+    example_styles.click(fn=set_example_question, inputs=example_question, outputs= input_url) #example_styles.components)
+
     b1 = gr.Button("Publish Video")
     #b2 = gr.Button("Generate Image")
 
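
For readers following the change: below is a minimal, self-contained sketch of the pattern this commit moves toward, a `gr.Dataset` of sample questions whose click event fills the question input via an `.update()` return. Component names such as `question_box` are placeholders rather than names from app.py, and the snippet assumes the Gradio 3.x Blocks API of that period (where `gr.Dataset.click` and `gr.Textbox.update` are available); it is an illustrative sketch, not the Space's actual code.

```python
# Illustrative sketch only (assumes Gradio 3.x); component names are placeholders,
# not taken verbatim from app.py.
import gradio as gr

def set_example_question(example):
    # A Dataset click passes the clicked sample as a list of its fields;
    # use the first field as the new question text.
    return gr.Textbox.update(value=example[0])

with gr.Blocks() as demo:
    input_url = gr.Textbox(label="Paste a YouTube URL")
    question_box = gr.Textbox(label="Ask a question about the video")  # placeholder component
    example_question = gr.Dataset(
        components=[question_box],
        samples=[
            ["Does video talk about different modalities"],
            ["Can the model do classification"],
        ],
    )
    # Wire the Dataset's click event so choosing a sample fills the question box.
    example_question.click(fn=set_example_question,
                           inputs=example_question,
                           outputs=question_box)

if __name__ == "__main__":
    demo.launch()
```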