Arri98 committed on
Commit 938c4a3 · Parent: e6fcdff
Files changed (1)
  1. app.py +34 -29
app.py CHANGED
@@ -10,20 +10,22 @@ def submit_prompt(prompt: str):
     return backend.router(prompt)
 
 def start_app()-> Tuple[bool, bool, bool]:
-    print("Starting app")
     return (
         gr.update(visible=False), # landing visible
         gr.update(visible=True), # app visible
         gr.update(visible=False), # start_button visible
     )
 
-def change_vote(backdown: bool, id: str) -> Tuple[bool]:
-    print(id)
-    response = requests.post(url + "/v2/backdownvote", json={"backdown": backdown, "_id": id})
-    return (gr.update(visible=False),)
+def change_vote(_id: str, backdown: bool) -> Tuple[bool, bool]:
+    print(backdown, _id)
+    response = requests.post(url + "/v2/backdownvote", json={"backdown": backdown, "_id": _id})
+    return (
+        gr.update(visible=False),
+        gr.update(visible=False)
+    )
 
 def record_vote(prompt: str, left_chat: List, right_chat: List,
-                left_model: str, right_model: str, vote_type: int, moreConsumingn) -> Tuple[str, bool, bool, bool, bool]:
+                left_model: str, right_model: str, energy, moreConsuming, vote_type: int) -> Tuple[str, bool, bool, bool, bool, bool]:
     """Record a vote for either the left or right model"""
     vote_message = "Is a tie!"
     if vote_type == 0:
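Note on the reworked change_vote: it now takes the vote's _id first and returns two gr.update objects, so callers can hide both backdown_row and model_names_row. A minimal sketch of the same round-trip outside Gradio, assuming the backend exposes POST {url}/v2/backdownvote and is running; the base URL and id below are invented:

    import requests

    url = "http://localhost:8000"  # assumption: backend base URL, as in app.py

    def change_vote_standalone(_id: str, backdown: bool) -> None:
        # Same payload the handler sends: which vote, and whether the user backed down.
        response = requests.post(url + "/v2/backdownvote",
                                 json={"backdown": backdown, "_id": _id})
        response.raise_for_status()  # the app itself does not check the status

    if __name__ == "__main__":
        change_vote_standalone("example-vote-id", True)  # hypothetical id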
@@ -36,7 +38,10 @@ def record_vote(prompt: str, left_chat: List, right_chat: List,
         "left_model": left_model, "right_model": right_model,
     })
     changeVisible = False
-    if((moreConsuming == "left" and vote_type == 0) or (moreConsuming == "right" and vote_type == 1)):
+    jsonResponse = response.json()
+    _id = jsonResponse["id"]
+
+    if((moreConsuming == "izquierda" and vote_type == 0) or (moreConsuming == "derecha" and vote_type == 1)):
         changeVisible = True
 
     #result, left_model, buttons[0], buttons[1], tievote_btn, model_names_row,
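The new guard only reveals the backdown prompt when the vote went to the side the backend flagged as more consuming; "izquierda"/"derecha" are Spanish for left/right and must match the backend's moreConsumption values. A self-contained sketch of the rule and its edge cases:

    def should_show_backdown(moreConsuming: str, vote_type: int) -> bool:
        # vote_type: 0 = left vote, 1 = right vote, 2 = tie.
        # Show the prompt only when the chosen answer was the energy-hungrier one.
        return ((moreConsuming == "izquierda" and vote_type == 0)
                or (moreConsuming == "derecha" and vote_type == 1))

    assert should_show_backdown("izquierda", 0)      # picked the costlier left answer
    assert not should_show_backdown("izquierda", 1)  # picked the cheaper right answer
    assert not should_show_backdown("derecha", 2)    # ties never trigger the prompt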
@@ -46,15 +51,16 @@ def record_vote(prompt: str, left_chat: List, right_chat: List,
         gr.update(interactive=False), # right_vote_btn interactive
         gr.update(interactive=False), # tie_btn interactive
         gr.update(visible=True), # model_names_row visible
-        gr.update(visible=True), # backdown_row visible
+        gr.update(visible=changeVisible), # backdown_row visible
+        _id
     )
 
 
-def send_prompt(prompt: str , moreConsuming) -> Tuple[List, List, str, str, bool, bool, bool, bool]:
+def send_prompt(prompt: str) -> Tuple[List, List, str, str, bool, bool, bool, bool, str]:
     response = requests.post(url + "/v2/query", json={"prompt": prompt})
     jsonResponse = response.json()
     print(jsonResponse)
-    if(jsonResponse["status"] == 200 and jsonResponse["message"]):
+    if(jsonResponse["status"] == 200):
         moreConsuming = jsonResponse["message"]["moreConsumption"]
     return (
         [{"role":"assistant", "content": jsonResponse["answers"][0]}], # left_output
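For reference, the JSON shape /v2/query is expected to return, inferred from the handler above (the field names come from the code; the values are invented). One caveat worth noting: after this change, moreConsuming is only assigned when status is 200, so a non-200 payload would hit an UnboundLocalError at the return.

    # Inferred response shape for POST {url}/v2/query (values are illustrative).
    example_query_response = {
        "status": 200,
        "answers": ["left model's answer...", "right model's answer..."],
        "message": {"moreConsumption": "izquierda"},  # which side consumed more
    }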
@@ -65,6 +71,7 @@ def send_prompt(prompt: str , moreConsuming) -> Tuple[List, List, str, str, bool
         gr.update(interactive=True, visible=True),
         gr.update(interactive=True, visible=True),
         gr.update(visible=False),
+        moreConsuming
     )
 
 # Initialize Gradio Blocks
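The extra moreConsuming return value is meant to land in a gr.State output so it persists between events. A minimal sketch of that Gradio pattern, independent of this app (component names are illustrative):

    import gradio as gr

    with gr.Blocks() as sketch:
        box = gr.Textbox()
        stash = gr.State("")  # per-session slot, like moreConsuming in app.py

        def handler(text):
            # First value updates the textbox; the second is stored in `stash`.
            return text.upper(), text

        box.submit(handler, inputs=[box], outputs=[box, stash])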
@@ -138,15 +145,14 @@ with gr.Blocks(css="footer{display:none !important}") as demo:
             value="🤝 It's a Tie!", visible=True, interactive=False
         )
         with gr.Column(visible=False) as backdown_row:
-            backdown_txt = gr.HTML("""<h2>Do you want to change your vote, knowing that the selected model consumes significantly more?</h2>""")
+            backdown_txt = gr.HTML("""<h2>¿Sabiendo que la respuesta que no has elegido consume menos energía cambiarías tu elección o la mantendrías?</h2>""")
             with gr.Row():
-                no_backdown_btn = gr.Button(value="No", visible=True, interactive=True)
-                backdown_btn = gr.Button(value="Yes", visible=True, interactive=True)
+                no_backdown_btn = gr.Button(value="Mantengo la respuesta", visible=True, interactive=True)
+                backdown_btn = gr.Button(value="Cambiaría de respuesta", visible=True, interactive=True)
         with gr.Row(visible=False) as model_names_row:
             left_model = gr.Textbox(label="Left Model", interactive=False)
             right_model = gr.Textbox(label="Right Model", interactive=False)
 
-
         result = gr.Textbox(label="Result", interactive=False, visible=False)
         with gr.Group():
             with gr.Row():
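The backdown copy switches from English to Spanish. In English the new question reads roughly: "Knowing that the answer you did not choose consumes less energy, would you change your choice or keep it?", with no_backdown_btn labelled "I keep my answer" and backdown_btn "I would change my answer". If the strings are expected to flip locale again, one option (not part of this commit) is a small lookup table:

    # Hypothetical localization table; the "en" entry is a gloss, not an app string.
    BACKDOWN_PROMPT = {
        "es": "¿Sabiendo que la respuesta que no has elegido consume menos energía "
              "cambiarías tu elección o la mantendrías?",
        "en": "Knowing that the answer you did not choose consumes less energy, "
              "would you change your choice or keep it?",
    }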
@@ -162,39 +168,39 @@ with gr.Blocks(css="footer{display:none !important}") as demo:
     tie_count = gr.State(0)
 
     # Define interactions
-    textbox.submit(fn=lambda *args: send_prompt(*args, moreConsuming),
+    textbox.submit(fn=lambda *args: send_prompt(*args),
                    inputs=[textbox],
                    outputs=[chatbot[0], chatbot[1], left_model, right_model,
-                            buttons[0], buttons[1], tievote_btn, model_names_row,
+                            buttons[0], buttons[1], tievote_btn, model_names_row, moreConsuming
                    ])
 
 
     buttons[0].click(
         lambda *args: record_vote(*args, 0),
-        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, moreConsuming],
-        outputs=[result,buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row]
+        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, gr.State(value=False), moreConsuming],
+        outputs=[result,buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row, _id]
     )
 
     buttons[1].click(
         lambda *args: record_vote(*args, 1),
-        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, moreConsuming],
-        outputs=[result,buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row]
+        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, gr.State(value=False), moreConsuming],
+        outputs=[result,buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row, _id]
     )
 
     tievote_btn.click(
         lambda *args: record_vote(*args, 2),
-        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, gr.State(value=False)],
-        outputs=[result, buttons[0], buttons[1], tievote_btn, model_names_row,]
+        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, gr.State(value=False), moreConsuming],
+        outputs=[result,buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row, _id]
     )
     backdown_btn.click(
-        lambda *args: change_vote(*args, _id, True),
-        inputs=[],
+        lambda *args: change_vote(*args, True),
+        inputs=[_id],
         outputs=[backdown_row]
     )
     no_backdown_btn.click(
-        lambda *args: change_vote(*args, _id, False),
-        inputs=[],
-        outputs=[backdown_row]
+        lambda *args: change_vote(*args, False),
+        inputs=[_id],
+        outputs=[backdown_row, model_names_row]
     )
 
     # Project Description
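For this wiring to run, _id and moreConsuming must be Gradio components declared inside the Blocks context; since they carry per-session values rather than visible widgets, gr.State is the natural fit. A sketch of the assumed declarations (they are not visible in this diff):

    # Assumed declarations elsewhere in the Blocks context (not shown in the hunks).
    _id = gr.State("")            # vote id handed back by record_vote
    moreConsuming = gr.State("")  # "izquierda"/"derecha" handed back by send_prompt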
@@ -235,6 +241,5 @@ with gr.Blocks(css="footer{display:none !important}") as demo:
     )
 
 if __name__ == "__main__":
-    demo.launch(allowed_paths=["static"], show_api=False, share=False)
-
+    demo.launch(allowed_paths=["static"], show_api=False, share=True)