Arri98 committed on
Commit
5669147
Β·
1 Parent(s): 603df31
Files changed (1) hide show
  1. app.py +240 -0
app.py ADDED
@@ -0,0 +1,240 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from typing import Tuple, List
3
+ import requests
4
+
5
+
6
+ url = "http://localhost:3000/arena"
7
+
8
+
9
def submit_prompt(prompt: str):
    """Route *prompt* through the backend router and return its response.

    NOTE(review): `backend` is not defined anywhere in this file (only
    `gradio`, `typing` and `requests` are imported), so calling this function
    raises NameError. Presumably a `backend` module import is missing, or
    this is dead code left over from an earlier revision — confirm before
    wiring it to the UI.
    """
    return backend.router(prompt)
+
12
def start_app() -> Tuple[bool, bool, bool]:
    """Switch the UI from the landing page to the main arena view.

    Returns Gradio updates for (landing, app, start_button): the landing
    column and the start button are hidden, the app column is shown.
    """
    print("Starting app")
    hide_landing = gr.update(visible=False)  # landing column goes away
    show_app = gr.update(visible=True)       # arena becomes visible
    hide_start = gr.update(visible=False)    # start button no longer needed
    return (hide_landing, show_app, hide_start)
19
+
20
def change_vote(backdown: bool, id: str) -> Tuple[bool]:
    """Report whether the user backed down from their vote after seeing the
    energy-consumption warning, then hide the backdown row.

    Parameters
    ----------
    backdown: True if the user changed (backed down from) their vote.
    id: identifier of the vote being amended.

    Returns a single-element tuple hiding the backdown row.

    NOTE(review): the click handlers invoke this with ``inputs=[]`` as
    ``change_vote(_id, True/False)``, so `backdown` receives the gr.State
    object and `id` receives the boolean — the call sites and this signature
    disagree; confirm which order is intended before relying on the payload.
    """
    print(id)
    response = requests.post(
        url + "/v2/backdownvote",
        json={"backdown": backdown, "_id": id},
        timeout=10,  # don't hang the UI callback if the backend is down
    )
    if not response.ok:
        # Best-effort: hide the row even if the backend rejected the update,
        # but surface the failure in the server log instead of ignoring it.
        print(f"backdownvote endpoint returned {response.status_code}")
    return (gr.update(visible=False),)
24
+
25
def record_vote(prompt: str, left_chat: List, right_chat: List,
                left_model: str, right_model: str, moreConsuming: str,
                vote_type: int) -> Tuple[str, bool, bool, bool, bool, bool]:
    """Record a vote for the left model, the right model, or a tie.

    Parameters
    ----------
    prompt: the prompt that produced both answers.
    left_chat / right_chat: the two chat transcripts shown to the user.
    left_model / right_model: names of the competing models.
    moreConsuming: which side ("left"/"right") consumes more energy, or "".
    vote_type: 0 = right model wins, 1 = left model wins, anything else = tie.

    Fixes relative to the original:
    * Parameter order now matches the click handlers, which pass the six
      Gradio inputs first and append the vote constant last; the original
      signature placed `vote_type` before the consumption flag, so the vote
      constant and the consumption value swapped places at every call site.
    * The misspelled parameter `moreConsumingn` meant the body's reads of
      `moreConsuming` silently resolved to the module-level gr.State object;
      the parameter is now spelled `moreConsuming`.
    * `changeVisible` was computed but never used; it now drives the
      visibility of the backdown row, matching the "do you want to change
      your vote" prompt shown in that row.
    """
    vote_message = "Is a tie!"
    if vote_type == 0:
        vote_message = "Right model wins!"
    elif vote_type == 1:
        vote_message = "Left model wins!"
    result_msg = f"Vote recorded: {vote_message}"

    response = requests.post(
        url + "/v2/vote",
        json={"vote": vote_type, "prompt": prompt,
              "left_chat": left_chat, "right_chat": right_chat,
              "left_model": left_model, "right_model": right_model,
              },
        timeout=10,  # don't hang the UI callback if the backend is down
    )
    if not response.ok:
        # Best-effort: the UI still advances even if persisting the vote fails.
        print(f"vote endpoint returned {response.status_code}")

    # Show the backdown prompt only when the energy warning applies.
    # NOTE(review): vote_type == 0 means "right wins" yet is paired with
    # moreConsuming == "left" — confirm the intended pairing with the backend.
    changeVisible = ((moreConsuming == "left" and vote_type == 0)
                     or (moreConsuming == "right" and vote_type == 1))

    return (
        result_msg,                        # result text
        gr.update(interactive=False),      # left vote button disabled
        gr.update(interactive=False),      # right vote button disabled
        gr.update(interactive=False),      # tie button disabled
        gr.update(visible=True),           # reveal model names row
        gr.update(visible=changeVisible),  # backdown row (conditional)
    )
51
+
52
+
53
def send_prompt(prompt: str, moreConsuming) -> Tuple[List, List, str, str, bool, bool, bool, bool]:
    """Send *prompt* to the arena backend and return updates for the UI.

    Returns, in order: the left chat transcript, the right chat transcript,
    the left model name, the right model name, three updates enabling the
    vote buttons, and one update hiding the model-names row.
    """
    response = requests.post(url + "/v2/query", json={"prompt": prompt})
    jsonResponse = response.json()
    print(jsonResponse)
    if(jsonResponse["status"] == 200 and jsonResponse["message"]):
        # NOTE(review): rebinding this local name has no effect outside the
        # function — the submit lambda passes in the gr.State *object*, and
        # Gradio state is only updated through declared outputs. This
        # assignment looks like a bug; confirm how `moreConsumption` is meant
        # to reach record_vote.
        moreConsuming = jsonResponse["message"]["moreConsumption"]
    # NOTE(review): there is no error path — if the backend responds without
    # "answers"/"models" keys (e.g. a non-200 status payload), the lookups
    # below raise KeyError inside the Gradio callback.
    return (
        [{"role":"assistant", "content": jsonResponse["answers"][0]}],   # left_output
        [{"role": "assistant", "content": jsonResponse["answers"][1]}],  # right_output
        jsonResponse["models"][0],  # left_model
        jsonResponse["models"][1],  # right_model
        gr.update(interactive=True, visible=True),  # left vote button
        gr.update(interactive=True, visible=True),  # right vote button
        gr.update(interactive=True, visible=True),  # tie button
        gr.update(visible=False),                   # hide model names row
    )
69
+
70
# Initialize Gradio Blocks
with gr.Blocks(css="footer{display:none !important}") as demo:
    # Session state: the id of the last recorded vote, and which side
    # ("left"/"right"/"") consumes more energy.
    _id = gr.State("")
    moreConsuming = gr.State("")

    # ---- Landing page: project description shown before the user starts ----
    with gr.Column(visible=True) as landing:
        gr.set_static_paths(paths=["static"])
        with gr.Group():
            # NOTE(review): the heading style "30x" below is missing "px",
            # and the emoji appear mojibake-encoded — preserved as-is because
            # this is runtime HTML.
            gr.HTML("""
            <div style="padding: 20px; font-size: 18px;">
                <h2 style="font-size: 30px;">🌱 About This Project</h2>
                <p>This space is part of the project <strong>"Sostenibilidad Generativa"</strong> 🌍, funded by the <strong>COTEC Foundation</strong>. Our goal is to evaluate how <strong>energy awareness</strong> ⚑ impacts users' evaluation of <strong>Large Language Models (LLMs)</strong>.</p>

                <h2 style="font-size: 30x;">πŸ” How It Works</h2>
                <ol>
                    <li><strong>Ask a Question</strong> πŸ’¬: Enter any question in the prompt box below.</li>
                    <li><strong>Compare Responses</strong> πŸ€–βš–οΈ: Two different LLMs will provide answers.</li>
                    <li><strong>Make Your Choice</strong> βœ…: Rate which response you think is better.</li>
                    <li><strong>Consider Energy Impact</strong> βš‘πŸ”‹: For some questions, you'll see information about the models' energy consumption.</li>
                </ol>

                <h2 style="font-size: 30px;">⚑ Energy Information</h2>
                <ul>
                    <li>When shown, <strong>energy consumption data</strong> πŸ”‹ will help you understand the <strong>environmental impact</strong> 🌎.</li>
                    <li>You'll need to consider: <strong>Is a better response worth the additional energy consumption?</strong> πŸ€”</li>
                    <li>The comparison will highlight when one model <strong>consumes more than twice</strong> the energy of the other ⚠️.</li>
                </ul>


                <p style="text-align: center; margin-top: 20px; font-size: 35px;">
                    🌿 <strong>Let's make AI more sustainable together!</strong> πŸš€β™»οΈ
                </p>
            </div>
            """)

    # ---- Main arena: two anonymous chatbots side by side + voting controls ----
    with gr.Column(visible=False) as app:
        gr.set_static_paths(paths=["static"])
        buttons = [None] * 2  # Initialize the list with None elements
        with gr.Group():
            gr.Image("static/logo.png", elem_id="centered", show_label=False)
        # Hidden row reserved for energy-consumption info (never shown in
        # this file — no handler toggles model_consumption_row).
        with gr.Row(visible=False) as model_consumption_row:
            consumption_text = gr.Textbox(label="Consumo: ", visible=True, interactive=False)
        with gr.Row():
            chatbot = [None] * 2  # Initialize the list with None elements
            messages = ["πŸ‘ˆ Left is better", "πŸ‘‰ Right is better"]
            for i in range(2):
                with gr.Column():
                    chatbot[i] = gr.Chatbot(
                        show_label=False,  # You can set this to False to hide the label
                        type="messages",
                        elem_id="chatbot",
                        height=650,
                        show_copy_button=True,
                        latex_delimiters=[
                            {"left": "$", "right": "$", "display": False},
                            {"left": "$$", "right": "$$", "display": True},
                            {"left": r"\(", "right": r"\)", "display": False},
                            {"left": r"\[", "right": r"\]", "display": True},
                        ],
                    )
                    # Per-side vote button; enabled once answers arrive.
                    buttons[i] = gr.Button(
                        value=messages[i], visible=True, interactive=False
                    )
        with gr.Row():
            for i in range(2):
                with gr.Column():
                    gr.Textbox(show_label=False, visible=False)

        #left_output = gr.Chatbot(label="A (400w πŸ”‹)", type="messages")
        tievote_btn = gr.Button(
            value="🀝 It's a Tie!", visible=True, interactive=False
        )
        # Row asking the user to reconsider after the energy warning.
        with gr.Column(visible=False) as backdown_row:
            backdown_txt = gr.HTML("""<h2>Do you want to change your vote, knowing that the selected model consumes significantly more?</h2>""")
            with gr.Row():
                no_backdown_btn = gr.Button(value="No", visible=True, interactive=True)
                backdown_btn = gr.Button(value="Yes", visible=True, interactive=True)
        # Model identities are revealed only after a vote is recorded.
        with gr.Row(visible=False) as model_names_row:
            left_model = gr.Textbox(label="Left Model", interactive=False)
            right_model = gr.Textbox(label="Right Model", interactive=False)

        result = gr.Textbox(label="Result", interactive=False, visible=False)
        with gr.Group():
            with gr.Row():
                textbox = gr.Textbox(
                    show_label=False,
                    placeholder="πŸ‘‰ Enter your prompt and press ENTER",
                    elem_id="input_box",
                    #submit_btn=True,
                )
                #send_btn = gr.Button(value="Send", scale=0)

        previous_prompt = gr.State("")  # NOTE(review): never read or written in this file
        tie_count = gr.State(0)         # NOTE(review): never read or written in this file

        # Define interactions
        # NOTE(review): the lambda closes over the gr.State *object*
        # `moreConsuming` (it is not in `inputs`), so send_prompt receives the
        # State wrapper, not its value — confirm this is intended.
        textbox.submit(fn=lambda *args: send_prompt(*args, moreConsuming),
                       inputs=[textbox],
                       outputs=[chatbot[0], chatbot[1], left_model, right_model,
                                buttons[0], buttons[1], tievote_btn, model_names_row,
                                ])

        # NOTE(review): the six inputs plus the trailing constant reach
        # record_vote positionally, so the constant lands in the *last*
        # parameter — check this ordering against record_vote's signature.
        buttons[0].click(
            lambda *args: record_vote(*args, 0),
            inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, moreConsuming],
            outputs=[result, buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row]
        )

        buttons[1].click(
            lambda *args: record_vote(*args, 1),
            inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, moreConsuming],
            outputs=[result, buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row]
        )

        # NOTE(review): record_vote returns six updates but only five outputs
        # are listed here (backdown_row is missing) — confirm the mismatch.
        tievote_btn.click(
            lambda *args: record_vote(*args, 2),
            inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, gr.State(value=False)],
            outputs=[result, buttons[0], buttons[1], tievote_btn, model_names_row,]
        )
        # NOTE(review): inputs=[] means *args is empty, so change_vote is
        # called as change_vote(_id, True) — the gr.State object lands in
        # `backdown` and the bool in `id`; the argument order looks swapped
        # relative to change_vote's signature.
        backdown_btn.click(
            lambda *args: change_vote(*args, _id, True),
            inputs=[],
            outputs=[backdown_row]
        )
        no_backdown_btn.click(
            lambda *args: change_vote(*args, _id, False),
            inputs=[],
            outputs=[backdown_row]
        )

    # Project Description (same HTML as the landing column, rendered below
    # both columns).
    gr.HTML("""
    <div style="padding: 20px; font-size: 18px;">
        <h2 style="font-size: 30px;">🌱 About This Project</h2>
        <p>This space is part of the project <strong>"Sostenibilidad Generativa"</strong> 🌍, funded by the <strong>COTEC Foundation</strong>. Our goal is to evaluate how <strong>energy awareness</strong> ⚑ impacts users' evaluation of <strong>Large Language Models (LLMs)</strong>.</p>

        <h2 style="font-size: 30x;">πŸ” How It Works</h2>
        <ol>
            <li><strong>Ask a Question</strong> πŸ’¬: Enter any question in the prompt box below.</li>
            <li><strong>Compare Responses</strong> πŸ€–βš–οΈ: Two different LLMs will provide answers.</li>
            <li><strong>Make Your Choice</strong> βœ…: Rate which response you think is better.</li>
            <li><strong>Consider Energy Impact</strong> βš‘πŸ”‹: For some questions, you'll see information about the models' energy consumption.</li>
        </ol>

        <h2 style="font-size: 30px;">⚑ Energy Information</h2>
        <ul>
            <li>When shown, <strong>energy consumption data</strong> πŸ”‹ will help you understand the <strong>environmental impact</strong> 🌎.</li>
            <li>You'll need to consider: <strong>Is a better response worth the additional energy consumption?</strong> πŸ€”</li>
            <li>The comparison will highlight when one model <strong>consumes more than twice</strong> the energy of the other ⚠️.</li>
        </ul>


        <p style="text-align: center; margin-top: 20px; font-size: 35px;">
            🌿 <strong>Let's make AI more sustainable together!</strong> πŸš€β™»οΈ
        </p>
    </div>
    """)

    # Data-collection / license notice.
    gr.Markdown("""This space is part of a research project to study how knowledge of energy consumption influences user preferences in AI systems. It must be used only for that purpose and not for any illegal, harmful or offensive activities. Please do not upload personal or private information. The space collects and stores the questions and answers and reserves the right to distribute it under a Creative Commons Attribution (CC-BY) license."""
    )
    # Entry point from the landing page into the arena.
    start_button = gr.Button(value="Start", visible=True, interactive=True, size="lg", variant="primary")
    start_button.click(
        lambda *args: start_app(),
        inputs=[],
        outputs=[landing, app, start_button]
    )

if __name__ == "__main__":
    # Serve the static assets dir; hide the API page; no public share link.
    demo.launch(allowed_paths=["static"], show_api=False, share=False)
239
+
240
+