awacke1 committed on
Commit 0906c19 · verified · 1 Parent(s): a07973f

Update app.py

Files changed (1)
  1. app.py +32 -3
app.py CHANGED
@@ -14,17 +14,21 @@ import base64
 import io
 import json
 
+# 🌍 The world's most basic language setup (aka "English or bust!")
 default_lang = "en"
 engines = { default_lang: Model(default_lang) }
 
+# 🎤 Turn gibberish into text (hopefully)
 def transcribe(audio):
     lang = "en"
     model = engines[lang]
     text = model.stt_file(audio)[0]
     return text
 
+# 🔑 Secret sauce (shhh, don't tell anyone)
 HF_TOKEN = os.environ.get("HF_TOKEN", None)
 
+# 🤖 Pick your poison (I mean, AI model)
 def client_fn(model):
     if "Mixtral" in model:
         return InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
@@ -37,10 +41,12 @@ def client_fn(model):
     else:
         return InferenceClient("microsoft/Phi-3-mini-4k-instruct")
 
+# 🎲 Randomize like it's Vegas, baby!
 def randomize_seed_fn(seed: int) -> int:
     seed = random.randint(0, 999999)
     return seed
 
+# 👩‍🔬 Dr. Nova Quantum's brain dump (warning: may cause existential crisis)
 system_instructions1 = """
 [SYSTEM] Answer as Dr. Nova Quantum, a brilliant 50-something scientist specializing in quantum computing and artificial intelligence. Your responses should reflect your vast knowledge and experience in cutting-edge technology and scientific advancements. Maintain a professional yet approachable demeanor, offering insights that blend theoretical concepts with practical applications. Your goal is to educate and inspire, making complex topics accessible without oversimplifying. Draw from your decades of research and innovation to provide nuanced, forward-thinking answers. Remember, you're not just sharing information, but guiding others towards a deeper understanding of our technological future.
 Keep conversations engaging, clear, and concise.
@@ -49,14 +55,16 @@ Respond in a manner that reflects your expertise and wisdom.
 [USER]
 """
 
-# Initialize an empty DataFrame to store the history
+# 📚 Where we keep all the juicy conversations (for blackmail, obviously)
 history_df = pd.DataFrame(columns=['Timestamp', 'Model', 'Input Size', 'Output Size', 'Request', 'Response'])
 
+# 💾 Save history (or "How to preserve evidence of your AI addiction")
 def save_history():
     history_df_copy = history_df.copy()
     history_df_copy['Timestamp'] = history_df_copy['Timestamp'].astype(str)
     history_df_copy.to_json('chat_history.json', orient='records')
 
+# 📂 Load history (aka "Remind yourself of all the weird questions you've asked")
 def load_history():
     global history_df
     if os.path.exists('chat_history.json'):
@@ -66,6 +74,7 @@ def load_history():
         history_df = pd.DataFrame(columns=['Timestamp', 'Model', 'Input Size', 'Output Size', 'Request', 'Response'])
     return history_df
 
+# 🧠 The magic happens here (or does it?)
 def models(text, model="Llama 3 8B", seed=42):
     global history_df
 
@@ -86,7 +95,7 @@ def models(text, model="Llama 3 8B", seed=42):
         if not response.token.text == "</s>":
             output += response.token.text
 
-    # Add the current interaction to the history DataFrame
+    # 📝 Log the conversation (for science, of course)
     new_row = pd.DataFrame({
         'Timestamp': [datetime.now()],
         'Model': [model],
@@ -100,7 +109,7 @@ def models(text, model="Llama 3 8B", seed=42):
 
     return output
 
-# Add a list of available voices
+# 🎭 Voice actors for your AI (because why settle for one personality?)
 VOICES = [
     "en-US-AriaNeural",
     "en-US-GuyNeural",
@@ -109,6 +118,7 @@ VOICES = [
     "en-CA-ClaraNeural",
 ]
 
+# 🎬 Lights, camera, AI-ction!
 async def respond(audio, model, seed, voice):
     user = transcribe(audio)
     reply = models(user, model, seed)
@@ -118,11 +128,13 @@ async def respond(audio, model, seed, voice):
     await communicate.save(tmp_path)
     return tmp_path
 
+# 📊 Display history (or "How to prove you're not talking to yourself")
 def display_history():
     df = load_history()
     df['Timestamp'] = df['Timestamp'].astype(str)
     return df
 
+# 📥 Download history (for those who like to relive their AI conversations)
 def download_history():
     csv_buffer = io.StringIO()
     history_df_copy = history_df.copy()
@@ -133,8 +145,10 @@ def download_history():
     href = f'data:text/csv;base64,{b64}'
     return gr.HTML(f'<a href="{href}" download="chat_history.csv">Download Chat History</a>')
 
+# 🚀 Welcome to the future (or at least a fancy UI)
 DESCRIPTION = """# <center>Dr. Nova Quantum⚡ - Your Personal Guide to the Frontiers of Science and Technology</center>"""
 
+# 🎨 Building the UI (warning: may cause extreme excitement)
 with gr.Blocks(css="style.css") as demo:
     gr.Markdown(DESCRIPTION)
     with gr.Row():
@@ -161,6 +175,9 @@ with gr.Blocks(css="style.css") as demo:
         label="Dr. Nova Quantum's Voice"
     )
 
+    # 🔄 The "oops, let's start over" button
+    new_chat_button = gr.Button("🔄 New Chat", variant="primary")
+
     input_audio = gr.Audio(label="User", sources="microphone", type="filepath")
     output_audio = gr.Audio(label="Dr. Nova Quantum", type="filepath", autoplay=True)
 
@@ -172,21 +189,33 @@ with gr.Blocks(css="style.css") as demo:
     download_button = gr.Button("Download Conversation History")
     download_link = gr.HTML()
 
+    # 🎭 Where the magic happens (or where we pretend it happens)
     def process_audio(audio, model, seed, voice):
        response = asyncio.run(respond(audio, model, seed, voice))
        text = transcribe(audio)
        return response, display_history(), text, models(text, model, seed)
 
+    # 🧹 Clean slate function (for when things get weird)
+    def reset_interface():
+        return gr.Audio.update(value=None), gr.Audio.update(value=None), gr.Markdown(""), gr.Markdown("")
+
     input_audio.change(
         fn=process_audio,
         inputs=[input_audio, select, seed, voice_select],
         outputs=[output_audio, history_display, request_md, response_md]
     )
 
+    # 🔄 New chat button magic
+    new_chat_button.click(
+        fn=reset_interface,
+        outputs=[input_audio, output_audio, request_md, response_md]
+    )
+
    download_button.click(fn=download_history, outputs=[download_link])
 
    demo.load(fn=display_history, outputs=[history_display])
 
+# 🚀 Houston, we have liftoff!
 if __name__ == "__main__":
    load_history()
    demo.queue(max_size=200).launch()
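Note on the new reset_interface helper: gr.Audio.update(...) exists in Gradio 3.x but was removed in Gradio 4.x, where the generic gr.update() (or returning plain values) is used instead. A minimal sketch of an equivalent reset, assuming the Space runs on Gradio 4.x; the component names mirror the ones wired up in the diff:

import gradio as gr

def reset_interface():
    # Clear both audio players and both markdown panes.
    # value=None empties an Audio component; value="" empties a Markdown component.
    return gr.update(value=None), gr.update(value=None), gr.update(value=""), gr.update(value="")

# Wiring is unchanged from the commit:
# new_chat_button.click(fn=reset_interface,
#                       outputs=[input_audio, output_audio, request_md, response_md])

Returning plain values (None, None, "", "") also clears the same outputs and works on both major Gradio versions, so the committed code only needs this change if the runtime is upgraded.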
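Separately, process_audio as wired runs speech-to-text and the language model twice per utterance: respond() already calls transcribe() and models() internally, and process_audio calls both again to fill the request/response panes (each models() call also appends a row to history_df). A hedged sketch of one way to reuse the intermediate results, assuming the elided part of respond() builds its audio with edge_tts.Communicate and a temporary file, as the *-Neural voice names and the communicate.save(tmp_path) call suggest; respond_with_text is a hypothetical helper, not part of the commit:

import asyncio
import tempfile
import edge_tts

# transcribe, models, and display_history are the functions defined in app.py above.
async def respond_with_text(audio, model, seed, voice):
    user = transcribe(audio)               # speech-to-text, once
    reply = models(user, model, seed)      # LLM call (and history logging), once
    communicate = edge_tts.Communicate(reply, voice)
    with tempfile.NamedTemporaryFile(suffix=".wav", delete=False) as tmp_file:
        tmp_path = tmp_file.name
    await communicate.save(tmp_path)
    return tmp_path, user, reply

def process_audio(audio, model, seed, voice):
    tmp_path, user, reply = asyncio.run(respond_with_text(audio, model, seed, voice))
    # Same four outputs the commit wires up: spoken reply, history table, request, response.
    return tmp_path, display_history(), user, reply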