added back the yields
app.py CHANGED
@@ -133,25 +133,25 @@ def bot_comms(input_text: str,
 
     if input_text == "mode":
         if llm_mode == "":
-
+            yield "The mode is currently at Loki Default mode"
         else:
-
+            yield f"The current mode: {llm_mode}"
 
     if input_text == "check cuda":
         cuda_info = check_cuda()
-
+        yield cuda_info
 
     if input_text == "switch to llama":
         llm_mode = input_text
-
+        yield "Got it! Llama is now activate for your questions only 🦙"
 
     if input_text == "switch to gpt-4o":
         llm_mode = input_text
-
+        yield "Understood! GPT-4o is now hearing your responses only 🐾"
 
     if input_text == "switch to gpt-3.5-turbo":
         llm_mode = input_text
-
+        yield "Done. GPT-3.5-turbo is ready for your questions! π"
 
     if llm_mode == "switch to llama":
         streamer = llama_generation(input_text=input_text, history=history, temperature=temperature, max_new_tokens=max_new_tokens)
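The restored yield statements turn bot_comms back into a Python generator, which is what lets a streaming chat front end show the mode/CUDA/switch confirmations immediately instead of waiting for a returned string. The snippet below is a minimal sketch of that pattern, not the actual app.py: the simplified handler name, the echo fallback, and the way the generator is consumed are assumptions, since this hunk only shows the command branches.

# Sketch only: illustrates why yielding matters for a chat handler like
# bot_comms. The function name and fallback logic here are hypothetical;
# the real parameters (input_text, history, temperature, max_new_tokens)
# mirror the ones visible in the diff.
def bot_comms_sketch(input_text, history, temperature=0.7, max_new_tokens=256):
    """Simplified stand-in that yields status text as soon as it is known."""
    if input_text == "check cuda":
        yield "CUDA info would go here"  # the real code yields check_cuda()
        return
    if input_text.startswith("switch to"):
        # Confirm the mode switch right away, like the restored yields do.
        yield f"Switched mode to: {input_text.removeprefix('switch to ').strip()}"
        return
    # Normal generation path: stream progressively longer partial replies.
    reply = f"(echo) {input_text}"
    for i in range(1, len(reply) + 1):
        yield reply[:i]

# A streaming UI (e.g. a Gradio ChatInterface fed a generator) iterates the
# handler the same way this loop does, rendering each chunk as it arrives.
for chunk in bot_comms_sketch("switch to llama", history=[]):
    print(chunk)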