sandz7 committed on
Commit
c0248ea
·
1 Parent(s): f7f9ccf

Added an `outputs` list to accumulate streamed text in the llama condition

Browse files
Files changed (1) hide show
  1. app.py +3 -1
app.py CHANGED
@@ -155,8 +155,10 @@ def bot_comms(input_text: str,
155
 
156
  if llm_mode == "switch to llama":
157
  streamer = llama_generation(input_text=input_text, history=history, temperature=temperature, max_new_tokens=max_new_tokens)
 
158
  for text in streamer:
159
- yield text
 
160
 
161
  if llm_mode == "switch to gpt-4o":
162
  stream = gpt_generation(input=input_text, llama_output="", mode="gpt-4o")
 
155
 
156
  if llm_mode == "switch to llama":
157
  streamer = llama_generation(input_text=input_text, history=history, temperature=temperature, max_new_tokens=max_new_tokens)
158
+ outputs = []
159
  for text in streamer:
160
+ outputs.append(text)
161
+ yield "".join(outputs)
162
 
163
  if llm_mode == "switch to gpt-4o":
164
  stream = gpt_generation(input=input_text, llama_output="", mode="gpt-4o")