s-a-malik committed
Commit f838d5b · 1 Parent(s): 16c3a1a

remove streamer

Files changed (1)
  app.py  +1 -3
app.py CHANGED
@@ -9,7 +9,6 @@ import spaces
 import gradio as gr
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
-from transformers.generation.streamers import BaseStreamer
 
 
 MAX_MAX_NEW_TOKENS = 2048
@@ -133,7 +132,6 @@ def generate(
         top_k=top_k,
         temperature=temperature,
         repetition_penalty=repetition_penalty,
-        streamer=streamer,
         output_hidden_states=True,
         return_dict_in_generate=True,
     )
@@ -237,4 +235,4 @@ with gr.Blocks(title="Llama-2 7B Chat with Dual Probes", css="footer {visibility
 
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
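
With the streamer removed from the generate() call, the app decodes the full sequence once generation finishes and reads the hidden states for the probes from the returned dict. A minimal sketch of that non-streaming flow, assuming a Llama-2 7B chat checkpoint and placeholder prompt/sampling values (not this Space's exact code):

# Minimal sketch of the post-change flow; model ID, prompt, and sampling
# values are assumptions for illustration, not copied from the Space.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "meta-llama/Llama-2-7b-chat-hf"  # assumed from the Space title
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.float16, device_map="auto"
)

inputs = tokenizer("Hello, how are you?", return_tensors="pt").to(model.device)
outputs = model.generate(
    **inputs,
    max_new_tokens=256,
    do_sample=True,
    top_p=0.95,
    top_k=50,
    temperature=0.7,
    repetition_penalty=1.2,
    output_hidden_states=True,      # keep hidden states for the probes
    return_dict_in_generate=True,   # return a generation output object, not a tensor
)

# Without a streamer, decoding happens once, after generation completes;
# the hidden states come back alongside the token ids.
text = tokenizer.decode(outputs.sequences[0], skip_special_tokens=True)
hidden_states = outputs.hidden_states  # tuple: one entry per generated step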