Update app.py
app.py CHANGED
@@ -138,7 +138,7 @@ def reply(message: str, history: list[str]) -> str:
     text = tokenizer.apply_chat_template(
         history_transformer_format, tokenize=False, add_generation_prompt=True
     )
-    model_inputs = tokenizer([text], return_tensors="pt").to("cuda
+    model_inputs = tokenizer([text], return_tensors="pt").to("cuda")
 
     generate_kwargs = dict(model_inputs, streamer=streamer, max_new_tokens=512)
     t = threading.Thread(target=chatmodel.generate, kwargs=generate_kwargs)
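
For context, the fixed line (adding the missing closing quote and parenthesis to .to("cuda")) sits inside the standard transformers streaming-generation pattern: generate runs on a background thread while a streamer yields tokens as they arrive. The sketch below is a reconstruction under assumptions, not the Space's actual code: only tokenizer, chatmodel, streamer, reply, history_transformer_format, and max_new_tokens=512 appear in the diff; the model ID, the TextIteratorStreamer setup, and the history handling are placeholders.

import threading

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

# Placeholder model ID; the Space's actual checkpoint is not shown in the diff.
MODEL_ID = "HuggingFaceH4/zephyr-7b-beta"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
chatmodel = AutoModelForCausalLM.from_pretrained(
    MODEL_ID, torch_dtype=torch.float16
).to("cuda")


def reply(message: str, history: list[str]) -> str:
    # How the Space builds history_transformer_format is not in the hunk;
    # here it is sketched as a single-turn prompt (assumption).
    history_transformer_format = [{"role": "user", "content": message}]

    text = tokenizer.apply_chat_template(
        history_transformer_format, tokenize=False, add_generation_prompt=True
    )
    # The line fixed by this commit: the string and call must be closed.
    model_inputs = tokenizer([text], return_tensors="pt").to("cuda")

    # Stream tokens from a background generate() call so partial output
    # can be yielded to the UI as it is produced.
    streamer = TextIteratorStreamer(
        tokenizer, skip_prompt=True, skip_special_tokens=True
    )
    generate_kwargs = dict(model_inputs, streamer=streamer, max_new_tokens=512)
    t = threading.Thread(target=chatmodel.generate, kwargs=generate_kwargs)
    t.start()

    partial = ""
    for new_text in streamer:
        partial += new_text
        yield partial  # Gradio treats a generator callback as a streaming response
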