fix
app.py CHANGED
@@ -64,6 +64,7 @@ def chat_with_model(messages):
     output_text = ""
     in_think = False
     max_new_tokens = 256
+    generated_tokens = 0
 
     prompt = format_prompt(messages)
     device = torch.device("cuda")
@@ -101,7 +102,7 @@ def chat_with_model(messages):
     print(f'Step 1: {messages}')
 
     prompt_text = current_tokenizer.decode(inputs["input_ids"][0], skip_special_tokens=False)
-
+
     for token_info in streamer:
         token_str = token_info["token"]
         token_id = token_info["token_id"]
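For context, here is a minimal, self-contained sketch of how the generated_tokens counter added in this commit could gate the streaming loop shown in the diff. Only the names output_text, max_new_tokens, generated_tokens, and the {"token", "token_id"} dict shape come from the diff; fake_streamer and the early-break logic are illustrative assumptions, not the app's actual code.

# Stand-in generator; the real app iterates over its own streamer object.
def fake_streamer():
    for i, tok in enumerate(["Hello", ",", " world", "!"]):
        yield {"token": tok, "token_id": i}

output_text = ""
max_new_tokens = 256
generated_tokens = 0           # counter added in this commit

for token_info in fake_streamer():
    token_str = token_info["token"]
    token_id = token_info["token_id"]

    output_text += token_str
    generated_tokens += 1
    if generated_tokens >= max_new_tokens:
        break                  # assumed use: stop once the token budget is spent

print(output_text)             # -> "Hello, world!"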