only added the eos_token_id to terminators
app.py CHANGED
@@ -29,7 +29,8 @@ llama_tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B")
 llama_model = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3-8B", token=TOKEN, torch_dtype=torch.float16).to('cuda')
 terminators = [
     llama_tokenizer.eos_token_id,
-    llama_tokenizer.convert_tokens_to_ids("")
+    # Remove this line, as an empty string won't convert to a valid token ID
+    # llama_tokenizer.convert_tokens_to_ids("")
 ]

 # Get special tokens list from the tokenizer
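For context, here is a minimal sketch of how the corrected terminators list is typically consumed downstream. The generate() call, the prompt, and the sampling arguments are assumptions for illustration only and are not part of this hunk; transformers' generate() does accept either a single int or a list of ints for eos_token_id, so the list form shown here stops generation at any of the listed token ids.

# Hypothetical usage sketch -- generate() and its arguments are assumptions,
# not code from this commit.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

llama_tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B")
llama_model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Meta-Llama-3-8B", torch_dtype=torch.float16
).to("cuda")

terminators = [
    llama_tokenizer.eos_token_id,  # only valid integer token ids belong here
]

inputs = llama_tokenizer("The capital of France is", return_tensors="pt").to("cuda")
output = llama_model.generate(
    **inputs,
    max_new_tokens=32,
    eos_token_id=terminators,  # generate() accepts a list of stop token ids
)
print(llama_tokenizer.decode(output[0], skip_special_tokens=True))

The key point of the commit is that every entry in terminators must be a real token id: convert_tokens_to_ids("") does not map an empty string to a usable id, so leaving it in the list can break generation, whereas eos_token_id alone is always valid.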