abacus_chat_proxy / deepseek_v3_tokenizer / deepseek_tokenizer.py
malt666's picture
Upload 12 files
ad9a66f verified
raw
history blame
277 Bytes
# Usage:
#   pip3 install transformers
#   python3 deepseek_tokenizer.py
#
# Minimal demo: load the DeepSeek tokenizer whose files sit next to this
# script and print the token ids for a sample string.
import transformers

chat_tokenizer_dir = "./"  # tokenizer config/vocab files are expected here

# NOTE(review): trust_remote_code=True lets transformers execute Python code
# shipped with the tokenizer files — acceptable here only because the files
# are local and presumed vetted; confirm before pointing this at a remote repo.
tokenizer = transformers.AutoTokenizer.from_pretrained(chat_tokenizer_dir, trust_remote_code=True)

result = tokenizer.encode("Hello!")
print(result)