gopiium's picture
Upload folder using huggingface_hub
ba866d1 verified
raw
history blame contribute delete
383 Bytes
from transformers import AutoTokenizer
def assert_tokenizer_consistency(model_id_1, model_id_2):
    """Verify that two model IDs resolve to tokenizers with identical vocabularies.

    Args:
        model_id_1: Hub model ID (or local path) of the first tokenizer.
        model_id_2: Hub model ID (or local path) of the second tokenizer.

    Raises:
        ValueError: If the two tokenizers' vocabularies differ.
    """
    vocab_1 = AutoTokenizer.from_pretrained(model_id_1).vocab
    vocab_2 = AutoTokenizer.from_pretrained(model_id_2).vocab
    if vocab_1 != vocab_2:
        raise ValueError(f"Tokenizers are not identical for {model_id_1} and {model_id_2}.")