ARP3 / save_tokenizer.py
ZeeAI1's picture
Create save_tokenizer.py
7fe6ba1 verified
raw
history blame contribute delete
252 Bytes
"""Export the base-model tokenizer into a local fine-tuned model folder.

Downloads the tokenizer for ``google/flan-t5-large`` from the Hugging Face
hub and saves its files (tokenizer config, vocab/special-tokens maps, ...)
next to the fine-tuned weights so the folder is self-contained.

Run as a script:  python save_tokenizer.py
"""
from transformers import AutoTokenizer

# Defaults preserved from the original hard-coded script values.
SOURCE_MODEL = "google/flan-t5-large"
TARGET_DIR = "/home/user/app/finetuned-flan-t5"


def save_tokenizer(model_name: str = SOURCE_MODEL, target_dir: str = TARGET_DIR) -> None:
    """Fetch the tokenizer for *model_name* and save it to *target_dir*.

    Args:
        model_name: Hub repo id of the original (base) model whose tokenizer
            the fine-tuned checkpoint reuses.
        target_dir: Local folder of the fine-tuned model; created if missing
            by ``save_pretrained``.

    Raises:
        OSError: propagated from ``from_pretrained`` on a bad repo id or
            network failure, or from ``save_pretrained`` on a write error.
    """
    # Load tokenizer from the original model (requires network/hub cache).
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    # Save tokenizer into the local fine-tuned model folder.
    tokenizer.save_pretrained(target_dir)


# Guard so importing this module no longer triggers the download/write
# side effects at import time.
if __name__ == "__main__":
    save_tokenizer()