# magbert-ner-api / model.py
# Uploaded by hassoudi — commit 38e01ae (verified), 594 bytes.
"""Load the MagBERT-NER model and expose a ready-to-use token-classification pipeline.

Intended for Hugging Face Spaces, where only /tmp is writable: all Hub/Transformers
caches are redirected there before any download happens.
"""

import os

# IMPORTANT: these cache variables are read by transformers/huggingface_hub at
# import time, so they must be set BEFORE the `transformers` import below —
# otherwise downloads may target a read-only default cache dir and crash the Space.
os.environ["TRANSFORMERS_CACHE"] = "/tmp/hf_cache"
os.environ["HF_HOME"] = "/tmp/hf_home"
os.environ["HF_HUB_CACHE"] = "/tmp/hf_hub"

from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

# Access token for gated/private repos; None (unset) is fine for public models.
HF_TOKEN = os.getenv("HF_TOKEN")
MODEL_ID = "TypicaAI/magbert-ner"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, token=HF_TOKEN)
model = AutoModelForTokenClassification.from_pretrained(MODEL_ID, token=HF_TOKEN)

# aggregation_strategy="first": merge subword tokens into whole-word entities,
# taking each word's label from its first subtoken.
ner_pipeline = pipeline("ner", model=model, tokenizer=tokenizer, aggregation_strategy="first")