# Hugging Face Space app (status badge "Spaces: Sleeping" from the scraped page removed)
"""Load the MagBERT-NER model and expose a ready-to-use NER pipeline.

Module-level names provided: HF_TOKEN, MODEL_ID, tokenizer, model, ner_pipeline.
"""
import os

# Redirect caches to writable dirs in Hugging Face Spaces. These MUST be set
# BEFORE importing transformers: the library (and huggingface_hub) reads these
# environment variables at import time, so assigning them after the import can
# silently leave the default, read-only cache locations in use.
os.environ["TRANSFORMERS_CACHE"] = "/tmp/hf_cache"
os.environ["HF_HOME"] = "/tmp/hf_home"
os.environ["HF_HUB_CACHE"] = "/tmp/hf_hub"

from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

# Access token for the (possibly gated) model repo; None if the Space has no
# HF_TOKEN secret configured, in which case only public repos are reachable.
HF_TOKEN = os.getenv("HF_TOKEN")
MODEL_ID = "TypicaAI/magbert-ner"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, token=HF_TOKEN)
model = AutoModelForTokenClassification.from_pretrained(MODEL_ID, token=HF_TOKEN)

# aggregation_strategy="first" merges sub-word tokens into whole-word entities,
# taking the label of each word's first sub-token.
ner_pipeline = pipeline(
    "ner",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="first",
)