from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Load the model and tokenizer from the Hugging Face Hub
model = AutoModelForCausalLM.from_pretrained("josu/gpt-neo-pt-1.3B")
tokenizer = AutoTokenizer.from_pretrained("josu/gpt-neo-pt-1.3B")

# Alternatively, use the text-generation pipeline directly
generator = pipeline('text-generation', model='josu/gpt-neo-pt-1.3B')
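
A minimal usage sketch with the pipeline. The "pt" in the model name suggests a Portuguese-language model, so the prompt below is an illustrative assumption, as are the generation settings:

# Minimal sketch; the prompt and generation parameters are illustrative
# assumptions, not taken from the model card.
prompt = "Era uma vez"
outputs = generator(prompt, max_length=50, num_return_sequences=1)
print(outputs[0]['generated_text'])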