from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Load the Hugging Face model
model_id = "deepseek-ai/DeepSeek-R1"
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    trust_remote_code=True,
    torch_dtype="auto",       # pick the dtype automatically (no FP8)
    low_cpu_mem_usage=True    # reduce CPU memory usage while loading
)
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)

# Build a text-generation pipeline from the loaded model and tokenizer
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

# Test run
output = pipe("Hello, who are you?", max_length=100)
print(output)
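
The pipeline returns a list of dictionaries, one per generated sequence, each with a "generated_text" field holding the prompt plus the completion. A minimal sketch of pulling out just that string from the run above:

# Extract only the generated string from the pipeline result
generated_text = output[0]["generated_text"]
print(generated_text)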