from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Load the Hugging Face model
model_id = "deepseek-ai/DeepSeek-R1"

model = AutoModelForCausalLM.from_pretrained(
    model_id,
    trust_remote_code=True,
    torch_dtype="auto",  # Automatic dtype (no FP8)
    low_cpu_mem_usage=True  # reduce CPU memory usage while loading weights
)

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

# Test run
output = pipe("Hello, who are you?", max_length=100)
print(output)
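
# Optional sketch (not part of the original script): the "text-generation"
# pipeline returns a list of dicts, so indexing the first result prints only
# the generated string instead of the full list structure.
print(output[0]["generated_text"])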