# app.py
"""Solve a math problem with the DeepSeek-Math 7B instruct model.

Loads the model (fp16, device placement chosen by accelerate via
device_map="auto"), generates an answer for a fixed Chinese math prompt,
and prints both the question and the generated answer.
"""
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load model and tokenizer.
model_name = "deepseek-ai/deepseek-math-7b-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.float16, device_map="auto"
)

# Input math problem (kept verbatim — it is runtime data, not a comment).
input_text = "解方程 2x + 5 = 15。"

# Generate the answer.
# FIX: the original hard-coded .to("cuda"), which crashes on CPU-only hosts
# and can disagree with the device chosen by device_map="auto".
# model.device is wherever accelerate actually placed the (first) weights.
inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
# FIX: max_new_tokens bounds only the generated continuation; the original
# max_length=100 counted prompt tokens too and could truncate the answer.
outputs = model.generate(**inputs, max_new_tokens=100)
generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)

# Print results.
print("问题:", input_text)
print("解答:", generated_text)