from fastapi import FastAPI, HTTPException
from transformers import AutoModelForCausalLM, AutoTokenizer
from huggingface_hub import hf_hub_download
import os

app = FastAPI()

# Download and load the model once at startup; reloading a 7B model on
# every request would make each call prohibitively slow.
model_dir = "/code/model"  # model directory inside the Docker container
gguf_filename = "mistral-7b-v0.1.Q4_K_M.gguf"
os.makedirs(model_dir, exist_ok=True)
hf_hub_download(
    repo_id="TheBloke/Mistral-7B-v0.1-GGUF",
    filename=gguf_filename,
    local_dir=model_dir,
)

# A GGUF checkpoint ships no config.json or tokenizer files, so a plain
# from_pretrained(model_dir) would fail. Passing gguf_file makes
# transformers (>= 4.41, with the `gguf` package installed) dequantize
# the GGUF weights into a regular PyTorch model.
tokenizer = AutoTokenizer.from_pretrained(model_dir, gguf_file=gguf_filename)
model = AutoModelForCausalLM.from_pretrained(model_dir, gguf_file=gguf_filename)


@app.get("/")
async def generate_text():
    try:
        prompt = "Once upon a time, there was a"
        inputs = tokenizer(prompt, return_tensors="pt")
        # do_sample=True is required for temperature to take effect and for
        # num_return_sequences > 1 outside of beam search.
        output = model.generate(
            **inputs,  # passes input_ids and attention_mask together
            max_new_tokens=50,  # cap on generated tokens, excluding the prompt
            num_return_sequences=3,
            do_sample=True,
            temperature=0.7,
        )
        generated_texts = tokenizer.batch_decode(output, skip_special_tokens=True)
        return generated_texts
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
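
# A minimal way to exercise the endpoint, assuming this file is saved as
# main.py and uvicorn is installed (the module name and port are assumptions
# for illustration, not part of the original snippet):
#
#   uvicorn main:app --host 0.0.0.0 --port 8000
#   curl http://localhost:8000/
#
# Or start the server directly from Python:
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)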