Kaan committed on
Commit ac2091a · verified · 1 Parent(s): df266e0

Update app.py

Files changed (1): app.py +8 -9
app.py CHANGED
@@ -5,18 +5,17 @@ import os
 
 app = FastAPI()
 
-
-
-
-
 @app.get("/")
 async def generate_text():
     try:
-        current_directory = os.path.dirname(os.path.abspath(__file__))
-        hf_hub_download(repo_id="TheBloke/Mistral-7B-v0.1-GGUF", filename="mistral-7b-v0.1.Q4_K_M.gguf", local_dir=current_directory)
+        # Specify the directory for model download within the Docker container
+        model_dir = "/code/model"
+        os.makedirs(model_dir, exist_ok=True)
+
+        hf_hub_download(repo_id="TheBloke/Mistral-7B-v0.1-GGUF", filename="mistral-7b-v0.1.Q4_K_M.gguf", local_dir=model_dir)
 
-        tokenizer = AutoTokenizer.from_pretrained(model_folder)
-        model = AutoModelForCausalLM.from_pretrained(model_folder)
+        tokenizer = AutoTokenizer.from_pretrained(model_dir)
+        model = AutoModelForCausalLM.from_pretrained(model_dir)
 
         prompt = "Once upon a time, there was a"
         inputs = tokenizer(prompt, return_tensors="pt")
@@ -24,4 +23,4 @@ async def generate_text():
         generated_texts = tokenizer.batch_decode(output, skip_special_tokens=True)
         return generated_texts
     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        raise HTTPException(status_code=500, detail=str(e))
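
For reference, a minimal sketch of how the full app.py could read after this commit. The import block and the text-generation call fall outside the diff context, so the fastapi/huggingface_hub/transformers imports and the model.generate(...) line below are assumptions inferred from the names used in the hunks, not part of the commit itself.

# Hypothetical reconstruction of app.py after commit ac2091a.
# Lines outside the diff context (imports, generation call) are assumed.
import os

from fastapi import FastAPI, HTTPException                     # assumed import
from huggingface_hub import hf_hub_download                    # assumed import
from transformers import AutoTokenizer, AutoModelForCausalLM   # assumed import

app = FastAPI()


@app.get("/")
async def generate_text():
    try:
        # Specify the directory for model download within the Docker container
        model_dir = "/code/model"
        os.makedirs(model_dir, exist_ok=True)

        hf_hub_download(
            repo_id="TheBloke/Mistral-7B-v0.1-GGUF",
            filename="mistral-7b-v0.1.Q4_K_M.gguf",
            local_dir=model_dir,
        )

        tokenizer = AutoTokenizer.from_pretrained(model_dir)
        model = AutoModelForCausalLM.from_pretrained(model_dir)

        prompt = "Once upon a time, there was a"
        inputs = tokenizer(prompt, return_tensors="pt")
        # The generation step is not shown in the diff; something like this is assumed:
        output = model.generate(**inputs, max_new_tokens=50)
        generated_texts = tokenizer.batch_decode(output, skip_special_tokens=True)
        return generated_texts
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

Per the added comment, the change pins the download to the fixed path /code/model inside the Docker container (created with os.makedirs) instead of deriving the directory from the application file's location.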