Update app.py
app.py
CHANGED
@@ -1,16 +1,29 @@
 from fastapi import FastAPI, UploadFile, File
-import json, re, io
+import json, re, io, os, requests
 from llama_cpp import Llama
 from PyPDF2 import PdfReader
 from docx import Document
-import os
 
-# ✅
-
-
-
-
-
+# ✅ Define model URL and path
+MODEL_URL = "https://huggingface.co/TheBloke/CapybaraHermes-2.5-Mistral-7B-GGUF/resolve/main/capybarahermes-2.5-mistral-7b.Q5_K_M.gguf"
+MODEL_DIR = "/app/models"
+MODEL_PATH = os.path.join(MODEL_DIR, "mistral-7b.Q5_K_M.gguf")
+
+# ✅ Ensure models directory exists
+os.makedirs(MODEL_DIR, exist_ok=True)
+
+# ✅ Download model if not already available
+if not os.path.exists(MODEL_PATH):
+    print(f"🔹 Downloading model from: {MODEL_URL}")
+    response = requests.get(MODEL_URL, stream=True)
+    with open(MODEL_PATH, "wb") as file:
+        for chunk in response.iter_content(chunk_size=8192):
+            file.write(chunk)
+    print("✅ Model downloaded successfully!")
+
+# ✅ Load Mistral 7B using llama_cpp
+print(f"🔹 Loading Mistral 7B from {MODEL_PATH} (This may take a while)")
+llm = Llama(model_path=MODEL_PATH, n_ctx=4096, n_gpu_layers=-1)  # Use GPU if available
 print("✅ Model loaded successfully!")
 
 app = FastAPI(title="Resume Parsing API", description="Extracts key details from resumes using Mistral 7B")
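Note that the new download step streams the response straight into MODEL_PATH, so a failed or interrupted request can leave a truncated .gguf that the `if not os.path.exists(MODEL_PATH)` guard will later treat as a complete model. The sketch below is one hedged way to harden that step; the helper name `download_model`, the `.part` suffix, and the timeout value are illustrative and not part of this commit.

import os
import requests

def download_model(url: str, dest_path: str, chunk_size: int = 8192) -> None:
    # Illustrative helper, not part of the committed app.py.
    # Stream to a temporary ".part" file, fail loudly on HTTP errors,
    # and only rename to dest_path once the download has finished.
    tmp_path = dest_path + ".part"  # hypothetical temp name
    with requests.get(url, stream=True, timeout=60) as response:
        response.raise_for_status()  # abort on 4xx/5xx instead of saving an error page
        with open(tmp_path, "wb") as fh:
            for chunk in response.iter_content(chunk_size=chunk_size):
                if chunk:  # skip keep-alive chunks
                    fh.write(chunk)
    os.replace(tmp_path, dest_path)  # atomic rename: dest_path is either absent or complete

With a helper like this, the existing guard could simply call `download_model(MODEL_URL, MODEL_PATH)` instead of inlining the streaming loop.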