Runtime error
Update app.py
app.py CHANGED
@@ -33,6 +33,22 @@ LOGGING_CONFIG = {
         'process_query': True
     }
 }
+def download_file_with_progress(url: str, filename: str):
+    """Download a file with progress bar using requests"""
+    response = requests.get(url, stream=True)
+    total_size = int(response.headers.get('content-length', 0))
+
+    with open(filename, 'wb') as file, tqdm(
+        desc=filename,
+        total=total_size,
+        unit='iB',
+        unit_scale=True,
+        unit_divisor=1024,
+    ) as progress_bar:
+        for data in response.iter_content(chunk_size=1024):
+            size = file.write(data)
+            progress_bar.update(size)
+
 @st.cache_data
 def load_from_drive(file_id: str):
     """Load pickle file directly from Google Drive"""
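Note on the new download helper above: it relies on requests and tqdm, whose imports are not part of this diff and would need to exist near the top of app.py. If a server omits the Content-Length header, total_size falls back to 0 and tqdm simply reports an unknown total. A minimal usage sketch, assuming those imports are present (the URL and target filename below are illustrative, not taken from the commit):

    import requests
    from tqdm import tqdm

    # Illustrative call; the URL used by app.py itself appears in initialize_model() below.
    download_file_with_progress(
        "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_K_M.gguf",
        "mistral-7b-v0.1.Q4_K_M.gguf",
    )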
@@ -120,13 +136,20 @@ def check_environment():
 
 @st.cache_resource
 def initialize_model():
-    """Initialize the
-
-
-
-
-
-
+    """Initialize the model with proper error handling and verification"""
+    try:
+        if not os.path.exists(self.model_path):
+            direct_url = "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_K_M.gguf"
+            download_file_with_progress(direct_url, self.model_path)
+
+        # Verify file exists and has content
+        if not os.path.exists(self.model_path):
+            raise FileNotFoundError(f"Model file {self.model_path} not found after download attempts")
+
+        if os.path.getsize(self.model_path) < 1000000:  # Less than 1MB
+            os.remove(self.model_path)
+            raise ValueError("Downloaded model file is too small, likely corrupted")
+
 
     llm_config = {
         "n_ctx": 2048,
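Note on the rewritten initialize_model(): the new body references self.model_path, but initialize_model() is a module-level function with no self, so evaluating that expression would raise a NameError the first time the function runs, which may be what the Space's "Runtime error" status reflects. Below is a hedged sketch of the same download-and-verify logic with that reference replaced by a hypothetical module-level MODEL_PATH constant; the constant name, the st.error fallback, and the except clause are assumptions for illustration, not part of the commit:

    import os
    import streamlit as st

    # Hypothetical constant; the real app.py may store the model path elsewhere.
    MODEL_PATH = "mistral-7b-v0.1.Q4_K_M.gguf"

    @st.cache_resource
    def initialize_model():
        """Sketch: download the model if missing, then verify it.
        Assumes download_file_with_progress from the hunk above is defined."""
        try:
            if not os.path.exists(MODEL_PATH):
                direct_url = "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_K_M.gguf"
                download_file_with_progress(direct_url, MODEL_PATH)

            # Verify the file exists and is plausibly complete
            if not os.path.exists(MODEL_PATH):
                raise FileNotFoundError(f"Model file {MODEL_PATH} not found after download attempts")
            if os.path.getsize(MODEL_PATH) < 1000000:  # less than ~1 MB
                os.remove(MODEL_PATH)
                raise ValueError("Downloaded model file is too small, likely corrupted")
        except Exception as e:
            # Not visible in this hunk; included only so the try block is complete.
            st.error(f"Model initialization failed: {e}")
            return None

        # llm_config and model loading continue here as in the diff ("n_ctx": 2048, ...).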