Update app.py
app.py CHANGED
@@ -15,7 +15,7 @@ from langchain.prompts.prompt import PromptTemplate
 from langchain.chains import ConversationalRetrievalChain
 from langchain.chains import LLMChain
 from langchain.memory import ConversationBufferMemory
-from
+from langchain_community.vectorstores import Chroma # Fixed import
 from pydantic import BaseModel, Field
 from Ingestion.ingest import process_document, get_processor_for_file
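
The one-line change above replaces a broken import (captured only partially in this diff) with Chroma from langchain_community.vectorstores, the package that hosts this integration. Below is a minimal, hypothetical sketch of that import in use together with the same sentence-transformers model that load_embeddings() configures later in app.py; the stand-in chunks and retriever call are illustrative and not the app's actual code.

# Sketch only: embed two stand-in chunks and build an in-memory Chroma store.
from langchain_core.documents import Document
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma  # same import the commit adds

chunks = [
    Document(page_content="DocMind AI analyzes uploaded documents."),
    Document(page_content="Chunks are embedded and stored in Chroma."),
]
embeddings = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-MiniLM-L6-v2",
    model_kwargs={"device": "cpu"},
)
vectorstore = Chroma.from_documents(documents=chunks, embedding=embeddings)
retriever = vectorstore.as_retriever(search_kwargs={"k": 2})
print(retriever.invoke("What does DocMind AI do?"))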

@@ -166,40 +166,39 @@ def clean_llm_response(response):
 
     return content
 
-# Initialize LLM
-@st.cache_resource
+# Initialize LLM without widgets in the cached function
+@st.cache_resource
 def load_model():
-        st.error(f"Error loading model: {str(e)}")
-        return None
+    try:
+        llm = Llama.from_pretrained(
+            repo_id="stduhpf/google-gemma-3-1b-it-qat-q4_0-gguf-small",
+            filename="gemma-3-1b-it-q4_0_s.gguf",
+        )
+        return llm
+    except Exception as e:
+        return None
 
-# Initialize embeddings
-@st.cache_resource
+# Initialize embeddings without widgets in the cached function
+@st.cache_resource
 def load_embeddings():
     from langchain_community.embeddings import HuggingFaceEmbeddings
 
-    return embeddings
+    embeddings = HuggingFaceEmbeddings(
+        model_name="sentence-transformers/all-MiniLM-L6-v2",
+        model_kwargs={'device': 'cpu'}
+    )
+    return embeddings
 
 # Sidebar Configuration with improved styling
 st.sidebar.markdown("<div style='text-align: center;'><h1>🧠 DocMind AI</h1></div>", unsafe_allow_html=True)
 st.sidebar.markdown("<div style='text-align: center;'>AI-Powered Document Analysis</div>", unsafe_allow_html=True)
 st.sidebar.markdown("---")
 
-# Load LLM
+# Load LLM - Move spinner outside the cached function
 with st.sidebar:
+    with st.spinner("Loading model..."):
+        llm = load_model()
+
     if llm is not None:
         st.markdown("<div class='status-success'>✅ Model loaded successfully!</div>", unsafe_allow_html=True)
     else:
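
The larger change keeps Streamlit calls out of the @st.cache_resource functions: load_model() and load_embeddings() now only build and return the resource, with load_model() returning None on failure, while the spinner and status messages live at the call site in the sidebar. A minimal sketch of that pattern follows, with a hypothetical expensive_setup() standing in for the real loaders and a hypothetical error message that is not app.py's actual markup.

import streamlit as st

def expensive_setup():
    # Hypothetical stand-in for Llama.from_pretrained(...) / HuggingFaceEmbeddings(...).
    return object()

@st.cache_resource  # cached across reruns; no Streamlit UI calls inside
def load_resource():
    try:
        return expensive_setup()
    except Exception:
        return None  # signal failure; report it at the call site instead

with st.sidebar:
    with st.spinner("Loading model..."):  # UI feedback stays outside the cached function
        resource = load_resource()
    if resource is not None:
        st.success("Resource loaded.")
    else:
        st.error("Resource failed to load.")  # hypothetical message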

@@ -539,7 +538,7 @@ def run_analysis():
 
     # Only create embeddings if we have chunks
     if all_chunks and len(all_chunks) > 0:
-        # Load embeddings
+        # Load embeddings - moving spinner outside
         embeddings = load_embeddings()
 
         # Using 'None' as namespace to avoid unique ID issues with Chroma
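
The final hunk only rewords the comment above the existing load_embeddings() call in run_analysis(). For reference, the model configuration that the new load_model() uses can also be exercised outside Streamlit; a standalone sketch, assuming llama-cpp-python (with huggingface_hub) is installed, and with an illustrative prompt that is not part of app.py:

from llama_cpp import Llama

# Same GGUF checkpoint the commit points load_model() at; downloaded from the Hub on first use.
llm = Llama.from_pretrained(
    repo_id="stduhpf/google-gemma-3-1b-it-qat-q4_0-gguf-small",
    filename="gemma-3-1b-it-q4_0_s.gguf",
)
out = llm.create_chat_completion(
    messages=[{"role": "user", "content": "Summarize this document in one sentence."}],
    max_tokens=128,
)
print(out["choices"][0]["message"]["content"])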