Update app.py
app.py CHANGED
@@ -1,7 +1,7 @@
-import os
 import streamlit as st
 from PyPDF2 import PdfReader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
+import os
 from langchain_community.embeddings import HuggingFaceEmbeddings
 from langchain.vectorstores import FAISS
 from langchain_groq import ChatGroq
@@ -11,27 +11,36 @@ from dotenv import load_dotenv
 import re
 
 load_dotenv()
+os.getenv("GROQ_API_KEY")
 
-
-st.set_page_config(page_title="PDF Consultor 🔍", page_icon="🔍", layout="wide")
-GROQ_API_KEY = os.getenv("GROQ_API_KEY")
-
-# Funciones auxiliares
-st.markdown("""
+css_style = """
 <style>
-.response-box { padding: 20px; background-color: #f8f9fa; border-radius: 10px; border-left: 5px solid #252850; margin: 20px 0; }
-.metadata-box { padding: 20px; background-color: #f0f2f6; border-radius: 10px; margin-bottom: 20px; }
 .step-number { font-size: 24px; font-weight: bold; }
+.response-box { padding: 20px; background-color: #f8f9fa; border-radius: 10px; border-left: 5px solid #252850; margin: 20px 0; box-shadow: 0 2px 4px rgba(0,0,0,0.1); }
+.metadata-box { padding: 20px; background-color: #f0f2f6; border-radius: 10px; margin-bottom: 20px; }
+.custom-input { font-size: 16px; padding: 10px; border-radius: 5px; border: 1px solid #ccc; }
+.suggestion-container { border: 1px solid #e0e0e0; border-radius: 8px; padding: 15px; margin: 10px 0; background: #f8f9fa; }
+.suggestion-btn { width: 100%; margin: 3px 0; padding: 8px; border-radius: 5px; border: 1px solid #252850; background: white; cursor: pointer; transition: all 0.2s; }
+.suggestion-btn:hover { background: #252850; color: white; }
 </style>
-"""
+"""
 
-# Funciones auxiliares
 def eliminar_proceso_pensamiento(texto):
-
-
+    texto_limpio = re.sub(r'<.*?>', '', texto, flags=re.DOTALL)
+    lineas = [line.strip() for line in texto_limpio.split('\n') if line.strip()]
+    return lineas[-1] if lineas else "Respuesta no disponible"
 
 def get_pdf_text(pdf_docs):
-
+    text = ""
+    for pdf in pdf_docs:
+        pdf_reader = PdfReader(pdf)
+        for page in pdf_reader.pages:
+            text += page.extract_text()
+    return text
+
+def get_text_chunks(text):
+    text_splitter = RecursiveCharacterTextSplitter(chunk_size=5000, chunk_overlap=500)
+    return text_splitter.split_text(text)
 
 def get_vector_store(text_chunks):
     embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
@@ -39,100 +48,189 @@
 
 def get_conversational_chain():
     prompt_template = """
-    Responde en español exclusivamente con la información solicitada usando el contexto
-
-
+    Responde en español exclusivamente con la información solicitada usando el contexto, además sé lo más extenso y detallado posible
+    siempre que se pueda desarrollar, como explicando el contenido de referencias nombradas.
+    Formato: Respuesta directa sin prefijos. Si no hay información, di "No disponible".
+
     Contexto:
     {context}
-
+
     Pregunta:
     {question}
-
+
     Respuesta:
     """
-    model = ChatGroq(
+    model = ChatGroq(
+        temperature=0.2,
+        model_name="deepseek-r1-distill-llama-70b",
+        groq_api_key=os.getenv("GROQ_API_KEY")
+    )
     return load_qa_chain(model, chain_type="stuff", prompt=PromptTemplate(template=prompt_template, input_variables=["context", "question"]))
 
-def
+def extract_metadata(vector_store):
+    metadata_questions = {
+        "title": "¿Cuál es el título principal del documento? Formato: Respuesta simple con algunas letras en mayúscula si hiciera falta",
+        "entity": "¿A qué organización pertenece este documento? Formato: Respuesta directa con el nombre de la entidad.",
+        "date": "¿A qué fecha corresponde el documento? Si existen indicios indica la fecha, sino di 'No disponible'"
+    }
+
+    metadata = {}
+    chain = get_conversational_chain()
+
+    for key, question in metadata_questions.items():
+        docs = vector_store.similarity_search(question, k=2)
+        response = chain({"input_documents": docs, "question": question}, return_only_outputs=True)
+        clean_response = eliminar_proceso_pensamiento(response['output_text'])
+        metadata[key] = clean_response if clean_response else "No disponible"
+
+    return metadata
+
+def mostrar_respuesta(texto):
+    st.markdown(f'<div class="response-box">{texto}</div>', unsafe_allow_html=True)
+
+def generar_sugerencias():
+    if 'vector_store' not in st.session_state:
+        return
+
+    try:
+        docs = st.session_state.vector_store.similarity_search("", k=3)
+        context = "\n".join([doc.page_content for doc in docs])
+
+        prompt_template = """
+        Genera exactamente 3 preguntas en español basadas en el contexto.
+        Las preguntas deben ser en español, simples y sencillas de máximo 10 palabras.
+        Formato de respuesta:
+        1. [Pregunta completa en español]
+        2. [Pregunta completa en español]
+        3. [Pregunta completa en español]
+
+        Contexto:
+        {context}
+        """
+
+        model = ChatGroq(
+            temperature=0.4,
+            model_name="deepseek-r1-distill-llama-70b",
+            groq_api_key=os.getenv("GROQ_API_KEY")
+        )
+
+        response = model.invoke(prompt_template.format(context=context))
+
+        preguntas = []
+        for line in response.content.split("\n"):
+            line = line.strip()
+            if line and line[0].isdigit():
+                pregunta = line.split('. ', 1)[1] if '. ' in line else line[2:]
+                if pregunta:
+                    preguntas.append(pregunta)
+
+        return preguntas[:3]
+
+    except Exception as e:
+        st.error(f"Error generando sugerencias: {str(e)}")
+        return
+
+def procesar_consulta(user_question):
     if 'vector_store' not in st.session_state:
         st.error("Por favor carga un documento primero")
         return
-
+
     chain = get_conversational_chain()
-    docs = st.session_state.vector_store.similarity_search(
-
+    docs = st.session_state.vector_store.similarity_search(user_question)
+
     with st.spinner("Analizando documento..."):
-        response = chain({"input_documents": docs, "question":
-
-    respuesta_final, pensamiento = eliminar_proceso_pensamiento(response['output_text'])
-    mostrar_respuesta(respuesta_final, pensamiento)
+        response = chain({"input_documents": docs, "question": user_question}, return_only_outputs=True)
 
-
-
-    with st.expander("💭 Pensamiento del modelo"):
-        st.write(pensamiento)
+    respuesta_final = eliminar_proceso_pensamiento(response['output_text'])
+    mostrar_respuesta(respuesta_final)
 
-def generar_sugerencias():
-    if 'vector_store' not in st.session_state:
-        return []
-
-    docs = st.session_state.vector_store.similarity_search("", k=3)
-    context = "\n".join([doc.page_content for doc in docs])
-
-    prompt_template = """
-    Genera exactamente 3 preguntas simples en español basadas en este contexto.
-
-    Contexto:
-    {context}
-
-    Preguntas sugeridas:
-    """
-
-    model = ChatGroq(temperature=0.4, model_name="deepseek-r1-distill-llama-70b", groq_api_key=GROQ_API_KEY)
-    response = model.invoke(prompt_template.format(context=context))
-
-    preguntas = [line.split('. ', 1)[1] for line in response.content.split("\n") if line.strip() and line[0].isdigit()]
-    return preguntas[:3]
-
-# Aplicación principal
 def main():
+    st.set_page_config(page_title="PDF Consultor 🔍", page_icon="🔍", layout="wide")
     st.title("PDF Consultor 🔍")
+    st.markdown(css_style, unsafe_allow_html=True)
+
+    estados = {
+        'documento_cargado': False,
+        'sugerencias': [],
+        'pregunta_actual': "",
+        'respuestas': []
+    }
+    for key, value in estados.items():
+        if key not in st.session_state:
+            st.session_state[key] = value
 
-    # Estados de sesión
-    if 'documento_cargado' not in st.session_state:
-        st.session_state.documento_cargado = False
-        st.session_state.sugerencias = []
-        st.session_state.pregunta_actual = ""
-
-    # Sidebar de carga de documentos
     with st.sidebar:
-        st.markdown('<p class="step-number">1
-        pdf_docs = st.file_uploader("Subir PDF(s)", accept_multiple_files=True, type=["pdf"])
-
-
-
+        st.markdown('<p class="step-number">1 Subir archivos</p>', unsafe_allow_html=True)
+        pdf_docs = st.file_uploader("Subir PDF(s)", accept_multiple_files=True, type=["pdf"], label_visibility="collapsed")
+
+        if pdf_docs and not st.session_state.documento_cargado:
+            with st.spinner("Analizando documento..."):
+                try:
                     raw_text = get_pdf_text(pdf_docs)
-        text_chunks =
+                    text_chunks = get_text_chunks(raw_text)
                     vector_store = get_vector_store(text_chunks)
+
+                    st.session_state.metadata = extract_metadata(vector_store)
                     st.session_state.vector_store = vector_store
                     st.session_state.documento_cargado = True
                     st.session_state.sugerencias = generar_sugerencias()
-        st.success("Documento procesado exitosamente.")
-        st.experimental_rerun()
 
-
-
+                    st.rerun()
+
+                except Exception as e:
+                    st.error(f"Error procesando documento: {str(e)}")
+
+    if 'metadata' in st.session_state:
+        st.markdown("---")
+        cols = st.columns(3)
+        campos_metadata = [
+            ("📄 Título", "title"),
+            ("🏛️ Entidad", "entity"),
+            ("📅 Fecha", "date")
+        ]
+
+        for col, (icono, key) in zip(cols, campos_metadata):
+            with col:
+                st.markdown(f"""
+                <div class="metadata-box">
+                    <div style="font-size:16px; margin-bottom:10px;">{icono}</div>
+                    {st.session_state.metadata[key]}
+                </div>
+                """, unsafe_allow_html=True)
+
     if st.session_state.sugerencias:
-        st.
-
-
-
-
-
-
-
-
-
+        st.markdown("---")
+        with st.container():
+            st.markdown("""
+            <div class="suggestion-container">
+                <div style="font-size:14px; color:#666; margin-bottom:8px;">💡 ¿Necesitas ideas?</div>
+            """, unsafe_allow_html=True)
+
+            cols_sugerencias = st.columns(3)
+            for i, (col, pregunta) in enumerate(zip(cols_sugerencias, st.session_state.sugerencias)):
+                with col:
+                    if st.button(pregunta, key=f"sug_{i}", help="Haz clic para usar esta pregunta", use_container_width=True):
+                        st.session_state.pregunta_actual = pregunta
+
+            st.markdown("</div>", unsafe_allow_html=True)
+
+    if st.session_state.documento_cargado:
+        with st.form(key="consulta_form"):
+            col1, col2 = st.columns([5, 1])
+            with col1:
+                pregunta_usuario = st.text_input("Escribe tu pregunta:", value=st.session_state.get('pregunta_actual', ''), placeholder="Ej: ¿De qué trata este documento?", label_visibility="collapsed")
+            with col2:
+                st.markdown("<br>", unsafe_allow_html=True)
+                enviar = st.form_submit_button("Enviar ▶")
+
+        if enviar or st.session_state.pregunta_actual:
+            pregunta_final = pregunta_usuario or st.session_state.pregunta_actual
+            procesar_consulta(pregunta_final)
+            if 'pregunta_actual' in st.session_state:
+                del st.session_state.pregunta_actual
+
+    elif not st.session_state.documento_cargado:
+        st.info("Por favor, sube un documento PDF para comenzar.")
 
 if __name__ == "__main__":
     main()
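The unchanged body of get_vector_store sits outside the hunk context above, so the line that actually builds the index never appears in the diff. A minimal sketch of the usual completion, assuming the stock FAISS.from_texts helper from langchain; the committed line itself is not visible here:

from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS

def get_vector_store(text_chunks):
    # Embed each chunk with the same MiniLM model the app configures, then
    # build an in-memory FAISS index; callers only use .similarity_search().
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    return FAISS.from_texts(text_chunks, embedding=embeddings)  # assumed completion, not shown in the diff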
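eliminar_proceso_pensamiento exists because deepseek-r1-distill-llama-70b returns its chain of thought alongside the answer. Note that the regex only removes tag markers (anything from < to the next >); it is the last-non-empty-line rule that actually discards the reasoning. A self-contained check, with the sample response invented for illustration:

import re

def eliminar_proceso_pensamiento(texto):
    # Strip tag markers such as <think> and </think>; the enclosed prose survives.
    texto_limpio = re.sub(r'<.*?>', '', texto, flags=re.DOTALL)
    # Keeping only the last non-empty line is what drops the reasoning text.
    lineas = [line.strip() for line in texto_limpio.split('\n') if line.strip()]
    return lineas[-1] if lineas else "Respuesta no disponible"

salida = "<think>\nEl usuario pide la fecha del documento...\n</think>\nEl documento corresponde a marzo de 2021."
print(eliminar_proceso_pensamiento(salida))  # -> El documento corresponde a marzo de 2021.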
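The new get_text_chunks splits the extracted text into 5000-character windows with 500 characters of overlap, so adjacent FAISS entries share context across chunk boundaries. A quick illustration of the window arithmetic; a separator-free string is the worst case, and on real text the splitter prefers paragraph and sentence breaks first:

from langchain.text_splitter import RecursiveCharacterTextSplitter

texto = "x" * 12000  # stand-in for text extracted from a PDF
splitter = RecursiveCharacterTextSplitter(chunk_size=5000, chunk_overlap=500)
chunks = splitter.split_text(texto)
# With nothing to split on, windows land at 0-5000, 4500-9500 and 9000-12000:
print([len(c) for c in chunks])  # expected: [5000, 5000, 3000]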
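The query path that procesar_consulta wires into the form can also be exercised outside Streamlit, which is handy for debugging the chain. A sketch assuming the file is importable as `app`, a `documento.pdf` exists locally, and GROQ_API_KEY is set in the environment; all three are assumptions, not part of the commit:

from app import (get_pdf_text, get_text_chunks, get_vector_store,
                 get_conversational_chain, eliminar_proceso_pensamiento)

# Build the index the same way the sidebar handler does.
with open("documento.pdf", "rb") as f:  # hypothetical input file
    vector_store = get_vector_store(get_text_chunks(get_pdf_text([f])))

pregunta = "¿De qué trata este documento?"
docs = vector_store.similarity_search(pregunta)  # k defaults to 4
chain = get_conversational_chain()
response = chain({"input_documents": docs, "question": pregunta}, return_only_outputs=True)
print(eliminar_proceso_pensamiento(response['output_text']))

Inside the Space itself the entry point is unchanged: streamlit run app.py.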