Update app.py
Browse files
app.py
CHANGED
@@ -25,15 +25,25 @@ if not os.path.exists(persist_directory):
|
|
25 |
else:
|
26 |
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding_function)
|
27 |
|
28 |
-
client = InferenceClient("google/flan-t5-
|
29 |
|
30 |
ranker = Reranker("answerdotai/answerai-colbert-small-v1", model_type='colbert')
|
31 |
|
32 |
def generate_text(context, query):
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
37 |
|
38 |
def test_rag_reranking(query, ranker):
|
39 |
print(f"\n馃攳 Pregunta recibida: {query}")
|
|
|
25 |
else:
|
26 |
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding_function)
|
27 |
|
28 |
# Remote text-generation backend: small instruction-tuned T5 served via the
# Hugging Face Inference API; the token is read from the environment.
client = InferenceClient("google/flan-t5-small", token=os.getenv("HUGGINGFACEHUB_API_TOKEN"))

# Second-stage reranker (ColBERT-style late interaction) used to reorder
# retrieved passages before answer generation.
ranker = Reranker("answerdotai/answerai-colbert-small-v1", model_type='colbert')
32 |
def generate_text(context, query):
    """Generate an answer to *query* grounded only in *context*.

    Builds a Spanish-language RAG prompt instructing the model to answer
    strictly from the provided context (or say it lacks information),
    sends it to the module-level ``client`` (Hugging Face Inference API),
    and returns the model's reply with surrounding whitespace stripped.

    Parameters
    ----------
    context : str
        Retrieved passage(s) the answer must be based on.
    query : str
        The user's question.

    Returns
    -------
    str
        The model's generated answer, stripped.
    """
    # NOTE(review): accented characters were mojibake-corrupted in the
    # scraped source ("bas谩ndote", "informaci贸n"); restored to proper
    # UTF-8 Spanish here so the model receives clean text.
    prompt = f"""
Contesta la siguiente pregunta basándote solo en el contexto proporcionado.
Si no puedes encontrar la respuesta en el contexto, indica que no tienes suficiente información.

Contexto:
{context}

Pregunta:
{query}

Respuesta:
"""
    # max_new_tokens bounds the answer length; flan-t5-small is a short-form model.
    response = client.text_generation(prompt=prompt, max_new_tokens=200)
    return response.strip()
|
47 |
|
48 |
def test_rag_reranking(query, ranker):
|
49 |
print(f"\n馃攳 Pregunta recibida: {query}")
|