Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -85,12 +85,13 @@ def process_query(query_text: str, vectorstore):
|
|
85 |
prompt = prompt_template.format(context=context_text, question=query_text)
|
86 |
|
87 |
# Updated HuggingFaceEndpoint configuration
|
|
|
88 |
model = HuggingFaceEndpoint(
|
89 |
-
repo_id="google/flan-t5-base",
|
90 |
task="text2text-generation",
|
91 |
-
temperature=0.5,
|
92 |
-
max_length=512,
|
93 |
-
# huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN")
|
94 |
)
|
95 |
response_text = model.invoke(prompt) # Use invoke instead of predict
|
96 |
|
|
|
85 |
prompt = prompt_template.format(context=context_text, question=query_text)
|
86 |
|
87 |
# Updated HuggingFaceEndpoint configuration
|
88 |
+
# Используем модель t5-base вместо google/flan-t5-base
|
89 |
model = HuggingFaceEndpoint(
|
90 |
+
repo_id="t5-base", # Модель, поддерживающая text2text-generation
|
91 |
task="text2text-generation",
|
92 |
+
temperature=0.5,
|
93 |
+
max_length=512,
|
94 |
+
# huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN")
|
95 |
)
|
96 |
response_text = model.invoke(prompt) # Use invoke instead of predict
|
97 |
|