SergeyO7 committed (verified)
Commit e1c91d4 · 1 Parent(s): 7220c5a

Update app.py

Files changed (1)
  1. app.py +9 -11
app.py CHANGED
@@ -81,22 +81,20 @@ def process_query(query_text: str, vectorstore):
         for doc, score in results
     ])
 
-    prompt_template = ChatPromptTemplate.from_template(PROMPT_TEMPLATE)
-    prompt = prompt_template.format(context=context_text, question=query_text)
+    # Build a plain string prompt for the model
+    prompt = f"Answer the question based on the following context:\n{context_text}\n\nQuestion: {query_text}"
 
-    # Updated HuggingFaceEndpoint configuration
-    # Use the t5-base model instead of google/flan-t5-base
+    # Use the t5-base model for text2text-generation
     model = HuggingFaceEndpoint(
-        repo_id="deepset/roberta-base-squad2",  # Model for question-answering
-        task="question-answering",
+        repo_id="t5-base",
+        task="text2text-generation",
         temperature=0.5,
         max_length=512,
-        # huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN")
+        # huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN")  # Uncomment if a token is needed
     )
-
-    # Pass the question and context as a dictionary
-    response = model.invoke({"question": query_text, "context": context_text})
-    response_text = response.get("answer", "No answer")
+
+    # Pass the string prompt directly
+    response_text = model.invoke(prompt)
 
     sources = list(set([doc.metadata.get("source", "") for doc, _ in results]))
     return response_text, sources
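
For reference, below is a minimal, self-contained sketch of the flow this commit switches to. It is an illustration under stated assumptions, not the app's actual code: it imports HuggingFaceEndpoint from langchain_huggingface (app.py may use a different import path), reads HUGGINGFACEHUB_API_TOKEN from the environment, and stubs the vectorstore retrieval with hard-coded (Document, score) pairs so it can run on its own.

# Minimal sketch of the post-commit flow; the retrieval step is stubbed out,
# and the import path / token handling are assumptions, not taken from app.py.
import os

from langchain_core.documents import Document
from langchain_huggingface import HuggingFaceEndpoint


def answer_from_results(query_text, results):
    # results: (Document, score) pairs, standing in for
    # vectorstore.similarity_search_with_score(query_text) in app.py.
    context_text = "\n\n---\n\n".join([doc.page_content for doc, score in results])

    # Plain f-string prompt, as introduced by this commit.
    prompt = (
        "Answer the question based on the following context:\n"
        f"{context_text}\n\nQuestion: {query_text}"
    )

    model = HuggingFaceEndpoint(
        repo_id="t5-base",
        task="text2text-generation",
        temperature=0.5,
        huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
    )

    # For a text2text-generation endpoint, invoke() takes the prompt string
    # and returns the generated text as a string.
    response_text = model.invoke(prompt)

    sources = list(set([doc.metadata.get("source", "") for doc, _ in results]))
    return response_text, sources


if __name__ == "__main__":
    stub_results = [
        (Document(page_content="Paris is the capital of France.",
                  metadata={"source": "geo.txt"}), 0.12),
    ]
    answer, sources = answer_from_results("What is the capital of France?", stub_results)
    print(answer, sources)

Whether t5-base gives useful answers through the Inference API is a separate question; the sketch only demonstrates the string-prompt invoke() pattern that replaces the earlier dictionary-based question-answering call.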