melk2025 committed on
Commit
6262323
·
verified ·
1 Parent(s): 132d6d9

added history

Browse files
Files changed (1) hide show
  1. app.py +16 -7
app.py CHANGED
@@ -83,7 +83,7 @@ qa_answers = list(qa_data.values())
83
  qa_embeddings = semantic_model.encode(qa_questions, convert_to_tensor=True)
84
 
85
  # ---------------------- History-Aware CAG ----------------------
86
- def retrieve_from_cag(user_query):
87
  query_embedding = semantic_model.encode(user_query, convert_to_tensor=True)
88
  cosine_scores = util.cos_sim(query_embedding, qa_embeddings)[0]
89
  best_idx = int(np.argmax(cosine_scores))
@@ -120,10 +120,14 @@ def retrieve_from_rag(user_query, chat_history):
120
  return documents
121
 
122
  # ---------------------- Generation function (OpenRouter) ----------------------
123
- def generate_via_openrouter(context, query):
124
  print("\n--- Generating via OpenRouter ---")
125
  print("Context received:", context)
126
 
 
 
 
 
127
  prompt = f"""<s>[INST]
128
  You are a Moodle expert assistant.
129
  Instructions:
@@ -131,16 +135,20 @@ Instructions:
131
  - Use only the provided documents below to answer.
132
  - If the answer is not in the documents, simply say: "I don't know." / "Je ne sais pas."
133
  - Cite only the sources you use, indicated at the end of each document like (Source: https://example.com).
134
- Documents :
 
 
 
 
135
  {context}
136
- Question : {query}
137
- Answer :
 
138
  [/INST]
139
  """
140
-
141
  try:
142
  response = client1.chat.completions.create(
143
- model="mistralai/mistral-small-3.1-24b-instruct:free",
144
  messages=[{"role": "user", "content": prompt}]
145
  )
146
  return response.choices[0].message.content.strip()
@@ -148,6 +156,7 @@ Answer :
148
  print(f"Erreur lors de la génération : {e}")
149
  return "Erreur lors de la génération."
150
 
 
151
  # ---------------------- Main Chatbot ----------------------
152
  def chatbot(query, chat_history):
153
  print("\n==== New Query ====")
 
83
  qa_embeddings = semantic_model.encode(qa_questions, convert_to_tensor=True)
84
 
85
  # ---------------------- History-Aware CAG ----------------------
86
+ def retrieve_from_cag(user_query, chat_history):
87
  query_embedding = semantic_model.encode(user_query, convert_to_tensor=True)
88
  cosine_scores = util.cos_sim(query_embedding, qa_embeddings)[0]
89
  best_idx = int(np.argmax(cosine_scores))
 
120
  return documents
121
 
122
  # ---------------------- Generation function (OpenRouter) ----------------------
123
+ def generate_via_openrouter(context, query, chat_history=None):
124
  print("\n--- Generating via OpenRouter ---")
125
  print("Context received:", context)
126
 
127
+ history_text = ""
128
+ if chat_history:
129
+ history_text = "\n".join([f"User: {q}\nBot: {a}" for q, a in chat_history[-2:]]) # Last 2 exchanges only
130
+
131
  prompt = f"""<s>[INST]
132
  You are a Moodle expert assistant.
133
  Instructions:
 
135
  - Use only the provided documents below to answer.
136
  - If the answer is not in the documents, simply say: "I don't know." / "Je ne sais pas."
137
  - Cite only the sources you use, indicated at the end of each document like (Source: https://example.com).
138
+
139
+ Chat History:
140
+ {history_text}
141
+
142
+ Documents:
143
  {context}
144
+
145
+ Question: {query}
146
+ Answer:
147
  [/INST]
148
  """
 
149
  try:
150
  response = client1.chat.completions.create(
151
+ model="mistralai/mistral-7b-instruct:free",
152
  messages=[{"role": "user", "content": prompt}]
153
  )
154
  return response.choices[0].message.content.strip()
 
156
  print(f"Erreur lors de la génération : {e}")
157
  return "Erreur lors de la génération."
158
 
159
+
160
  # ---------------------- Main Chatbot ----------------------
161
  def chatbot(query, chat_history):
162
  print("\n==== New Query ====")