annikwag committed (verified)
Commit 5fc3c7d
1 parent: 2827bd5

Update app.py

Files changed (1)
  1. app.py +8 -9
app.py CHANGED
@@ -11,14 +11,9 @@ from torch import cuda
 import json
 from datetime import datetime
 
-
+model_config = getconfig("model_params.cfg")
 ###########
-# ToDo move to config file and functions
-# Configuration for the dedicated model
-DEDICATED_MODEL = "meta-llama/Llama-3.1-8B-Instruct"
-DEDICATED_ENDPOINT = "https://qu2d8m6dmsollhly.us-east-1.aws.endpoints.huggingface.cloud"
-# Retrieve your write access token from the settings (assuming you stored it in st.secrets)
-WRITE_ACCESS_TOKEN = st.secrets["Llama_3_1"]
+# ToDo move to functions
 
 def get_rag_answer(query, top_results):
     """
@@ -260,8 +255,12 @@ if show_exact_matches:
     if not filtered_lexical_no_dupe:
         st.write('No exact matches, consider unchecking "Show only exact matches"')
     else:
-        # 6) Display the first 15 matching results
-        for res in filtered_lexical_no_dupe[:15]:
+        top_results = filtered_lexical_no_dupe[:15]
+        rag_answer = get_rag_answer(var, top_results)
+        st.markdown("### Generated Answer")
+        st.write(rag_answer)
+        st.divider()
+        for res in top_results:
             # Metadata
             metadata = res.payload.get('metadata', {})
             countries = metadata.get('countries', "[]")
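
The first hunk replaces the hard-coded dedicated-model constants (DEDICATED_MODEL, DEDICATED_ENDPOINT, WRITE_ACCESS_TOKEN) with a single model_config = getconfig("model_params.cfg") call. Neither model_params.cfg nor the getconfig helper is shown in this commit, so the following is only a minimal sketch, assuming an INI-style file read with Python's configparser; the section and key names are hypothetical:

# model_params.cfg (hypothetical layout; the real file is not part of this diff)
#
# [model]
# DEDICATED_MODEL = meta-llama/Llama-3.1-8B-Instruct
# DEDICATED_ENDPOINT = https://qu2d8m6dmsollhly.us-east-1.aws.endpoints.huggingface.cloud

import configparser

def getconfig(path: str) -> configparser.ConfigParser:
    # Sketch of a config loader; the app's actual getconfig may differ.
    config = configparser.ConfigParser()
    config.read(path)
    return config

model_config = getconfig("model_params.cfg")
# Example lookups under the assumed [model] section:
# model_id = model_config.get("model", "DEDICATED_MODEL")
# endpoint = model_config.get("model", "DEDICATED_ENDPOINT")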
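
The second hunk routes the first 15 exact matches through get_rag_answer(var, top_results) and shows the generated answer above the individual results. The body of get_rag_answer is unchanged by this commit and not shown here; as a rough sketch of how such a function might call the dedicated Llama endpoint (assuming huggingface_hub's InferenceClient, the endpoint URL and st.secrets token from the removed constants, and a guessed 'answer' field in the result payloads):

import streamlit as st
from huggingface_hub import InferenceClient

def get_rag_answer(query, top_results):
    # Sketch only; the real implementation in app.py is not shown in this diff.
    # Build a context block from the retrieved passages ('answer' is a guessed payload field).
    context = "\n\n".join(res.payload.get("answer", "") for res in top_results)
    prompt = (
        "Answer the question using only the context below.\n\n"
        f"Context:\n{context}\n\n"
        f"Question: {query}"
    )
    client = InferenceClient(
        model="https://qu2d8m6dmsollhly.us-east-1.aws.endpoints.huggingface.cloud",  # value of the former DEDICATED_ENDPOINT
        token=st.secrets["Llama_3_1"],  # formerly WRITE_ACCESS_TOKEN
    )
    response = client.chat_completion(
        messages=[{"role": "user", "content": prompt}],
        max_tokens=512,
    )
    return response.choices[0].message.content

With the constants now moved out of app.py, the model name and endpoint would presumably be read from model_config rather than hard-coded as above.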