ofermend committed
Commit 4c78198 · 1 Parent(s): 6fb6db8
Files changed (1)
  1. agent.py +27 -5
agent.py CHANGED
@@ -16,10 +16,8 @@ prompt = """
 [INSTRUCTIONS]
 If the search results are irrelevant to the question respond with *** I do not have enough information to answer this question.***
 Search results may include tables in a markdown format. When answering a question using a table be careful about which rows and columns contain the answer and include all relevant information from the relevant rows and columns that the query is asking about.
-Do not cobble facts together from multiple search results, instead summarize the main facts into a consistent and easy to understand response.
 Do not base your response on information or knowledge that is not in the search results.
 Make sure your response is answering the query asked. If the query is related to an entity (such as a person or place), make sure you use search results related to that entity.
-For queries where only a short answer is required, you can give a brief response.
 Consider that each search result is a partial segment from a bigger text, and may be incomplete.
 Your output should always be in a single language - the $vectaraLangName language. Check spelling and grammar for the $vectaraLangName language.
 Search results for the query *** $vectaraQuery***, are listed below, some are text, some MAY be tables in markdown format.
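For orientation: the hunk header shows this template is an ordinary module-level Python string (prompt = """), with the $vectaraLangName and $vectaraQuery placeholders presumably substituted by Vectara's prompt templating rather than by Python, and the string is consumed further down via vectara_prompt_text=prompt (see the next hunk). A minimal sketch of that relationship, abridged to the lines visible above:

# Sketch only: abridged to the lines shown in the hunk; "..." marks elided
# template lines that this commit does not touch.
prompt = """
[INSTRUCTIONS]
If the search results are irrelevant to the question respond with *** I do not have enough information to answer this question.***
...
Search results for the query *** $vectaraQuery***, are listed below, some are text, some MAY be tables in markdown format.
"""

# Later, inside create_assistant_tools(cfg), the template is wired into the
# RAG tool (see the next hunk):
#   vectara_prompt_text=prompt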
@@ -56,14 +54,26 @@ def create_assistant_tools(cfg):
         Responds to a user question about a particular result, based on the publications.
         """,
         tool_args_schema = QueryPublicationsArgs,
-        reranker = "multilingual_reranker_v1", rerank_k = 100,
+        # reranker = "multilingual_reranker_v1", rerank_k = 100,
+        reranker = "chain", rerank_k = 100,
+        rerank_chain = [
+            {
+                "type": "multilingual_reranker_v1",
+                # "cutoff": 0.2
+            },
+            {
+                "type": "mmr",
+                "diversity_bias": 0.2,
+                "limit": 50
+            }
+        ],
         n_sentences_before = 2, n_sentences_after = 2, lambda_val = 0.005,
         summary_num_results = 15,
         vectara_summarizer = summarizer,
         include_citations = True,
         vectara_prompt_text=prompt,
         save_history = True,
-        verbose=True
+        verbose=False
     )
 
     search_publications = vec_factory.create_search_tool(
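To make the new retrieval configuration easier to read, here is the first tool's call reassembled from the hunk above as one piece. The method name create_rag_tool, the variable name ask_publications, and the tool_name/tool_description keywords sit outside the hunk and are assumptions; every other argument is taken directly from the diff.

# Sketch only: reconstructed from the hunk, not copied verbatim from agent.py.
# create_rag_tool, ask_publications, tool_name and tool_description are
# assumed names; the remaining arguments appear in the diff.
ask_publications = vec_factory.create_rag_tool(
    tool_name="ask_publications",
    tool_description="""
    Responds to a user question about a particular result, based on the publications.
    """,
    tool_args_schema=QueryPublicationsArgs,
    # Previous single-stage setup, kept as a comment in the commit:
    # reranker="multilingual_reranker_v1", rerank_k=100,
    reranker="chain",
    rerank_k=100,
    rerank_chain=[
        {
            "type": "multilingual_reranker_v1",
            # "cutoff": 0.2   # optional relevance-score cutoff, left disabled
        },
        {
            "type": "mmr",          # maximal-marginal-relevance stage
            "diversity_bias": 0.2,  # mild preference for diverse results
            "limit": 50,            # keep at most 50 results after this stage
        },
    ],
    n_sentences_before=2, n_sentences_after=2, lambda_val=0.005,
    summary_num_results=15,
    vectara_summarizer=summarizer,
    include_citations=True,
    vectara_prompt_text=prompt,
    save_history=True,
    verbose=False,  # the commit also switches this tool from verbose=True to False
)

In short, the commit replaces the single multilingual reranker with a two-stage chain: relevance scoring first, then MMR to diversify the candidates and cap them at 50 before summarization.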
@@ -72,7 +82,19 @@ def create_assistant_tools(cfg):
         Returns matching publications to a user query.
         """,
         tool_args_schema = QueryPublicationsArgs,
-        reranker = "multilingual_reranker_v1", rerank_k = 100,
+        reranker = "chain", rerank_k = 100,
+        rerank_chain = [
+            {
+                "type": "multilingual_reranker_v1",
+                # "cutoff": 0.2
+            },
+            {
+                "type": "mmr",
+                "diversity_bias": 0.2,
+                "limit": 50
+            }
+        ],
+        # reranker = "multilingual_reranker_v1", rerank_k = 100,
         n_sentences_before = 2, n_sentences_after = 2, lambda_val = 0.005,
         save_history = True,
         verbose=True
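The second tool receives the same two-stage chain. Since both calls now carry an identical rerank_chain literal, one possible follow-up (not part of this commit) is to hoist the chain into a shared constant so the two tools cannot drift apart; a minimal sketch, with the constant name chosen purely for illustration:

# Illustrative only: this refactor is not in the commit, and the constant
# name is invented.
PUBLICATIONS_RERANK_CHAIN = [
    {"type": "multilingual_reranker_v1"},                 # relevance scoring first
    {"type": "mmr", "diversity_bias": 0.2, "limit": 50},  # then diversify, keep top 50
]

search_publications = vec_factory.create_search_tool(
    # tool name/description arguments elided; they sit outside the hunks shown here
    tool_args_schema=QueryPublicationsArgs,
    reranker="chain", rerank_k=100,
    rerank_chain=PUBLICATIONS_RERANK_CHAIN,
    n_sentences_before=2, n_sentences_after=2, lambda_val=0.005,
    save_history=True,
    verbose=True,
)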
 