Update app.py
app.py CHANGED
@@ -112,11 +112,11 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
     )


-    system_prompt = []
-    if prompt_template:
-        system_prompt = [{ "role": "system", "content": prompt_template }]
+    # system_prompt = []
+    #if prompt_template:
+    # system_prompt = [{ "role": "system", "content": prompt_template }]

-    prompt_msg = { "role": "user", "content": prompt }
+    # prompt_msg = { "role": "user", "content": prompt }


     # The embeddings file has to be remade since the serialization is no long compatible
@@ -126,13 +126,14 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
     # new_docsearch = pickle.load(f)


-    query = str(system_prompt + history + [prompt_msg])
+    #query = str(system_prompt + history + [prompt_msg])

     # docs = new_docsearch.similarity_search(query)

-    gen_ai = ChatGoogleGenerativeAI(model="gemini-pro")
+    gen_ai = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.7, top_p=0.85)
     response = gen_ai.invoke(
-        input=
+        input=prompt,
+        context=history,
         #max_tokens=max_tokens, # for open ai only
         #temperature=temperature # for open ai only
     )
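For reference, below is a minimal standalone sketch of the new ChatGoogleGenerativeAI call path, assuming the langchain-google-genai package is installed and GOOGLE_API_KEY is set in the environment. The history and prompt values are illustrative placeholders, not taken from app.py, and the sketch folds the conversation history into the message list passed to invoke() instead of the context= keyword used in the diff, since the message-list form is the invocation style I can vouch for.

# Sketch only: history and prompt below are placeholders, not app.py's variables.
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.messages import AIMessage, HumanMessage

# Same model and sampling settings as the updated gen_ai line in the diff.
gen_ai = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.7, top_p=0.85)

# invoke() accepts a plain string or a list of chat messages; passing earlier
# turns as messages is one way to carry the history that the commented-out
# system_prompt / prompt_msg code used to assemble by hand.
history = [
    HumanMessage(content="Hello, can you answer questions about my document?"),
    AIMessage(content="Yes, ask away."),
]
prompt = "Summarise the main points."

response = gen_ai.invoke(history + [HumanMessage(content=prompt)])
print(response.content)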