Update app.py
app.py
CHANGED
@@ -785,10 +785,6 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
 
 
     completion = chain.invoke({"question":prompt})
-    print("completion text from invoke on line 787")
-    print(completion['source_documents']['metadata'])
-    print(completion['source_documents']['state']['query_similarity_score'])
-
 
 
     chain.memory.load_memory_variables({})
@@ -800,6 +796,12 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
 
     completion = { "content": completion }
 
+    print("completion text from invoke on line 787")
+    print(completion['content']['source_documents']['metadata'])
+    print(completion['content']['source_documents']['state']['query_similarity_score'])
+
+
+
     #chat_messages = [(prompt_msg['content'], completion['content'])]
     chat_messages = [(prompt, completion['content']['answer'])]
     return '', chat_messages, state # total_tokens_used_msg,
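For context, a minimal sketch of what this change assumes: the debug prints are moved below the line that wraps the raw chain result in a dict under a "content" key, so every lookup now has to pass through ['content'] first. The keys shown ('answer', 'source_documents', 'state', 'query_similarity_score') are taken from the diff above; the exact shape of the object returned by chain.invoke depends on how the chain is configured earlier in app.py and is an assumption here, not a confirmed structure.

# Sketch only; `chain` is the retrieval chain built elsewhere in app.py.
completion = chain.invoke({"question": prompt})   # raw result, assumed to carry keys like 'answer' and 'source_documents'
completion = {"content": completion}              # app.py wraps the raw result under 'content'

# After the wrap, lookups go through 'content' first, which is why the diff
# rewrites completion['source_documents'][...] as
# completion['content']['source_documents'][...].
print(completion['content']['answer'])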