Commit c6bb28f (parent: 7b28d3b)
Update app.py

app.py CHANGED
@@ -103,7 +103,7 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
     # completion = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=system_prompt + history[-context_length*2:] + [prompt_msg], temperature=temperature, max_tokens=max_tokens)

     completion = RetrievalQA.from_chain_type(llm=ChatOpenAI(temperature=temperature, max_tokens=max_tokens, model_name="gpt-3.5-turbo"), chain_type="stuff", retriever=vectordb.as_retriever() , return_source_documents=True)
-    completion = completion({"query": system_prompt +
+    completion = completion({"query": system_prompt + [prompt_msg] })
     # from https://blog.devgenius.io/chat-with-document-s-using-openai-chatgpt-api-and-text-embedding-6a0ce3dc8bc8

     history.append(prompt_msg)
@@ -118,10 +118,10 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
             "content": f"Error: {e}"
         })

-
+    total_tokens_used_msg = f"Total tokens used: {state['total_tokens']}"
     chat_messages = [(history[i]['content'], history[i+1]['content']) for i in range(0, len(history)-1, 2)]

-    return '', chat_messages
+    return '', chat_messages, total_tokens_used_msg, state

 def clear_conversation():
     return gr.update(value=None, visible=True), None, "", get_empty_state()
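The first hunk keeps the commented-out openai.ChatCompletion call, builds a LangChain RetrievalQA chain over the vector store, and now passes system_prompt + [prompt_msg] as the query. For context, here is a minimal sketch of that call pattern; the helper name answer_question and the plain-string question argument are illustrative assumptions (the chain's "query" input is ordinarily a string, whereas the commit passes a list of message dicts), and vectordb is assumed to be the Space's existing vector store.

# Minimal sketch of the RetrievalQA call pattern used in the hunk above.
# Assumptions: `vectordb` is an already-built vector store (e.g. Chroma/FAISS)
# and OPENAI_API_KEY is set in the environment.
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI

def answer_question(vectordb, question, temperature=0.7, max_tokens=512):
    # Hypothetical helper, not part of app.py.
    chain = RetrievalQA.from_chain_type(
        llm=ChatOpenAI(temperature=temperature, max_tokens=max_tokens,
                       model_name="gpt-3.5-turbo"),
        chain_type="stuff",                 # stuff retrieved chunks into one prompt
        retriever=vectordb.as_retriever(),
        return_source_documents=True,
    )
    # The chain takes a plain-string "query" and returns a dict with
    # "result" (the answer) and "source_documents" (the retrieved chunks).
    output = chain({"query": question})
    return output["result"], output["source_documents"]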
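The second hunk starts reporting a running token count: it builds total_tokens_used_msg from state['total_tokens'] and returns it, together with state, alongside the chat history. The diff does not show where that counter is incremented; the sketch below shows one way it could be accumulated with LangChain's get_openai_callback, offered as an assumption rather than the Space's actual implementation.

# Hedged sketch: one possible way to keep state["total_tokens"] up to date.
from langchain.callbacks import get_openai_callback

def run_with_token_count(chain, question, state):
    # `chain` is a RetrievalQA instance as above; `state` is the Gradio state dict.
    with get_openai_callback() as cb:       # records OpenAI token usage for this block
        output = chain({"query": question})
    state["total_tokens"] = state.get("total_tokens", 0) + cb.total_tokens
    total_tokens_used_msg = f"Total tokens used: {state['total_tokens']}"
    return output["result"], total_tokens_used_msg, state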