awacke1 committed
Commit 60a4e79 · 1 parent: 710e348

Update app.py

Files changed (1):
  1. app.py +30 -10
app.py CHANGED
@@ -164,15 +164,15 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
     collected_messages = []
     key = os.getenv('OPENAI_API_KEY')
     client.api_key = key
-
-    for chunk in client.chat.completions.create(
-            model='gpt-3.5-turbo',
-            messages=conversation,
-            temperature=0.5,
-            stream=True
-    ):
-        collected_chunks.append(chunk)  # save the event response
-        chunk_message = chunk['choices'][0]['delta']  # extract the message
+
+    client = AsyncOpenAI()
+    stream = await client.chat.completions.create(
+        prompt="Say this is a test",
+        messages=[{"role": "user", "content": "Say this is a test"}],
+        stream=True,
+    )
+    async for part in stream:
+        chunk_message = (part.choices[0].delta.content or "")
         collected_messages.append(chunk_message)  # save the message
         content=chunk["choices"][0].get("delta",{}).get("content")
         try:
@@ -182,8 +182,28 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
                 res_box.markdown(f'*{result}*')
         except:
             st.write(' ')
-
     full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
+
+
+    #for chunk in client.chat.completions.create(
+    #        model='gpt-3.5-turbo',
+    #        messages=conversation,
+    #        temperature=0.5,
+    #        stream=True
+    #):
+    #    collected_chunks.append(chunk)  # save the event response
+    #    chunk_message = chunk['choices'][0]['delta']  # extract the message
+    #    collected_messages.append(chunk_message)  # save the message
+    #    content=chunk["choices"][0].get("delta",{}).get("content")
+    #    try:
+    #        report.append(content)
+    #        if len(content) > 0:
+    #            result = "".join(report).strip()
+    #            res_box.markdown(f'*{result}*')
+    #    except:
+    #        st.write(' ')
+
+    # full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
     st.write("Elapsed time:")
     st.write(time.time() - start_time)
     readitaloud(full_reply_content)
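
Note that the added block is not runnable as committed: chat.completions.create takes model and messages (there is no prompt= argument on the chat endpoint), await and async for require chat_with_model to be declared async def, and the unchanged context line still reads from chunk["choices"] even though the loop variable is now part. The lines below are a minimal sketch of the same streaming pattern against the openai>=1.0 AsyncOpenAI client, reusing the model and temperature from the removed call; the helper name stream_chat, the default model argument, and the asyncio.run usage are illustrative stand-ins, while conversation, res_box, and full_reply_content are names already present in app.py.

# Sketch only, assuming the openai>=1.0 Python client; not the code from this commit.
from openai import AsyncOpenAI

async def stream_chat(conversation, res_box, model='gpt-3.5-turbo'):
    """Stream a chat completion and live-update a Streamlit placeholder (res_box)."""
    client = AsyncOpenAI()                 # reads OPENAI_API_KEY from the environment
    collected_messages = []                # accumulated text fragments
    stream = await client.chat.completions.create(
        model=model,                       # chat endpoint takes model + messages, not prompt=
        messages=conversation,
        temperature=0.5,
        stream=True,
    )
    async for part in stream:
        chunk_message = part.choices[0].delta.content or ""   # delta.content can be None
        collected_messages.append(chunk_message)
        res_box.markdown(f"*{''.join(collected_messages).strip()}*")  # incremental render
    return ''.join(collected_messages)

# Hypothetical usage inside app.py (which already imports streamlit as st):
#   import asyncio
#   res_box = st.empty()
#   conversation = [{"role": "user", "content": prompt}]
#   full_reply_content = asyncio.run(stream_chat(conversation, res_box))

If the Streamlit handler stays synchronous, the plain OpenAI() client with an ordinary for-loop (as in the removed lines above, but reading chunk.choices[0].delta.content instead of chunk['choices'][0]['delta']) achieves the same streaming display without the asyncio plumbing.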