Niansuh committed
Commit 6190365 · verified · 1 Parent(s): ea3d3c5

Update main.py

Files changed (1)
  1. main.py +40 -21
main.py CHANGED
@@ -9,6 +9,7 @@ from pydantic import BaseModel
 from typing import List, Dict, Any, Optional
 import time
 import uuid
+from fastapi.responses import StreamingResponse
 
 # Mock implementations for ImageResponse and to_data_uri
 class ImageResponse:
@@ -194,27 +195,45 @@ async def chat_completions(request: ChatRequest):
         messages=messages
     )
 
-    response_content = ""
-    async for chunk in async_generator:
-        response_content += chunk if isinstance(chunk, str) else chunk.content  # Concatenate response
-
-    # Generate a unique ID and use the current timestamp
     response_id = f"chatcmpl-{uuid.uuid4()}"
     created_timestamp = int(time.time())
 
-    return {
-        "id": response_id,
-        "object": "chat.completion",
-        "created": created_timestamp,
-        "model": request.model,
-        "choices": [
-            {
-                "message": {
-                    "role": "assistant",
-                    "content": response_content
-                },
-                "finish_reason": "stop",
-                "index": 0
-            }
-        ]
-    }
+    async def generate_response():
+        # Stream the response content
+        async for chunk in async_generator:
+            if isinstance(chunk, str):
+                yield {
+                    "id": response_id,
+                    "object": "chat.completion.chunk",
+                    "created": created_timestamp,
+                    "model": request.model,
+                    "choices": [
+                        {
+                            "message": {
+                                "role": "assistant",
+                                "content": chunk
+                            },
+                            "finish_reason": None,
+                            "index": 0
+                        }
+                    ]
+                }
+            else:
+                yield {
+                    "id": response_id,
+                    "object": "chat.completion.chunk",
+                    "created": created_timestamp,
+                    "model": request.model,
+                    "choices": [
+                        {
+                            "message": {
+                                "role": "assistant",
+                                "content": chunk.content
+                            },
+                            "finish_reason": None,
+                            "index": 0
+                        }
+                    ]
+                }
+
+    return StreamingResponse(generate_response(), media_type="application/json")
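
A note on the streamed payloads: Starlette's StreamingResponse (which FastAPI re-exports) sends each yielded item as str or bytes, so yielding raw dicts as in this commit would fail once the body is written. Below is a minimal, self-contained sketch, not the repository's code, showing the same endpoint with each chunk JSON-encoded before it is yielded; fake_generator, the route path, and "mock-model" are illustrative stand-ins, and application/x-ndjson is one conventional media type for newline-delimited JSON. The commit also keeps a "message" object in each chunk, whereas OpenAI-compatible clients expect streaming chunks to carry "delta"; the sketch preserves the commit's shape.

import json
import time
import uuid

from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()

async def fake_generator():
    # Illustrative stand-in for the upstream async_generator in main.py.
    for piece in ("Hel", "lo"):
        yield piece

@app.post("/v1/chat/completions")
async def chat_completions():
    response_id = f"chatcmpl-{uuid.uuid4()}"
    created_timestamp = int(time.time())

    async def generate_response():
        async for chunk in fake_generator():
            # Collapses the commit's duplicated if/else branches.
            content = chunk if isinstance(chunk, str) else chunk.content
            payload = {
                "id": response_id,
                "object": "chat.completion.chunk",
                "created": created_timestamp,
                "model": "mock-model",
                "choices": [
                    {
                        "message": {"role": "assistant", "content": content},
                        "finish_reason": None,
                        "index": 0,
                    }
                ],
            }
            # Serialize each chunk: StreamingResponse needs str or bytes.
            yield json.dumps(payload) + "\n"

    return StreamingResponse(generate_response(), media_type="application/x-ndjson")

A client can then read the stream line by line, for example with httpx: open the request via client.stream("POST", ...) and parse each line from iter_lines() as JSON.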