Update main.py
main.py CHANGED
@@ -9,6 +9,7 @@ from pydantic import BaseModel
 from typing import List, Dict, Any, Optional
 import time
 import uuid
+import json
 from fastapi.responses import StreamingResponse
 
 # Mock implementations for ImageResponse and to_data_uri
@@ -185,6 +186,7 @@ class ChatRequest(BaseModel):
     model: str
     messages: List[Message]
 
+
 @app.post("/v1/chat/completions")
 async def chat_completions(request: ChatRequest):
     messages = [{"role": msg.role, "content": msg.content} for msg in request.messages]
@@ -202,7 +204,7 @@ async def chat_completions(request: ChatRequest):
         # Stream the response content
         async for chunk in async_generator:
             if isinstance(chunk, str):
-                yield {
+                yield json.dumps({
                     "id": response_id,
                     "object": "chat.completion.chunk",
                     "created": created_timestamp,
@@ -217,9 +219,9 @@ async def chat_completions(request: ChatRequest):
                             "index": 0
                         }
                     ]
-                }
+                }).encode('utf-8')
             else:
-                yield {
+                yield json.dumps({
                     "id": response_id,
                     "object": "chat.completion.chunk",
                     "created": created_timestamp,
@@ -234,6 +236,6 @@ async def chat_completions(request: ChatRequest):
                             "index": 0
                         }
                     ]
-                }
+                }).encode('utf-8')
 
     return StreamingResponse(generate_response(), media_type="application/json")
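For context, the change above replaces yield {...} with yield json.dumps({...}).encode('utf-8'), so the inner generator hands StreamingResponse ready-to-send UTF-8 bytes instead of Python dicts. The following is a minimal, self-contained sketch of that pattern, not the actual main.py: fake_model_stream, its hard-coded tokens, and the "delta" payload shape are hypothetical stand-ins for the provider client and for the chunk fields the diff does not show in full.

# Minimal sketch of the streaming pattern introduced in this commit.
# fake_model_stream and the "delta" chunk shape are assumptions, not
# the implementation in main.py; only the json.dumps(...).encode("utf-8")
# framing and the StreamingResponse call mirror the diff.
import json
import time
import uuid

from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()


async def fake_model_stream():
    # Hypothetical upstream source of text chunks; in main.py these come
    # from the provider's async generator.
    for token in ["Hello", ", ", "world", "!"]:
        yield token


@app.post("/v1/chat/completions")
async def chat_completions():
    response_id = f"chatcmpl-{uuid.uuid4()}"
    created_timestamp = int(time.time())

    async def generate_response():
        async for chunk in fake_model_stream():
            # Serialize each chunk to JSON and encode it as UTF-8 bytes,
            # matching the change made in this commit.
            yield json.dumps({
                "id": response_id,
                "object": "chat.completion.chunk",
                "created": created_timestamp,
                "choices": [
                    {"delta": {"content": chunk}, "index": 0}
                ],
            }).encode("utf-8")

    return StreamingResponse(generate_response(), media_type="application/json")

Under these assumptions the sketch can be run with uvicorn and exercised by POSTing a chat payload to /v1/chat/completions; each yielded chunk then arrives on the wire as its own UTF-8 encoded JSON document.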