mominah committed
Commit 48318ed · verified · 1 Parent(s): 4c238b3

Update chat.py

Files changed (1): chat.py (+18 -10)
chat.py CHANGED
@@ -118,17 +118,25 @@ async def post_message(
 
     async def stream_generator():
         full_response = ""
-        # Pass messages list as the first positional argument to .stream()
+        # Pass messages list as positional 'input' to .stream()
         for chunk in llm.stream(messages):
-            # adapt to ChatGroq response structure
-            content = (
-                chunk.get("content")
-                or chunk.get("choices", [{}])[0].get("delta", {}).get("content")
-            )
-            if content:
-                yield content
-                full_response += content
-        # save final AI message
+            # 1) Try AIMessageChunk.content
+            content = getattr(chunk, "content", None)
+            # 2) Fallback to dict-based chunk
+            if content is None and isinstance(chunk, dict):
+                content = (
+                    chunk.get("content")
+                    or chunk.get("choices", [{}])[0]
+                    .get("delta", {})
+                    .get("content")
+                )
+            if not content:
+                continue
+            # Yield and accumulate
+            yield content
+            full_response += content
+
+        # Save final AI message
         history.add_ai_message(full_response)
 
     return StreamingResponse(stream_generator(), media_type="text/plain")
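
For context, a minimal self-contained sketch of how the fixed stream_generator could sit inside chat.py. It assumes a FastAPI app with a LangChain ChatGroq client; the route path, model name, and prompt handling below are illustrative stand-ins rather than code from this commit, while the chunk handling, stream_generator, and StreamingResponse return mirror the diff above.

from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from langchain_groq import ChatGroq

app = FastAPI()
# Assumed model name; requires GROQ_API_KEY in the environment.
llm = ChatGroq(model="llama-3.1-8b-instant")

@app.post("/chat")  # illustrative route; the real post_message signature is elided in the diff
async def post_message(prompt: str):
    # Stand-in for the message list built earlier in chat.py.
    messages = [("human", prompt)]

    async def stream_generator():
        full_response = ""
        for chunk in llm.stream(messages):
            # ChatGroq yields AIMessageChunk objects; .content carries the text delta.
            content = getattr(chunk, "content", None)
            # Dict-shaped fallback, as in the commit, for OpenAI-style payloads.
            if content is None and isinstance(chunk, dict):
                content = (
                    chunk.get("content")
                    or chunk.get("choices", [{}])[0].get("delta", {}).get("content")
                )
            if not content:
                continue
            yield content
            full_response += content
        # history.add_ai_message(full_response) would persist the reply here,
        # as in the original; `history` is omitted from this sketch.

    return StreamingResponse(stream_generator(), media_type="text/plain")

The order of the two lookups is the substance of the change: ChatGroq's .stream() yields AIMessageChunk objects, which expose .content but have no .get() method, so the old dict-style access would raise AttributeError on the first chunk; the dict branch is kept only as a fallback for OpenAI-style delta payloads.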