Chandima Prabhath committed on
Commit 8deb81b · 1 Parent(s): 97cefd5

Enhance whatsapp_webhook to generate and send voice prompts based on LLM replies.

Files changed (1)
app.py +4 -1
app.py CHANGED
@@ -402,7 +402,10 @@ async def whatsapp_webhook(request: Request):
         return {"success": True}
 
     # Fallback → voice reply
-    task_queue.put({"type":"audio","message_id":mid,"chat_id":chat_id,"prompt":body})
+    reply = generate_llm(body)
+    client.send_message(mid, chat_id, reply)
+    voice_prompt = f"Speak only this dialog: {reply}"
+    task_queue.put({"type":"audio","message_id":mid,"chat_id":chat_id,"prompt":voice_prompt})
     return {"success": True}
 
 @app.get("/", response_class=PlainTextResponse)