Commit · b8d023d
1 Parent(s): 61d1de3
Update answer on stack of array
app.py CHANGED
@@ -114,9 +114,15 @@ def call_gemini(prompt: str, vision_parts=None) -> str:
     resp = client.models.generate_content(
         model="gemini-2.5-flash-preview-04-17", **kwargs
     )
-
-
-
+    # Join all .text fields in case Gemini responds in multiple parts.
+    try:
+        text = "".join(part.text for part in resp.candidates[0].content.parts)
+    except Exception as e:
+        logger.error(f"[LLM] ❌ Failed to parse Gemini response: {e}")
+        raise RuntimeError("Gemini API response format error")
+    # Handle response
+    logger.info(f"[LLM] Response: {text}")
+    return text.strip()
 
 @app.post("/voice-transcribe")
 async def voice_transcribe(file: UploadFile = File(...)):