response.content format
app.py CHANGED
@@ -81,7 +81,8 @@ def create_prompt_for_image_generation(user_prompt: str) -> str:
         # prompt=prompt, temperature=1., max_tokens=512)
         response = model(messages, stop_sequences=["END"])
         # return response['choices'][0]['text']
-        return response['choices'][0]['message']['content']
+        # return response['choices'][0]['message']['content']
+        return response.content
 
     except Exception as e:
         print(f"Error during LLM call: {str(e)}")
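The change assumes the model wrapper called here returns a message-like object exposing a .content string, rather than the OpenAI-style nested dict the old code indexed into. A minimal sketch of that interface is below; the ChatMessage class, the stub model function, and the messages layout are illustrative stand-ins, not taken from app.py.

from dataclasses import dataclass

@dataclass
class ChatMessage:
    # Stand-in for the message object returned by the real LLM wrapper.
    content: str

def model(messages, stop_sequences=None) -> ChatMessage:
    # Stand-in for the real model callable used by the Space.
    return ChatMessage(content="...generated image prompt...")

def create_prompt_for_image_generation(user_prompt: str) -> str:
    messages = [{"role": "user", "content": user_prompt}]  # illustrative layout
    try:
        response = model(messages, stop_sequences=["END"])
        # Old OpenAI-style access no longer matches this return type:
        # return response['choices'][0]['message']['content']
        return response.content
    except Exception as e:
        print(f"Error during LLM call: {str(e)}")
        return ""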