oort committed on
Commit
f927a30
·
verified ·
1 Parent(s): cc26efa

response.content format

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -81,7 +81,8 @@ def create_prompt_for_image_generation(user_prompt: str) -> str:
81
  # prompt=prompt, temperature=1., max_tokens=512)
82
  response = model(messages, stop_sequences=["END"])
83
  # return response['choices'][0]['text']
84
- return response['choices'][0]['message']['content']
 
85
 
86
  except Exception as e:
87
  print(f"Error during LLM call: {str(e)}")
 
81
  # prompt=prompt, temperature=1., max_tokens=512)
82
  response = model(messages, stop_sequences=["END"])
83
  # return response['choices'][0]['text']
84
+ # return response['choices'][0]['message']['content']
85
+ return response.content
86
 
87
  except Exception as e:
88
  print(f"Error during LLM call: {str(e)}")