oort committed · verified
Commit cc26efa
1 Parent(s): dcb1376

Resume parsing response

Files changed (1): app.py (+1 -1)
app.py CHANGED
@@ -81,7 +81,7 @@ def create_prompt_for_image_generation(user_prompt: str) -> str:
         # prompt=prompt, temperature=1., max_tokens=512)
         response = model(messages, stop_sequences=["END"])
         # return response['choices'][0]['text']
-        return response
+        return response['choices'][0]['message']['content']

     except Exception as e:
         print(f"Error during LLM call: {str(e)}")
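For context, the changed return line assumes the model call yields an OpenAI-style chat-completion dictionary, and the fix extracts only the assistant's text instead of handing back the whole response object. A minimal sketch of what that extraction does; the sample payload below is illustrative, not taken from the repository:

# Hypothetical response shape the new return line assumes
# (OpenAI-style chat completion); sample values are made up.
response = {
    "choices": [
        {
            "message": {
                "role": "assistant",
                "content": "A serene lakeside at dawn, ultra-detailed, 8k",
            }
        }
    ]
}

# Old behaviour: the raw dict was returned to the caller.
# New behaviour: only the generated prompt text is returned.
text = response['choices'][0]['message']['content']
print(text)  # -> "A serene lakeside at dawn, ultra-detailed, 8k"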