cwhuh committed on
Commit 138e240 · 1 Parent(s): 08eced3

debug : add log

__pycache__/llm_wrapper.cpython-310.pyc CHANGED
Binary files a/__pycache__/llm_wrapper.cpython-310.pyc and b/__pycache__/llm_wrapper.cpython-310.pyc differ
 
llm_wrapper.py CHANGED
@@ -89,6 +89,7 @@ def run_gemini(
         model=model,
         contents=input_content,
     )
+    print(f"Chat Completion: {chat_completion}")
 
     chat_output = chat_completion.parsed
     input_token = chat_completion.usage_metadata.prompt_token_count
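
For context: the attributes used in this hunk (chat_completion.parsed, usage_metadata.prompt_token_count) match the google-genai Python SDK's structured-output response, so the surrounding run_gemini body plausibly looks like the sketch below. The client setup, config, and Answer schema are assumptions for illustration only, not part of this commit.

# Sketch of the assumed surrounding call in llm_wrapper.py (google-genai SDK).
from google import genai
from google.genai import types
from pydantic import BaseModel


class Answer(BaseModel):
    # Hypothetical response schema; the repo's actual schema is not shown in this diff.
    text: str


def run_gemini(input_content: str, model: str):
    client = genai.Client()  # picks up the API key from the environment
    chat_completion = client.models.generate_content(
        model=model,
        contents=input_content,
        config=types.GenerateContentConfig(
            response_mime_type="application/json",
            response_schema=Answer,
        ),
    )
    # The line added in this commit: dump the raw response for debugging.
    print(f"Chat Completion: {chat_completion}")

    chat_output = chat_completion.parsed
    input_token = chat_completion.usage_metadata.prompt_token_count
    return chat_output, input_token

Printing the whole response object right after the call exposes both the parsed payload and usage_metadata in the runtime logs, which is presumably the point of this debug commit.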