codelion committed on
Commit 0e83379 · verified · 1 Parent(s): 2c92be1

Update app.py

Files changed (1)
  1. app.py +4 -3
app.py CHANGED
@@ -45,7 +45,7 @@ def generate_item(tag, item_index):
     style = random.choice(styles)
     perspective = random.choice(perspectives)
 
-    # Generate text with high temperature for diversity
+    # Generate text with high temperature for diversity, using the correct config
     prompt = f"""
     Generate a short, engaging TikTok-style caption about {tag}.
     Return the response as a JSON object with a single key 'caption' containing the caption text.
@@ -53,11 +53,12 @@ def generate_item(tag, item_index):
     Do not include additional commentary or options.
     Use creative and varied language to ensure uniqueness.
     """
-    # Adjusted to pass temperature directly, assuming SDK supports it
     text_response = client.models.generate_content(
         model='gemini-2.5-flash-preview-04-17',
         contents=[prompt],
-        temperature=1.2  # Pass temperature directly instead of generation_config
+        config=types.GenerateContentConfig(
+            temperature=1.2  # High temperature for diversity, passed via config
+        )
     )
     # Parse JSON response to extract the caption
     try:
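
For context, a minimal standalone sketch of the corrected call pattern with the google-genai SDK. Only the generate_content call with config=types.GenerateContentConfig mirrors the commit; the API-key handling, the example prompt, and the JSON parsing below are illustrative assumptions, not part of app.py.

```python
# Sketch, assuming the google-genai SDK and an API key in the environment.
import json
import os

from google import genai
from google.genai import types

client = genai.Client(api_key=os.environ["GEMINI_API_KEY"])  # assumed key source

prompt = (
    "Generate a short, engaging TikTok-style caption about sunsets. "
    "Return the response as a JSON object with a single key 'caption'."
)

response = client.models.generate_content(
    model="gemini-2.5-flash-preview-04-17",
    contents=[prompt],
    # Sampling options go through GenerateContentConfig rather than as a bare
    # keyword argument, which is the change this commit makes.
    config=types.GenerateContentConfig(temperature=1.2),
)

# Illustrative parsing: strip possible markdown fences, then read the caption.
text = response.text.strip().removeprefix("```json").removesuffix("```").strip()
try:
    caption = json.loads(text)["caption"]
except (json.JSONDecodeError, KeyError):
    caption = response.text  # fall back to the raw model output
print(caption)
```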