Deadmon committed
Commit c92a90d · verified · 1 Parent(s): eda6dd3

Update app.py

Files changed (1)
  1. app.py +64 -167
app.py CHANGED
@@ -20,44 +20,44 @@ IMAGES_DIR = os.path.join(OUTPUT_DIR, "images")
  ZIP_FILE = os.path.join(OUTPUT_DIR, "images.zip")
  TRACKING_FILE = os.path.join(OUTPUT_DIR, "used_pages.json")

- # Ensure output directory exists
  os.makedirs(OUTPUT_DIR, exist_ok=True)

  # Constants
  ITEMS_PER_PAGE = 40
- DAILY_IMAGE_LIMIT = 2000 # Adjusted for free tier limits
- MAX_PAGES = min(DAILY_IMAGE_LIMIT // ITEMS_PER_PAGE, 10) # Limit to 10 pages
+ DAILY_IMAGE_LIMIT = 2000
+ MAX_PAGES = min(DAILY_IMAGE_LIMIT // ITEMS_PER_PAGE, 10)
  IMAGES_PER_ROW = 4
  MAX_ROWS = 6
  TOTAL_IMAGES = IMAGES_PER_ROW * MAX_ROWS

- # API Configurations (Hardcoded keys)
+ # API Configurations (Your original keys reinserted)
  API_CONFIGS = {
  "pexels": {
  "base_url": "https://api.pexels.com/v1/search",
  "headers": {"Authorization": "klHADHclpse2e2xSP9h747AgfE1Rx0wioemGhXYtedjZzvJ1WBUKwz7g"},
  "image_key": "src.medium",
  "result_key": "photos",
- "delay": 2 # Pexels: 200/hour free tier
+ "delay": 2
  },
  "unsplash": {
  "base_url": "https://api.unsplash.com/search/photos",
- "headers": {"Authorization": "Client-ID UKkhpD_Rs5-s1gIlVX28iNs_8E4ysPhQniyIpDpKUnU"},
+ "headers": {},
  "image_key": "urls.small",
  "result_key": "results",
- "delay": 2 # Unsplash: 50/hour free tier
+ "delay": 2,
+ "client_id": "UKkhpD_Rs5-s1gIlVX28iNs_8E4ysPhQniyIpDpKUnU" # Moved to params in fetch_image_urls
  },
  "pixabay": {
  "base_url": "https://pixabay.com/api/",
  "headers": {},
  "image_key": "webformatURL",
  "result_key": "hits",
- "delay": 1 # Pixabay: 5000/hour
+ "delay": 1,
+ "api_key": "45122300-cd3621e1539e8e95430ee3efc" # Used in fetch_image_urls
  }
  }

  def load_used_pages():
- """Load or initialize the used pages tracking file."""
  today = str(date.today())
  if os.path.exists(TRACKING_FILE):
  with open(TRACKING_FILE, "r") as f:
@@ -69,12 +69,10 @@ def load_used_pages():
  return data

  def save_used_pages(data):
- """Save the used pages tracking file."""
  with open(TRACKING_FILE, "w") as f:
  json.dump(data, f)

  def get_available_pages(num_pages_needed, api_name):
- """Get a list of unused page numbers for the specified API."""
  data = load_used_pages()
  used_pages = set(data["used_pages"].get(api_name, []))
  all_pages = set(range(1, MAX_PAGES + 1))
@@ -90,7 +88,6 @@ def get_available_pages(num_pages_needed, api_name):
  return selected_pages

  def fetch_image_urls(api_name, category, num_images):
- """Fetch image URLs from the specified API based on category and desired number of images."""
  config = API_CONFIGS[api_name]
  num_pages_needed = (num_images + ITEMS_PER_PAGE - 1) // ITEMS_PER_PAGE
  pages = get_available_pages(num_pages_needed, api_name)
@@ -103,57 +100,69 @@ def fetch_image_urls(api_name, category, num_images):
  data = load_used_pages()
  for page in pages:
  if api_name == "pixabay":
- url = f"{config['base_url']}?key=45122300-cd3621e1539e8e95430ee3efc&q={category.lower()}&per_page={ITEMS_PER_PAGE}&page={page}"
- else:
- url = f"{config['base_url']}?query={category.lower()}&per_page={ITEMS_PER_PAGE}&page={page}"
+ url = f"{config['base_url']}?key={config['api_key']}&q={category.lower()}&per_page={ITEMS_PER_PAGE}&page={page}"
+ params = {}
+ elif api_name == "unsplash":
+ url = config["base_url"]
+ params = {
+ "query": category.lower(),
+ "per_page": ITEMS_PER_PAGE,
+ "page": page,
+ "client_id": config["client_id"]
+ }
+ else: # pexels
+ url = config["base_url"]
+ params = {
+ "query": category.lower(),
+ "per_page": ITEMS_PER_PAGE,
+ "page": page
+ }

  try:
- logger.info(f"Requesting {url} for {api_name}")
- time.sleep(config.get("delay", 0)) # Respect API rate limits
- response = requests.get(url, headers=config["headers"], timeout=10)
+ logger.info(f"Requesting {url} with params {params} for {api_name}")
+ time.sleep(config.get("delay", 0))
+ response = requests.get(url, headers=config["headers"], params=params, timeout=10)
  response.raise_for_status()
  data_response = response.json()

- if config["result_key"] not in data_response or not data_response[config["result_key"]]:
- logger.warning(f"No {config['result_key']} found in response for page {page} from {api_name}. Response: {data_response}")
+ results = data_response.get(config["result_key"], [])
+ if not results:
+ logger.warning(f"No {config['result_key']} in response for {api_name}, page {page}: {data_response}")
  continue

  page_urls = []
- for item in data_response[config["result_key"]]:
+ for item in results:
  if len(image_urls) >= num_images:
  break
  image_url = item.get(config["image_key"])
  if image_url:
  page_urls.append(image_url)
  else:
- logger.warning(f"No {config['image_key']} found in item: {item}")
+ logger.warning(f"No {config['image_key']} in item: {item}")
+
  if page_urls:
  image_urls.extend(page_urls)
  data["used_pages"].setdefault(api_name, []).append(page)
  save_used_pages(data)
  logger.info(f"Successfully fetched {len(page_urls)} images from page {page} for {api_name}")
  else:
- logger.warning(f"No valid image URLs extracted from page {page} for {api_name}")
+ logger.warning(f"No valid URLs extracted from page {page} for {api_name}")
  except requests.exceptions.RequestException as e:
  logger.error(f"Error fetching page {page} from {api_name}: {e}")
- logger.error(f"Response status: {response.status_code}, content: {response.text}")
+ logger.error(f"Response: {response.text}")
  if response.status_code == 401:
- logger.error(f"401 Unauthorized for {api_name}. Check your API key.")
- elif response.status_code == 400:
- logger.error(f"400 Bad Request for {api_name}. Check parameters or API key.")
+ logger.error(f"401 Unauthorized for {api_name}. Replace API key.")
  elif response.status_code == 429:
- logger.error(f"429 Rate Limit Exceeded for {api_name}. Wait before retrying.")
+ logger.error(f"429 Rate Limit Exceeded for {api_name}. Increase delay or wait.")
  if page in data["used_pages"].get(api_name, []):
  data["used_pages"][api_name].remove(page)
  save_used_pages(data)
- logger.info(f"Removed failed page {page} from {api_name}")
  break

  logger.info(f"Total URLs fetched for {api_name}: {len(image_urls)}")
  return image_urls[:num_images]

  def download_images(image_urls):
- """Download images from the provided URLs and save to IMAGES_DIR."""
  if not image_urls:
  logger.warning("No image URLs provided to download")
  return 0, []
@@ -173,7 +182,7 @@ def download_images(image_urls):
  for chunk in response.iter_content(chunk_size=8192):
  if chunk:
  f.write(chunk)
- Image.open(image_path).verify() # Verify it's a valid image
+ Image.open(image_path).verify()
  downloaded_count += 1
  image_paths.append(image_path)
  logger.info(f"Downloaded {idx}/{len(image_urls)}: {url}")
@@ -186,17 +195,15 @@ def download_images(image_urls):
  return downloaded_count, image_paths

  def create_zip_file(selected_image_paths):
- """Create a ZIP file of the selected images."""
  if os.path.exists(ZIP_FILE):
  os.remove(ZIP_FILE)
- with zipfile.ZipFile(ZIP_FILE, 'w', zipfile.ZIP_DEFLATED) as zipf:
+ with zipfile.ZipFile(ZIP_FILE, "w", zipfile.ZIP_DEFLATED) as zipf:
  for image_path in selected_image_paths:
  arcname = os.path.relpath(image_path, OUTPUT_DIR)
  zipf.write(image_path, arcname)
  return ZIP_FILE

  def process_and_display(api_name, category, num_images):
- """Fetch and download images, then prepare data for display."""
  num_images = int(num_images)
  if num_images > 24:
  num_images = 24
@@ -220,7 +227,6 @@ def process_and_display(api_name, category, num_images):
  return status, None, image_paths, image_outputs, checkbox_outputs

  def process_zip_submission(image_paths, *checkbox_states):
- """Create a ZIP file based on the selected images."""
  if not image_paths:
  return "No images available to process.", None

@@ -232,77 +238,21 @@
  logger.info(f"ZIP file created with {len(selected_image_paths)} images at {zip_path}")
  return f"ZIP file created with {len(selected_image_paths)} images at {zip_path}", zip_path

- # Custom CSS (unchanged)
+ # Gradio Interface (unchanged CSS and layout)
  css = """
- .fetch-button {
- background-color: #4CAF50;
- color: white;
- padding: 10px 20px;
- border: none;
- border-radius: 5px;
- cursor: pointer;
- }
- .fetch-button:hover {
- background-color: #45a049;
- }
- .zip-button {
- background-color: #2196F3;
- color: white;
- padding: 10px 20px;
- border: none;
- border-radius: 5px;
- cursor: pointer;
- }
- .zip-button:hover {
- background-color: #1e88e5;
- }
- .status-box {
- border: 1px solid #ddd;
- background-color: #f9f9f9;
- padding: 10px;
- border-radius: 5px;
- }
- .input-group {
- border: 1px solid #ddd;
- padding: 15px;
- border-radius: 5px;
- background-color: #f0f0f0;
- }
- .image-container {
- position: relative;
- width: 100%;
- height: 150px;
- overflow: hidden;
- border-radius: 5px;
- }
- .image-container img {
- width: 100%;
- height: 100%;
- object-fit: cover;
- }
- .overlay {
- position: absolute;
- bottom: 5px;
- right: 5px;
- background-color: rgba(0, 0, 0, 0.6);
- padding: 5px;
- border-radius: 5px;
- display: flex;
- align-items: center;
- gap: 5px;
- color: white;
- font-size: 12px;
- }
- .overlay label {
- margin: 0;
- color: white;
- }
- .overlay input[type="checkbox"] {
- margin: 0;
- }
+ .fetch-button { background-color: #4CAF50; color: white; padding: 10px 20px; border: none; border-radius: 5px; cursor: pointer; }
+ .fetch-button:hover { background-color: #45a049; }
+ .zip-button { background-color: #2196F3; color: white; padding: 10px 20px; border: none; border-radius: 5px; cursor: pointer; }
+ .zip-button:hover { background-color: #1e88e5; }
+ .status-box { border: 1px solid #ddd; background-color: #f9f9f9; padding: 10px; border-radius: 5px; }
+ .input-group { border: 1px solid #ddd; padding: 15px; border-radius: 5px; background-color: #f0f0f0; }
+ .image-container { position: relative; width: 100%; height: 150px; overflow: hidden; border-radius: 5px; }
+ .image-container img { width: 100%; height: 100%; object-fit: cover; }
+ .overlay { position: absolute; bottom: 5px; right: 5px; background-color: rgba(0, 0, 0, 0.6); padding: 5px; border-radius: 5px; display: flex; align-items: center; gap: 5px; color: white; font-size: 12px; }
+ .overlay label { margin: 0; color: white; }
+ .overlay input[type="checkbox"] { margin: 0; }
  """

- # Gradio Interface (unchanged)
  with gr.Blocks(title="Stock Photo Downloader", css=css) as demo:
  gr.Markdown("<h1 style='text-align: center; color: #333;'>📸 Stock Photo Downloader</h1>")
  gr.Markdown("<p style='text-align: center; color: #666;'>Fetch high-quality stock photos from Pexels, Unsplash, and Pixabay.</p>")
@@ -310,39 +260,13 @@ with gr.Blocks(title="Stock Photo Downloader", css=css) as demo:
  with gr.Group(elem_classes=["input-group"]):
  gr.Markdown("### 🔍 Choose Your Parameters")
  with gr.Row():
- api_input = gr.Dropdown(
- label="API Source",
- choices=["pexels", "unsplash", "pixabay"],
- value="pexels",
- info="Select the stock photo provider."
- )
- category_input = gr.Dropdown(
- label="Category",
- choices=["nature", "business", "people", "technology", "food", "travel", "animals", "fashion"],
- value="nature",
- allow_custom_value=True,
- info="Choose a category or enter a custom keyword."
- )
- num_images_input = gr.Dropdown(
- label="Number of Images (Max 24)",
- choices=["4", "8", "12", "16", "20", "24"],
- value="4",
- info="How many images to fetch (up to 24)."
- )
- download_button = gr.Button(
- "Fetch and Display Images",
- elem_classes=["fetch-button"],
- elem_id="fetch-button"
- )
+ api_input = gr.Dropdown(label="API Source", choices=["pexels", "unsplash", "pixabay"], value="pexels", info="Select the stock photo provider.")
+ category_input = gr.Dropdown(label="Category", choices=["nature", "business", "people", "technology", "food", "travel", "animals", "fashion"], value="nature", allow_custom_value=True, info="Choose a category or enter a custom keyword.")
+ num_images_input = gr.Dropdown(label="Number of Images (Max 24)", choices=["4", "8", "12", "16", "20", "24"], value="4", info="How many images to fetch (up to 24).")
+ download_button = gr.Button("Fetch and Display Images", elem_classes=["fetch-button"])

  gr.Markdown("### 📊 Status")
- status_output = gr.Textbox(
- label="Status",
- interactive=False,
- placeholder="Status updates will appear here...",
- elem_classes=["status-box"],
- show_label=False
- )
+ status_output = gr.Textbox(label="Status", interactive=False, placeholder="Status updates will appear here...", elem_classes=["status-box"], show_label=False)

  gr.Markdown("### 💾 Download Your Images")
  zip_output = gr.File(label="Download ZIP", visible=False)
@@ -353,43 +277,25 @@ with gr.Blocks(title="Stock Photo Downloader", css=css) as demo:

  image_outputs = []
  checkbox_outputs = []
-
  for row in range(MAX_ROWS):
  with gr.Row():
  for col in range(IMAGES_PER_ROW):
  idx = row * IMAGES_PER_ROW + col
  with gr.Column(min_width=150):
  with gr.Group(elem_classes=["image-container"]):
- image_output = gr.Image(
- label=f"Image {idx+1}",
- show_label=False,
- visible=False,
- height=150,
- width=150
- )
+ image_output = gr.Image(label=f"Image {idx+1}", show_label=False, visible=False, height=150, width=150)
  with gr.Row(elem_classes=["overlay"]):
- checkbox_output = gr.Checkbox(
- label=f"Image {idx+1}",
- value=False,
- visible=False,
- scale=0
- )
+ checkbox_output = gr.Checkbox(label=f"Image {idx+1}", value=False, visible=False, scale=0)
  image_outputs.append(image_output)
  checkbox_outputs.append(checkbox_output)

  gr.Markdown("### 📦 Create ZIP File")
- submit_button = gr.Button(
- "Create ZIP of Selected Images",
- elem_classes=["zip-button"],
- elem_id="zip-button"
- )
+ submit_button = gr.Button("Create ZIP of Selected Images", elem_classes=["zip-button"])

  def on_download(api_name, category, num_images):
  status, zip_path, image_paths, image_outs, checkbox_outs = process_and_display(api_name, category, num_images)
  return (
- status,
- zip_path,
- image_paths,
+ status, zip_path, image_paths,
  *[gr.Image(value=img, visible=img is not None, label=f"Image {i+1}", height=150, width=150) if img else gr.Image(value=None, visible=False) for i, img in enumerate(image_outs)],
  *[gr.Checkbox(value=chk, visible=i < len(image_paths), label=f"Image {i+1}", scale=0) for i, chk in enumerate(checkbox_outs)]
  )
@@ -398,16 +304,7 @@ with gr.Blocks(title="Stock Photo Downloader", css=css) as demo:
  status, zip_path = process_zip_submission(image_paths, *checkbox_states)
  return status, gr.File(value=zip_path, visible=True) if zip_path else gr.File(visible=False)

- download_button.click(
- fn=on_download,
- inputs=[api_input, category_input, num_images_input],
- outputs=[status_output, zip_output, image_paths_state] + image_outputs + checkbox_outputs
- )
-
- submit_button.click(
- fn=on_submit,
- inputs=[image_paths_state] + checkbox_outputs,
- outputs=[status_output, zip_output]
- )
+ download_button.click(fn=on_download, inputs=[api_input, category_input, num_images_input], outputs=[status_output, zip_output, image_paths_state] + image_outputs + checkbox_outputs)
+ submit_button.click(fn=on_submit, inputs=[image_paths_state] + checkbox_outputs, outputs=[status_output, zip_output])

  demo.launch()
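
For reference, here is a minimal sketch of the request construction this commit switches to, using Pixabay as the example because its "webformatURL" field is a flat key in the response. The commit itself interpolates the Pixabay key into the URL string; passing it through the params argument, as below, sends an equivalent request. The API key, the helper name fetch_pixabay_page, and the sample query are placeholders for illustration, not values taken from the repository.

import time
import requests

PIXABAY_CONFIG = {
    "base_url": "https://pixabay.com/api/",
    "headers": {},
    "result_key": "hits",
    "image_key": "webformatURL",
    "delay": 1,
    "api_key": "YOUR_PIXABAY_API_KEY",  # placeholder, not a working key
}

def fetch_pixabay_page(query, page, per_page=40):
    # Mirrors the per-page request in fetch_image_urls: sleep for the configured
    # delay, send one GET with the query parameters, then pull the flat image key
    # out of each hit.
    cfg = PIXABAY_CONFIG
    params = {"key": cfg["api_key"], "q": query.lower(), "per_page": per_page, "page": page}
    time.sleep(cfg.get("delay", 0))
    response = requests.get(cfg["base_url"], headers=cfg["headers"], params=params, timeout=10)
    response.raise_for_status()
    results = response.json().get(cfg["result_key"], [])
    return [item[cfg["image_key"]] for item in results if item.get(cfg["image_key"])]

if __name__ == "__main__":
    print(f"Fetched {len(fetch_pixabay_page('nature', page=1))} image URLs")

Pexels and Unsplash differ only in where the credential travels: Pexels keeps its key in the Authorization header, while Unsplash passes client_id inside params, exactly as the branches in the reworked fetch_image_urls show.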