"""Gradio app that fetches adult-image URLs from a RapidAPI endpoint, downloads
them, shows a selectable gallery, and zips the user's selection.

Daily-uniqueness is enforced by tracking which API pages were already used
today in a small JSON file; the tracking resets automatically each day.
"""

import json
import os
import random
import shutil
import zipfile
from datetime import datetime, date  # noqa: F401 (datetime kept for compatibility)

import gradio as gr
import requests
from PIL import Image

# Configuration
# SECURITY NOTE(review): the API key is hardcoded in source. Move it to an
# environment variable (e.g. os.environ["RAPIDAPI_KEY"]) before committing
# or sharing this file.
API_URL = "https://porn-pictures-api.p.rapidapi.com/pornstars/{gender}/{page}"
API_HEADERS = {
    "x-rapidapi-key": "2dcd4142a4msh0d39ff26a5b144cp1e8703jsndf67b0582674",
    "x-rapidapi-host": "porn-pictures-api.p.rapidapi.com",
}
OUTPUT_DIR = "downloaded_images"                          # Base output folder
IMAGES_DIR = os.path.join(OUTPUT_DIR, "images")           # Subfolder for downloaded images
ZIP_FILE = os.path.join(OUTPUT_DIR, "images.zip")         # Path for the output ZIP file
TRACKING_FILE = os.path.join(OUTPUT_DIR, "used_pages.json")  # File tracking used pages

# Ensure the output directory exists before anything tries to write into it.
os.makedirs(OUTPUT_DIR, exist_ok=True)

# Constants
ITEMS_PER_PAGE = 40
DAILY_IMAGE_LIMIT = 4000
MAX_PAGES = DAILY_IMAGE_LIMIT // ITEMS_PER_PAGE  # 100 pages
REQUEST_TIMEOUT = 30  # seconds; applied to every HTTP call so a dead host can't hang the app


def load_used_pages():
    """Load today's used-pages tracking data, initializing or resetting as needed.

    Returns:
        dict with keys ``"date"`` (ISO date string) and ``"used_pages"`` (list of ints).
    """
    today = str(date.today())
    if os.path.exists(TRACKING_FILE):
        try:
            with open(TRACKING_FILE, "r") as f:
                data = json.load(f)
        except (OSError, json.JSONDecodeError):
            # Corrupt or unreadable tracking file: start fresh rather than crash.
            data = {"date": today, "used_pages": []}
        else:
            if data.get("date") != today:
                # New day: reset the used-pages list.
                data = {"date": today, "used_pages": []}
    else:
        data = {"date": today, "used_pages": []}
    return data


def save_used_pages(data):
    """Persist the used-pages tracking data to TRACKING_FILE."""
    with open(TRACKING_FILE, "w") as f:
        json.dump(data, f)


def get_available_pages(num_pages_needed):
    """Reserve and return ``num_pages_needed`` random unused page numbers.

    The selected pages are recorded as used immediately so repeated calls
    within the same day never hand out the same page twice.

    Returns:
        list[int] of page numbers, or ``None`` when too few unused pages remain.
    """
    data = load_used_pages()
    used_pages = set(data["used_pages"])
    all_pages = set(range(1, MAX_PAGES + 1))  # Pages 1 to MAX_PAGES (100)
    available_pages = list(all_pages - used_pages)
    if len(available_pages) < num_pages_needed:
        return None  # Not enough unique pages left today
    selected_pages = random.sample(available_pages, num_pages_needed)
    data["used_pages"].extend(selected_pages)
    save_used_pages(data)
    return selected_pages


def fetch_image_urls(gender, num_images):
    """Fetch image URLs from the API based on gender and desired number of images.

    Args:
        gender: category slug used in the API path (lower-cased before use).
        num_images: how many URLs to collect.

    Returns:
        list[str] of up to ``num_images`` picture URLs; empty when no unused
        pages remain today or the API cannot be reached.
    """
    # Ceiling division: how many ITEMS_PER_PAGE-sized pages must be pulled.
    num_pages_needed = (num_images + ITEMS_PER_PAGE - 1) // ITEMS_PER_PAGE
    pages = get_available_pages(num_pages_needed)
    if not pages:
        return []  # Indicate no unique images available
    image_urls = []
    for page in pages:
        # BUGFIX: stop issuing API requests once enough URLs are collected
        # (the original inner `break` only exited the item loop, so the
        # outer loop kept fetching unneeded pages).
        if len(image_urls) >= num_images:
            break
        url = API_URL.format(gender=gender.lower(), page=page)
        try:
            response = requests.get(url, headers=API_HEADERS, timeout=REQUEST_TIMEOUT)
            response.raise_for_status()
            data = response.json()
            if "result" not in data or not data["result"]:
                break
            for item in data["result"]:
                if len(image_urls) >= num_images:
                    break
                image_urls.append(item["picture"])
        except Exception as e:
            # Broad catch is deliberate: any page failure (network, JSON,
            # missing key) aborts fetching and returns what we have so far.
            print(f"Error fetching page {page}: {e}")
            break
    return image_urls[:num_images]


def download_images(image_urls):
    """Download each URL into IMAGES_DIR, validating files with Pillow.

    The directory is cleared first so stale files from a previous run can
    never leak into a new session.

    Returns:
        (downloaded_count, image_paths) — count of valid images and their paths.
    """
    if os.path.exists(IMAGES_DIR):
        shutil.rmtree(IMAGES_DIR)  # Clear previous contents
    os.makedirs(IMAGES_DIR, exist_ok=True)
    downloaded_count = 0
    image_paths = []
    for idx, url in enumerate(image_urls, 1):
        image_path = os.path.join(IMAGES_DIR, f"img{idx}.jpg")
        try:
            response = requests.get(url, stream=True, timeout=REQUEST_TIMEOUT)
            response.raise_for_status()
            with open(image_path, "wb") as f:
                for chunk in response.iter_content(chunk_size=8192):
                    if chunk:
                        f.write(chunk)
            # Raises if the downloaded bytes are not a valid image.
            Image.open(image_path).verify()
            downloaded_count += 1
            image_paths.append(image_path)
            print(f"Downloaded {idx}/{len(image_urls)}: {url}")
        except Exception as e:
            print(f"Error downloading {url}: {e}")
            # BUGFIX: remove any partial/corrupt file so it cannot linger
            # on disk (the original left failed downloads in IMAGES_DIR).
            if os.path.exists(image_path):
                os.remove(image_path)
    return downloaded_count, image_paths


def create_zip_file(selected_image_paths):
    """Create a fresh ZIP archive containing the selected images.

    Returns:
        str: path to the created ZIP file.
    """
    if os.path.exists(ZIP_FILE):
        os.remove(ZIP_FILE)
    with zipfile.ZipFile(ZIP_FILE, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for image_path in selected_image_paths:
            # Store paths relative to OUTPUT_DIR so the archive has a
            # clean "images/..." layout instead of absolute paths.
            arcname = os.path.relpath(image_path, OUTPUT_DIR)
            zipf.write(image_path, arcname)
    return ZIP_FILE


def process_and_display(gender, num_images):
    """Fetch and download images, then prepare data for display.

    Returns:
        (status_message, zip_path_or_None, image_paths_or_None, image_paths_or_None)
    """
    num_images = int(num_images)  # Dropdown values arrive as strings
    if num_images > 24:
        num_images = 24  # Hard cap matching the gallery capacity
    image_urls = fetch_image_urls(gender, num_images)
    if not image_urls:
        return "No unique images available today or API limit reached.", None, None, None
    downloaded_count, image_paths = download_images(image_urls)
    if downloaded_count == 0:
        return "No images were successfully downloaded.", None, None, None
    return (
        f"Successfully downloaded {downloaded_count}/{num_images} images. "
        "Select images to include in ZIP below.",
        None,
        image_paths,
        image_paths,
    )


def process_zip_submission(image_paths, *checkbox_states):
    """Create a ZIP file containing only the checkbox-selected images.

    Returns:
        (status_message, zip_path_or_None)
    """
    if not image_paths:
        return "No images available to process.", None
    selected_image_paths = [
        image_paths[i] for i, state in enumerate(checkbox_states) if state
    ]
    if not selected_image_paths:
        return "No images selected for ZIP.", None
    zip_path = create_zip_file(selected_image_paths)
    return f"ZIP file created with {len(selected_image_paths)} images at {zip_path}", zip_path


# Gradio Interface
with gr.Blocks(title="Image Downloader") as demo:
    gr.Markdown("### Select Parameters to Download Images")
    gender_input = gr.Dropdown(
        label="Category (Gender, Race, Hair Color, Body Type)",
        choices=["female", "male", "nonbinary", "black", "latino", "asian", "white",
                 "mixed", "redhead", "blonde", "brunette", "blackhair", "bald",
                 "bbw", "curvy", "slim", "muscular", "thick"],
        value="female",
        allow_custom_value=True,
    )
    num_images_input = gr.Dropdown(
        label="Number of Images (Max 24)",
        choices=["4", "8", "12", "16", "20", "24"],
        value="4",
    )
    download_button = gr.Button("Fetch and Display Images")

    gr.Markdown("### Download Status")
    status_output = gr.Textbox(label="Status", interactive=False)

    gr.Markdown("### Download Your Images")
    zip_output = gr.File(label="Download ZIP", visible=False)

    gr.Markdown("### Image Gallery (Click Thumbnails to View Full Size)")
    image_paths_state = gr.State()

    # Fixed-size grid: gradio components must be created up front, so we
    # build 24 image/checkbox pairs and toggle their visibility at runtime.
    IMAGES_PER_ROW = 4
    MAX_ROWS = 6
    TOTAL_IMAGES = IMAGES_PER_ROW * MAX_ROWS
    image_outputs = []
    checkbox_outputs = []
    for row in range(MAX_ROWS):
        with gr.Row():
            for col in range(IMAGES_PER_ROW):
                idx = row * IMAGES_PER_ROW + col
                with gr.Column(min_width=150):
                    image_output = gr.Image(
                        label=f"Image {idx+1}",
                        visible=False,
                        height=150,
                        width=150,
                    )
                    checkbox_output = gr.Checkbox(
                        label="Include in ZIP",
                        value=True,
                        visible=False,
                    )
                    image_outputs.append(image_output)
                    checkbox_outputs.append(checkbox_output)

    gr.Markdown("### Submit Selections")
    submit_button = gr.Button("Create ZIP of Selected Images")

    def on_download(gender, num_images):
        """Fetch/download images and update the gallery components."""
        status, zip_path, image_paths, _ = process_and_display(gender, num_images)
        if image_paths:
            updates = {
                status_output: status,
                zip_output: gr.File(value=None, visible=False),
                image_paths_state: image_paths,
            }
            for i in range(TOTAL_IMAGES):
                if i < len(image_paths):
                    # Show slot i with its downloaded image, checkbox ticked.
                    updates[image_outputs[i]] = gr.Image(
                        value=image_paths[i],
                        visible=True,
                        label=f"Image {i+1}",
                        width=150,
                        height=150,
                    )
                    updates[checkbox_outputs[i]] = gr.Checkbox(
                        value=True,
                        visible=True,
                        label="Include in ZIP",
                    )
                else:
                    # Hide unused grid slots.
                    updates[image_outputs[i]] = gr.Image(value=None, visible=False)
                    updates[checkbox_outputs[i]] = gr.Checkbox(value=False, visible=False)
            return updates
        # Failure path: clear everything and surface the status message.
        return {
            status_output: status,
            zip_output: gr.File(visible=False),
            image_paths_state: None,
            **{image_outputs[i]: gr.Image(value=None, visible=False)
               for i in range(TOTAL_IMAGES)},
            **{checkbox_outputs[i]: gr.Checkbox(value=False, visible=False)
               for i in range(TOTAL_IMAGES)},
        }

    def on_submit(image_paths, *checkbox_states):
        """Zip the selected images and reveal the download link."""
        status, zip_path = process_zip_submission(image_paths, *checkbox_states)
        return {
            status_output: status,
            zip_output: (gr.File(value=zip_path, visible=True)
                         if zip_path else gr.File(visible=False)),
        }

    download_button.click(
        fn=on_download,
        inputs=[gender_input, num_images_input],
        outputs=[status_output, zip_output, image_paths_state]
        + image_outputs
        + checkbox_outputs,
    )
    submit_button.click(
        fn=on_submit,
        inputs=[image_paths_state] + checkbox_outputs,
        outputs=[status_output, zip_output],
    )


if __name__ == "__main__":
    # Guarded so importing this module no longer starts the web server.
    demo.launch()