evijit HF Staff committed on
Commit 06fc7a9 · verified · 1 Parent(s): e6adc05

Upload image_dataset.ipynb

Files changed (1)
  1. image_dataset.ipynb +56 -81
image_dataset.ipynb CHANGED
@@ -581,7 +581,7 @@
  },
  {
   "cell_type": "code",
- "execution_count": 19,
+ "execution_count": 22,
  "metadata": {},
  "outputs": [
  {
@@ -589,14 +589,8 @@
  "output_type": "stream",
  "text": [
  "Reading metadata from /fsx/avijit/projects/datacommonsMA/massgis_2023_aerial_imagery/imagefolder-dataset/train/metadata.csv\n",
- "Selected 50 samples\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "Processing images:   0%|          | 0/50 [00:00<?, ?it/s]"
+ "Selected 50 samples\n",
+ "Loading and resizing images...\n"
  ]
  },
  {
@@ -604,28 +598,20 @@
  "output_type": "stream",
  "text": [
  "/fsx/avijit/anaconda3/envs/py312/lib/python3.12/site-packages/PIL/Image.py:3402: DecompressionBombWarning: Image size (100000000 pixels) exceeds limit of 89478485 pixels, could be decompression bomb DOS attack.\n",
- "  warnings.warn(\n",
- "Processing images: 100%|█████████████████████████████████████████| 50/50 [00:48<00:00,  1.04it/s]"
+ "  warnings.warn(\n"
  ]
  },
  {
  "name": "stdout",
  "output_type": "stream",
  "text": [
+ "Resizing images...\n",
+ "Saving to /fsx/avijit/projects/datacommonsMA/massgis_2023_aerial_imagery/imagefolder-dataset/data/sample_dataset_256x256.parquet...\n",
  "Saved sample dataset to /fsx/avijit/projects/datacommonsMA/massgis_2023_aerial_imagery/imagefolder-dataset/data/sample_dataset_256x256.parquet\n",
- "Processed images saved in /fsx/avijit/projects/datacommonsMA/massgis_2023_aerial_imagery/imagefolder-dataset/data/processed_images\n",
- "File size: 0.00 MB\n",
- "Schema of the created Parquet file:\n",
- "image: string\n",
- "tilename: string\n",
- "zone: int64\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "\n"
+ "File size: 6.28 MB\n",
+ "Verifying saved file...\n",
+ "Columns in saved file: ['image', 'tilename', 'zone']\n",
+ "Number of rows: 50\n"
  ]
  }
  ],
@@ -637,10 +623,17 @@
  "from PIL import Image as PILImage\n",
  "import numpy as np\n",
  "from tqdm import tqdm\n",
+ "import io\n",
+ "\n",
+ "# Import the pandas_image_methods library\n",
+ "from pandas_image_methods import PILMethods\n",
+ "\n",
+ "# Register the PIL methods accessor\n",
+ "pd.api.extensions.register_series_accessor(\"pil\")(PILMethods)\n",
  "\n",
  "base_dir = \"/fsx/avijit/projects/datacommonsMA/massgis_2023_aerial_imagery/imagefolder-dataset\"\n",
  "train_dir = os.path.join(base_dir, \"train\")\n",
- "output_dir = os.path.join(base_dir, \"data\")  # Matches the preview config\n",
+ "output_dir = os.path.join(base_dir, \"data\")\n",
  "output_path = os.path.join(output_dir, \"sample_dataset_256x256.parquet\")\n",
  "\n",
  "# Create the output directory if it doesn't exist\n",
@@ -659,84 +652,54 @@
  "\n",
  "print(f\"Selected {len(metadata_df)} samples\")\n",
  "\n",
- "# Initialize lists to store data\n",
- "image_paths = []\n",
- "tilenames = []\n",
- "zones = []\n",
- "\n",
- "processed_images_dir = os.path.join(output_dir, \"processed_images\")\n",
- "os.makedirs(processed_images_dir, exist_ok=True)\n",
- "\n",
- "# Process each image\n",
- "for _, row in tqdm(metadata_df.iterrows(), total=len(metadata_df), desc=\"Processing images\"):\n",
- "    try:\n",
- "        # Get image path\n",
- "        img_path = os.path.join(train_dir, row['file_name'])\n",
- "        \n",
- "        # Open, resize, and save the processed image\n",
- "        with PILImage.open(img_path) as img:\n",
- "            # Convert to RGB if needed\n",
- "            if img.mode != 'RGB':\n",
- "                img = img.convert('RGB')\n",
- "            \n",
- "            # Resize to target size\n",
- "            img_resized = img.resize(target_size)\n",
- "            \n",
- "            # Save the resized image in a separate folder\n",
- "            output_image_path = os.path.join(processed_images_dir, row['file_name'])\n",
- "            img_resized.save(output_image_path, format='JPEG', quality=90)\n",
- "            \n",
- "            # Append data\n",
- "            image_paths.append(output_image_path)  # Store file path instead of bytes\n",
- "            tilenames.append(row['tilename'])\n",
- "            zones.append(int(row['zone']))  # Ensure zone is an integer\n",
- "    except Exception as e:\n",
- "        print(f\"Error processing {row['file_name']}: {e}\")\n",
+ "# Create DataFrame with just the paths first\n",
+ "df = pd.DataFrame({\n",
+ "    'file_path': [os.path.join(train_dir, row['file_name']) for _, row in metadata_df.iterrows()],\n",
+ "    'tilename': metadata_df['tilename'].tolist(),\n",
+ "    'zone': metadata_df['zone'].astype('int64').tolist()\n",
+ "})\n",
  "\n",
- "# Convert lists to Arrow arrays\n",
- "image_array = pa.array(image_paths, type=pa.string())  # Store paths as strings\n",
- "tilename_array = pa.array(tilenames, type=pa.string())\n",
- "zone_array = pa.array(zones, type=pa.int64())\n",
+ "# Load images using the pil accessor\n",
+ "print(\"Loading and resizing images...\")\n",
+ "df['image'] = df['file_path'].pil.open()\n",
  "\n",
- "# Define schema explicitly\n",
- "schema = pa.schema([\n",
- "    ('image', pa.string()),  # Store as file path (datasets library will auto-load)\n",
- "    ('tilename', pa.string()),\n",
- "    ('zone', pa.int64())\n",
- "])\n",
+ "# Resize the images\n",
+ "print(\"Resizing images...\")\n",
+ "df['image'] = df['image'].pil.resize(target_size)\n",
  "\n",
- "# Create Arrow Table using the schema\n",
- "table = pa.Table.from_arrays([image_array, tilename_array, zone_array], schema=schema)\n",
+ "# Keep only the required columns for the preview\n",
+ "df = df[['image', 'tilename', 'zone']]\n",
  "\n",
- "# Write to Parquet\n",
- "pq.write_table(table, output_path, compression='snappy', flavor=['spark'])\n",
+ "# Save to Parquet (the library will handle the PIL images correctly)\n",
+ "print(f\"Saving to {output_path}...\")\n",
+ "df.to_parquet(output_path)\n",
  "\n",
  "print(f\"Saved sample dataset to {output_path}\")\n",
- "print(f\"Processed images saved in {processed_images_dir}\")\n",
  "print(f\"File size: {os.path.getsize(output_path) / (1024 * 1024):.2f} MB\")\n",
  "\n",
- "# To check if the schema matches what's expected\n",
- "print(\"Schema of the created Parquet file:\")\n",
- "parquet_schema = pq.read_schema(output_path)\n",
- "print(parquet_schema)\n"
+ "# Verify the saved file\n",
+ "print(\"Verifying saved file...\")\n",
+ "df_check = pd.read_parquet(output_path)\n",
+ "print(\"Columns in saved file:\", df_check.columns.tolist())\n",
+ "print(\"Number of rows:\", len(df_check))"
  ]
  },
  {
  "cell_type": "code",
- "execution_count": 20,
+ "execution_count": 24,
  "metadata": {},
  "outputs": [
  {
  "name": "stdout",
  "output_type": "stream",
  "text": [
- "<pyarrow._parquet.FileMetaData object at 0x7fa0858cc590>\n",
+ "<pyarrow._parquet.FileMetaData object at 0x7fa0fc694f90>\n",
  "  created_by: parquet-cpp-arrow version 19.0.0\n",
- "  num_columns: 3\n",
+ "  num_columns: 4\n",
  "  num_rows: 50\n",
  "  num_row_groups: 1\n",
  "  format_version: 2.6\n",
- "  serialized_size: 943\n"
+ "  serialized_size: 2731\n"
  ]
  }
  ],
@@ -758,6 +721,18 @@
  "display_name": "py312",
  "language": "python",
  "name": "py312"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.9"
  }
  },
  "nbformat": 4,