Upload load.py
load.py
CHANGED
@@ -1,100 +1,54 @@
 import json
-import sys
 import os
+import time
 import urllib.request as ureq
-import pdb
 
-
-
-
-
-
+def download_image_with_retries(url, output_file, max_retries=3, backoff_factor=1):
+    """Attempt to download an image with retries upon failure.
+
+    Args:
+        url (str): The URL of the image to download.
+        output_file (str): The local file path to save the downloaded image.
+        max_retries (int): The maximum number of retry attempts.
+        backoff_factor (float): The factor to calculate the delay between retries.
+    """
+    attempt = 0
+    while attempt < max_retries:
+        try:
+            ureq.urlretrieve(url, output_file)
+            print(f"Successfully downloaded: {output_file}")
+            return True
+        except Exception as e:
+            print(f"Attempt {attempt + 1} failed for {url}: {e}")
+            time.sleep(backoff_factor * (2 ** attempt))
+            attempt += 1
+    print(f"Failed to download {url} after {max_retries} attempts.")
+    return False
+
+def verify_and_download_images(data):
+    """Verify if images are downloaded; if not, download them.
+
+    Args:
+        data (dict): The dataset containing image URLs and additional information.
+    """
+    images_directory = './images'
+    os.makedirs(images_directory, exist_ok=True)
+
+    for key, value in data.items():
+        image_url = value['imageURL']
+        ext = os.path.splitext(image_url)[1]
+        output_file = f'{images_directory}/{key}{ext}'
+
+        if not os.path.exists(output_file):
+            print(f"Image {key}{ext} not found, attempting to download...")
+            if not download_image_with_retries(image_url, output_file):
+                print(f"Warning: Could not download image {image_url}")
+        else:
+            print(f"Image {key}{ext} already exists. No download needed.")
+
+# Load the dataset JSON file
 with open('dataset.json', 'r') as fp:
-
-
-## dictionary data contains image URL, questions and answers ##
-
-
-
-
-################################################################
-############### Script for downloading images ##################
-################################################################
-## Make a directory images to store all images there ##########
-if download == 1:
-    os.makedirs('./images', exist_ok=True)
-    for k in data.keys():
-        ext=os.path.splitext(data[k]['imageURL'])[1]
-        outputFile='images/%s%s'%(k,ext)
-        #pdb.set_trace()
-        ureq.urlretrieve(data[k]['imageURL'],outputFile)
-
-
-
-#################################################################
-################### Example of data access #####################
-################################################################
-for k in data.keys():
-    ext=os.path.splitext(data[k]['imageURL'])[1]
-    imageFile='images/%s%s'%(k,ext)
-
-    print('************************')
-    print('Image file: %s'%(imageFile))
-    print('List of questions:')
-    print(data[k]['questions'])
-    print('List of corresponding answers:')
-    print(data[k]['answers'])
-    print('Use this image as training (1), validation (2) or testing (3): %s'%(data[k]['split']))
-    print('*************************')
-
-
-
-
-
-######################################################################
-########################### Get dataset stats ########################
-######################################################################
-genSet=set()
-for k in data.keys():
-    genSet.add(data[k]['genre'])
-
-
-
-numImages=len(data.keys())
-numQApairs=0
-numWordsInQuestions=0
-numWordsInAnswers=0
-numQuestionsPerImage=0
-ANS=set() # Set of unique answers
-authorSet=set()
-bookSet=set()
-
-
-for imgId in data.keys():
-    numQApairs = numQApairs+len(data[imgId]['questions'])
-    numQuestionsPerImage = numQuestionsPerImage + len(data[imgId]['questions'])
-    authorSet.add(data[imgId]['authorName'])
-    bookSet.add(data[imgId]['title'])
-
-    for qno in range(len(data[imgId]['questions'])):
-        ques=data[imgId]['questions'][qno]
-        numWordsInQuestions = numWordsInQuestions+len(ques.split())
-    for ano in range(len(data[imgId]['answers'])):
-        ans=data[imgId]['answers'][ano]
-        ANS.add(ans)
-        numWordsInAnswers = numWordsInAnswers+len(str(ans).split())
-
-
-
-print("--------------------------------")
-print("Number of Images: %d" %(numImages))
-print("Number of QA pairs: %d" %(numQApairs))
-print("Number of unique author: %d" %(len(authorSet)))
-print("Number of unique title: %d" %(len(bookSet)))
-print("Number of unique answers: %d" %(len(ANS)))
-print("Number of unique genre: %d" %(len(genSet)))
-print("Average question length (in words): %.2f" %(float(numWordsInQuestions)/float(numQApairs)))
-print("Average answer length (in words): %.2f" %(float(numWordsInAnswers)/float(numQApairs)))
-print("Average number of questions per image: %.2f" %(float(numQuestionsPerImage)/float(numImages)))
-print("--------------------------------")
+    data = json.load(fp)
 
+# Check and download images as necessary
+verify_and_download_images(data)