-
Notifications
You must be signed in to change notification settings - Fork 0
/
save_image.py
162 lines (137 loc) · 6.59 KB
/
save_image.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
import json
import os
import pickle
import urllib
import urllib.request  # required explicitly: `import urllib` alone does not load the request submodule
from argparse import Namespace
from collections import Counter

import flickrapi
import numpy as np
import pandas as pd
# Global run configuration (a plain attribute container; no CLI parsing is done).
args = Namespace(
    # Data and Path information
    api_key = u'[api_key]',             # Flickr API key -- placeholder, fill in before running
    api_secret = u'[api_secret]',       # Flickr API secret -- placeholder, fill in before running
    radius = 5,                         # search radius passed to flickr.walk
    save_dir = 'data_storage/',         # output dir for per-cell photo-id CSVs and completed state
    tags = None,                        # optional tag filter forwarded to the photo search
    len_grid = 1,                       # grid is len_grid x len_grid cells (1 => a single cell)
    image_dir = 'data_storage/images/'  # output dir for downloaded images, pickles, and metadata CSV
)
def get_latlon(id_x, id_y, num = args.len_grid):
    """Return the (lat, lon) anchor point for grid cell (id_x, id_y).

    Currently a stub: every cell maps to the same fixed point in Venice,
    Italy, regardless of the cell indices or the grid size ``num``.
    """
    return 45.438759, 12.327145
def collect_ids(flickr, lat, lon, radius, x, y, tags = None):
    """Collect up to 5000 geo-tagged Flickr photo ids around (lat, lon).

    flickr : authenticated flickrapi.FlickrAPI client.
    lat, lon : search centre for the grid cell.
    radius : search radius forwarded to flickr.walk.
    x, y : grid-cell indices, used only to name the checkpoint CSV.
    tags : optional tag filter forwarded to the search.

    Resumes from a previously checkpointed CSV when one exists for this
    cell, checkpoints every 200 newly collected ids, and returns early
    once 5000 ids have been gathered.  Returns the list of id strings.
    """
    csv_path = args.save_dir + 'photo_ids_{}_{}.csv'.format(x, y)
    # Robust resume check: the old `[files for ... in os.walk(...)][0]`
    # raised IndexError whenever save_dir did not exist yet.
    if os.path.isfile(csv_path):
        Ids = pd.read_csv(csv_path, sep='\t')['ids'].astype(str).unique().tolist()
    else:
        Ids = []
    # BUG FIX: the `radius` parameter was previously ignored in favour of
    # args.radius; honour the argument (callers pass args.radius anyway,
    # so existing behaviour is unchanged).
    walk = flickr.walk(has_geo = 1, lat = lat, lon = lon, radius = radius, tags=tags)
    for photo in walk:
        id_now = photo.get('id')
        if id_now in Ids:
            continue
        Ids.append(id_now)
        # Periodic progress print + checkpoint every 200 ids.
        if len(Ids) % 200 == 0:
            print('{} photo ids collected'.format(len(Ids)))
            pd.Series(Ids, name = 'ids').to_csv(csv_path, index=False)
        # Hard cap per cell: stop once 5000 ids are collected.
        if len(Ids) >= 5000:
            return Ids
    pd.Series(Ids, name = 'ids').to_csv(csv_path, index=False)
    return Ids
def update_df(Photos):
    """Placeholder hook for post-processing the Photos mapping.

    Currently an identity function: the input object is handed back
    untouched.
    """
    return Photos
def get_photos(flickr, Photos, Ids):
    """Fetch size + metadata records for each photo id and download thumbnails.

    flickr : authenticated flickrapi.FlickrAPI client.
    Photos : dict of photo-id -> metadata dict; ids already present are
             skipped, new entries are added, and the updated dict is returned.
    Ids    : iterable of photo-id strings to process.

    Progress is checkpointed to pickle files and a tab-separated CSV under
    args.image_dir, both periodically during the loop and once at the end.
    """
    # Normalise keys to str so membership tests against string ids match.
    Photos = {str(k):v for k,v in Photos.items()}
    # NOTE: this is a live dict view -- it grows as keys are added to Photos
    # below, which is what makes the periodic len(processed) check advance.
    processed = Photos.keys()
    print(len(processed))
    for id_now in Ids:
        if id_now in processed:
            continue
        else:
            Photos[id_now] = {}
        sizes = json.loads(flickr.photos.getSizes(photo_id = id_now, format='json'))
        info = json.loads(flickr.photos.getInfo(photo_id = id_now, format='json'))
        try:
            # Positional lookups assume a fixed ordering of Flickr's size
            # list (index 1 -> "q" 150px square, 4 -> "n" 320px, 8 -> "c")
            # -- TODO confirm; a shorter size list raises IndexError and the
            # photo is skipped by the except handler below.
            url_c = sizes['sizes']['size'][8]['source']
            url_q = sizes['sizes']['size'][1]['source']
            url_n = sizes['sizes']['size'][4]['source']
            url_largest = sizes['sizes']['size'][-1]['source']
            can = sizes['sizes']['candownload']
            Photos[id_now]['candownload'] = can
            Photos[id_now]['url_c'] = url_c
            Photos[id_now]['url_q'] = url_q
            Photos[id_now]['url_n'] = url_n
            Photos[id_now]['url_largest'] = url_largest
            Photos[id_now]['others'] = sizes
            Photos[id_now]['info'] = info
            Photos[id_now]['owner'] = info['photo']['owner']['nsid']
            Photos[id_now]['owner_loc'] = info['photo']['owner']['location']
            Photos[id_now]['title'] = info['photo']['title']['_content']
            Photos[id_now]['description'] = info['photo']['description']['_content']
            Photos[id_now]['comments'] = info['photo']['comments']['_content']
            Photos[id_now]['taken'] = info['photo']['dates']['taken']
            Photos[id_now]['views'] = info['photo']['views']
            Photos[id_now]['people'] = info['photo']['people']['haspeople']
            Photos[id_now]['tags'] = info['photo']['tags']['tag']
            Photos[id_now]['lat'] = info['photo']['location']['latitude']
            Photos[id_now]['lon'] = info['photo']['location']['longitude']
            Photos[id_now]['neighbourhood'] = info['photo']['location']['neighbourhood']['_content']
            Photos[id_now]['url'] = info['photo']['urls']['url'][0]['_content']
            if can:
                # NOTE(review): relies on urllib.request being loaded even
                # though only `import urllib` appears at the top of the file
                # -- confirm urllib.request is imported somewhere.
                urllib.request.urlretrieve(url_q, args.image_dir+'150/{}.jpg'.format(id_now))
                urllib.request.urlretrieve(url_n, args.image_dir+'320/{}.jpg'.format(id_now))
            # Periodic checkpoint roughly every 20 processed photos.
            if len(processed)%20 ==1:
                print('{}/{} photos collected'.format(len(processed),len(Ids)))
                with open(args.image_dir+'Photo_sizes_pre_sep.p', 'wb') as fp:
                    pickle.dump(Photos,fp, protocol=pickle.HIGHEST_PROTOCOL)
                with open(args.image_dir+'Photo_sizes.p', 'wb') as fp:
                    pickle.dump(Photos,fp, protocol=pickle.HIGHEST_PROTOCOL)
                # Drop the bulky raw API payloads before writing the CSV.
                photo_df = pd.DataFrame(Photos).T.drop(['others','info'],axis=1)
                photo_df.to_csv(args.image_dir+'photos_sizes.csv', sep='\t',encoding='utf-8-sig')
        except Exception as e:
            # Best-effort scrape: any missing field/size (or download error)
            # logs the id and moves on rather than aborting the whole run.
            print(e)
            print(id_now)
            continue
    # Final checkpoint after the loop completes.
    with open(args.image_dir+'Photo_sizes_pre.p', 'wb') as fp:
        pickle.dump(Photos,fp, protocol=pickle.HIGHEST_PROTOCOL)
    with open(args.image_dir+'Photo_sizes.p', 'wb') as fp:
        pickle.dump(Photos,fp, protocol=pickle.HIGHEST_PROTOCOL)
    photo_df = pd.DataFrame(Photos).T.drop(['others','info'],axis=1)
    photo_df.to_csv(args.image_dir+'photos_sizes.csv', sep='\t',encoding='utf-8-sig')
    return Photos
def main():
    """Drive the scrape: for each grid cell, collect photo ids, fetch the
    photo records, and persist completion state so an interrupted run can
    resume where it left off.
    """
    flickr = flickrapi.FlickrAPI(args.api_key, args.api_secret)
    # BUG FIX: completed state is *saved* under args.save_dir below, but was
    # previously *loaded* from args.image_dir, so resume never actually
    # worked.  Load from the same location it is written to.
    # Also: os.path.isfile replaces the old `[... os.walk(...)][0]` indexing,
    # which raised IndexError whenever the directory did not exist yet.
    if os.path.isfile(args.save_dir + 'completed.p'):
        with open(args.save_dir + 'completed.p', 'rb') as fp:
            completed = pickle.load(fp)
    else:
        completed = {}
    if os.path.isfile(args.image_dir + 'Photo_sizes.p'):
        with open(args.image_dir + 'Photo_sizes.p', 'rb') as fp:
            Photos = pickle.load(fp)
    else:
        Photos = {}
    for x in range(args.len_grid):
        for y in range(args.len_grid):
            # Skip cells that a previous run already finished.
            if (x, y) in completed.keys():
                continue
            lat, lon = get_latlon(x, y)
            # collect_ids handles resume-from-CSV internally, so the old
            # duplicated if/else around the CSV existence check was dead code.
            Ids = collect_ids(flickr, lat, lon, args.radius, tags=args.tags, x=x, y=y)
            Photos = get_photos(flickr, Photos, Ids)
            # Record per-cell completion state and checkpoint it immediately.
            completed[(x, y)] = {}
            completed[(x, y)]['lat'] = lat
            completed[(x, y)]['lon'] = lon
            completed[(x, y)]['collected'] = len(Ids)
            completed[(x, y)]['total'] = len(Photos)
            with open(args.save_dir + 'completed.p', 'wb') as fp:
                pickle.dump(completed, fp, protocol=pickle.HIGHEST_PROTOCOL)
            completed_df = pd.DataFrame(completed).T
            completed_df.to_csv(args.save_dir + 'completed.csv')

if __name__ == "__main__":
    main()
"""## END"""