# Collect Flickr photo metadata over a regular lat/lon grid (covering,
# approximately, central Venice) and checkpoint intermediate results to disk
# so interrupted runs can be resumed.
import flickrapi
import json
import pandas as pd
import numpy as np
from argparse import Namespace
from collections import Counter
import pickle
import os

args = Namespace(
    # Data and Path information
    api_key=u'[api_key]',        # fill in with your Flickr API key
    api_secret=u'[api_secret]',  # fill in with your Flickr API secret
    radius=0.3,                  # search radius around each grid point (km, Flickr's default unit)
    save_dir='data_storage/',
    tags=None,                   # optional tag filter passed to the Flickr search
    len_grid=20,                 # the study area is split into a len_grid x len_grid grid
)

def get_latlon(id_x, id_y, num=args.len_grid):
    """Map grid indices (id_x, id_y) to a (lat, lon) point on a regular
    num x num grid spanning the bounding box below."""
    lat_min = 45.420855
    lon_min = 12.291054
    lat_max = 45.448286
    lon_max = 12.369234
    lat_d = (lat_max - lat_min) / (num - 1)
    lon_d = (lon_max - lon_min) / (num - 1)
    lat = lat_min + id_x * lat_d
    lon = lon_min + id_y * lon_d
    return lat, lon

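# Quick sanity check (illustrative only, not used by the pipeline): the corner
# indices of the grid should map back to the bounding box defined above, up to
# floating-point rounding.
#
#   >>> get_latlon(0, 0)
#   (45.420855, 12.291054)
#   >>> get_latlon(args.len_grid - 1, args.len_grid - 1)
#   (45.448286, 12.369234)
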
def collect_ids(flickr, lat, lon, radius, x, y, tags=None):
    """Collect photo ids around (lat, lon) for grid cell (x, y), resuming from
    a previously saved id list if one exists."""
    if 'photo_ids_{}_{}.csv'.format(x, y) in os.listdir(args.save_dir):
        # ids come back from Flickr as strings, so cast the saved column too
        Ids = pd.read_csv(args.save_dir + 'photo_ids_{}_{}.csv'.format(x, y), sep='\t')['ids'].astype(str).tolist()
    else:
        Ids = []
    walk = flickr.walk(has_geo=1, lat=lat, lon=lon, radius=radius, tags=tags)
    for photo in walk:
        id_now = photo.get('id')
        if id_now in Ids:
            continue
        Ids.append(id_now)
        if len(Ids) % 200 == 0:
            print('{} photo ids collected'.format(len(Ids)))
            # checkpoint the id list every 200 new photos
            pd.Series(Ids, name='ids').to_csv(args.save_dir + 'photo_ids_{}_{}.csv'.format(x, y), sep='\t', index=False)
    # final save once the walk is exhausted
    pd.Series(Ids, name='ids').to_csv(args.save_dir + 'photo_ids_{}_{}.csv'.format(x, y), sep='\t', index=False)
    return Ids

def update_df(Photos):
    # placeholder; currently a no-op and not called anywhere in this script
    return Photos

def get_photos(flickr, Photos, Ids):
    """Fetch and store metadata for every photo id in Ids that is not already
    in the Photos dict, checkpointing the dict to disk as it grows."""
    processed = Photos.keys()  # live view: grows as new entries are added
    for id_now in Ids:
        id_now = str(id_now)   # normalise ids to strings so resume checks match
        if id_now in processed:
            continue
        Photos[id_now] = {}
        info = json.loads(flickr.photos.getInfo(photo_id=id_now, format='json'))
        Photos[id_now]['info'] = info
        Photos[id_now]['owner'] = info['photo']['owner']['nsid']
        Photos[id_now]['owner_loc'] = info['photo']['owner']['location']
        Photos[id_now]['title'] = info['photo']['title']['_content']
        Photos[id_now]['description'] = info['photo']['description']['_content']
        Photos[id_now]['comments'] = info['photo']['comments']['_content']
        Photos[id_now]['taken'] = info['photo']['dates']['taken']
        Photos[id_now]['views'] = info['photo']['views']
        Photos[id_now]['people'] = info['photo']['people']['haspeople']
        Photos[id_now]['tags'] = info['photo']['tags']['tag']
        Photos[id_now]['lat'] = info['photo']['location']['latitude']
        Photos[id_now]['lon'] = info['photo']['location']['longitude']
        # not every photo has a neighbourhood, so fall back to an empty string
        Photos[id_now]['neighbourhood'] = info['photo']['location'].get('neighbourhood', {}).get('_content', '')
        Photos[id_now]['url'] = info['photo']['urls']['url'][0]['_content']
        if len(processed) % 100 == 1:
            print('{}/{} photos collected'.format(len(processed), len(Ids)))
            # checkpoint roughly every 100 photos
            with open(args.save_dir + 'Photo_info_last.p', 'wb') as fp:
                pickle.dump(Photos, fp, protocol=pickle.HIGHEST_PROTOCOL)
            photo_df = pd.DataFrame(Photos).T.drop('info', axis=1)
            photo_df.to_csv(args.save_dir + 'photos_last.csv', sep='\t', encoding='utf-8-sig')
    # final save: write a second copy of the pickle as a simple backup
    with open(args.save_dir + 'Photo_info_last_pre.p', 'wb') as fp:
        pickle.dump(Photos, fp, protocol=pickle.HIGHEST_PROTOCOL)
    with open(args.save_dir + 'Photo_info_last.p', 'wb') as fp:
        pickle.dump(Photos, fp, protocol=pickle.HIGHEST_PROTOCOL)
    photo_df = pd.DataFrame(Photos).T.drop('info', axis=1)
    photo_df.to_csv(args.save_dir + 'photos_last.csv', sep='\t', encoding='utf-8-sig')
    return Photos

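# Illustrative snippet for reloading the collected table afterwards (assumes a
# completed run has written photos_last.csv to args.save_dir; not part of the
# collection pipeline itself):
#
#   df = pd.read_csv(args.save_dir + 'photos_last.csv', sep='\t',
#                    encoding='utf-8-sig', index_col=0)
#   df['views'] = pd.to_numeric(df['views'], errors='coerce')
#   print(df[['title', 'views', 'lat', 'lon']].head())
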
def main():
    flickr = flickrapi.FlickrAPI(args.api_key, args.api_secret)
    os.makedirs(args.save_dir, exist_ok=True)  # make sure the save directory exists
    # resume bookkeeping: completed maps (x, y) grid cells to collection stats
    if 'completed.p' in os.listdir(args.save_dir):
        with open(args.save_dir + 'completed.p', 'rb') as fp:
            completed = pickle.load(fp)
    else:
        completed = {}
    for x in range(args.len_grid):
        for y in range(args.len_grid):
            if (x, y) in completed:
                continue
            lat, lon = get_latlon(x, y)
            if 'photo_ids_{}_{}.csv'.format(x, y) in os.listdir(args.save_dir):
                Ids = pd.read_csv(args.save_dir + 'photo_ids_{}_{}.csv'.format(x, y), sep='\t')['ids'].astype(str).tolist()
            else:
                Ids = collect_ids(flickr, lat, lon, args.radius, tags=args.tags, x=x, y=y)
            if 'Photo_info_last.p' in os.listdir(args.save_dir):
                with open(args.save_dir + 'Photo_info_last.p', 'rb') as fp:
                    Photos = pickle.load(fp)
            else:
                Photos = {}
            Photos = get_photos(flickr, Photos, Ids)
            completed[(x, y)] = {}
            completed[(x, y)]['lat'] = lat
            completed[(x, y)]['lon'] = lon
            completed[(x, y)]['collected'] = len(Ids)
            completed[(x, y)]['total'] = len(Photos)
            with open(args.save_dir + 'completed.p', 'wb') as fp:
                pickle.dump(completed, fp, protocol=pickle.HIGHEST_PROTOCOL)
    # final summary of the whole grid
    completed_df = pd.DataFrame(completed).T
    completed_df.to_csv(args.save_dir + 'completed.csv')

if __name__ == "__main__":
    main()
"""## END"""