# generate.py
# Forked from mike0sv/Reuters-full-data-set.
import os
import pickle
from datetime import timedelta, date, datetime

import bs4
import requests

def get_soup_from_link(link):
    """Fetch a Reuters page (absolute or site-relative link) and parse it."""
    if not link.startswith('http://www.reuters.com'):
        link = 'http://www.reuters.com' + link
    print(link)
    response = requests.get(link)
    # Fail fast on HTTP errors instead of asserting on the status code.
    response.raise_for_status()
    return bs4.BeautifulSoup(response.content, 'html.parser')
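
# Example usage (a sketch; the archive path mirrors the one built in run_full
# below). Relative links are resolved against http://www.reuters.com, so both
# calls fetch the same page:
#
#   get_soup_from_link('/resources/archive/us/20070101.html')
#   get_soup_from_link('http://www.reuters.com/resources/archive/us/20070101.html')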

def date_range(start_date, end_date):
    """Yield each date from start_date up to, but not including, end_date."""
    for n in range(int((end_date - start_date).days)):
        yield start_date + timedelta(n)
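
# A quick sanity check of date_range (hypothetical usage, not part of the
# original script): the end date is exclusive, so this yields three dates.
#
#   >>> list(date_range(date(2007, 1, 1), date(2007, 1, 4)))
#   [datetime.date(2007, 1, 1), datetime.date(2007, 1, 2), datetime.date(2007, 1, 3)]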

def run_full():
    today = datetime.now()
    output_dir = 'output_' + today.strftime('%Y-%m-%d-%HH%MM')
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    print('Generating the full dataset in: {}'.format(output_dir))
    start_date = date(2007, 1, 1)
    end_date = today.date()
    iterations = 0
    for single_date in date_range(start_date, end_date):
        output = []
        string_date = single_date.strftime('%Y%m%d')
        link = 'http://www.reuters.com/resources/archive/us/{}.html'.format(string_date)
        try:
            soup = get_soup_from_link(link)
            targets = soup.find_all('div', {'class': 'headlineMed'})
        except Exception:
            print('EXCEPTION RAISED. Could not download link: {}. Resuming anyway.'.format(link))
            targets = []
        for target in targets:
            # Each headline div holds an <a> (title + href) followed by a
            # text node carrying the publication time.
            try:
                timestamp = str(string_date) + str(target.contents[1])
            except Exception:
                timestamp = None
                print('EXCEPTION RAISED. Timestamp set to None. Resuming.')
            title = str(target.contents[0].contents[0])
            href = str(target.contents[0].attrs['href'])
            print('iterations = {}, date = {}, ts = {}, t = {}, h = {}'.format(
                str(iterations).zfill(9), string_date, timestamp, title, href))
            output.append({'ts': timestamp, 'title': title, 'href': href})
            iterations += 1
        # One pickle per day, e.g. output_<timestamp>/20070101.pkl.
        output_filename = os.path.join(output_dir, string_date + '.pkl')
        with open(output_filename, 'wb') as w:
            pickle.dump(output, w)
        print('-> written dump to {}'.format(output_filename))

if __name__ == '__main__':
    run_full()
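
# A minimal sketch of reading a daily dump back, assuming a file produced by
# run_full() exists (the directory name below is hypothetical):
#
#   import pickle
#   with open('output_2024-01-01-00H00M/20070101.pkl', 'rb') as f:
#       headlines = pickle.load(f)
#   # Each entry is a dict: {'ts': ..., 'title': ..., 'href': ...}
#   for item in headlines[:5]:
#       print(item['ts'], item['title'], item['href'])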