#!/usr/bin/env python
##
# Notifications parser and downloader for VirusTotal Intelligence
# This can be used to download results data and samples for VTI notifications
# matching a specific Yara ruleset name.
# --
# adam m. swanda
# https://github.com/deadbits/malware-analysis-scripts
# http://www.deadbits.org
##
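# Example usage (placeholder API key and a hypothetical ruleset name):
#   python vti-notifications.py --apikey <YOUR_VT_API_KEY> --rule MyRuleset --download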
import os
import sys
import requests
import json
import time
import argparse
import multiprocessing
def get_notifications(rule_name):
    """ Get list of VTI Hunting notifications by Yara signature name
    @param rule_name: Yara signature to find
    @type string
    @return results: all found notifications for Yara signature
    @rtype list
    """
    results = []
    req = requests.get(opts['feed'] + opts['api'])
    if req.status_code == 200:
        data = req.json()
    else:
        print 'error: \tfailed to send HTTP request (%s%s)' % (opts['feed'], opts['api'])
        print 'status:\t%d' % req.status_code
        return results
    for entry in data['notifications']:
        if entry['ruleset_name'] == rule_name:
            results.append(entry)
    return results

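# NOTE: get_notifications() and download_sample() read 'opts' and 'storage_dir'
# as module-level globals set in the __main__ block below. This works with
# multiprocessing.Pool on platforms that fork worker processes (Linux/macOS);
# under the 'spawn' start method the globals would not be inherited.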
def download_sample(_hash):
    """ Download an individual sample from VTI by hash
    @param _hash: hash of file to download
    @type string
    @return: True if the sample was written to disk, otherwise False
    @rtype bool
    """
    download_path = storage_dir + '/downloads/%s' % _hash
    download_url = opts['download'] + _hash + '&apikey=' + opts['api']
    print '\t%s' % _hash
    with open(download_path, 'wb') as fout:
        req = requests.get(download_url, stream=True)
        fout.writelines(req.iter_content(1024))
    if os.path.exists(download_path):
        return True
    return False

def create_storage(storage_dir):
    """ Create the storage directory and its downloads/ subdirectory if missing
    @param storage_dir: base directory for downloaded data and samples
    @type string
    """
    if not os.path.exists(storage_dir):
        print '+ creating storage directory (%s)' % storage_dir
        os.mkdir(storage_dir)
    if not os.path.exists(storage_dir + '/downloads'):
        print '+ creating download directory (%s/downloads)' % storage_dir
        os.mkdir(storage_dir + '/downloads')

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--storage', help='directory to store downloaded data and samples', action='store', default='./data', required=False)
    parser.add_argument('-d', '--download', help='download samples from notifications', action='store_true', default=False, required=False)
    parser.add_argument('-r', '--rule', help='rule name to fetch results for', action='store', required=True)
    parser.add_argument('-j', '--json', help='save notifications json data to disk (enabled by default)', default=True, action='store_true', required=False)
    parser.add_argument('-a', '--apikey', help='virustotal API key', action='store', required=True)
    parser.add_argument('-c', '--clear', help='clear notifications from VTI after processing', action='store_true', default=False, required=False)
    args = parser.parse_args()

    do_download = args.download
    do_clear = args.clear
    rule_name = args.rule
    keep_json = args.json
    storage_dir = args.storage
    api_key = args.apikey

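    # VT Intelligence endpoints used by this script: 'feed' returns the hunting
    # notifications feed as JSON (the API key is appended to the URL) and
    # 'download' fetches an individual sample by hash.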
    opts = {
        'api': api_key,
        'feed': 'https://www.virustotal.com/intelligence/hunting/notifications-feed/?key=',
        'download': 'https://www.virustotal.com/intelligence/download/?hash='
    }

    create_storage(storage_dir)

    print '+ checking for new notifications ...'
    found_notifications = get_notifications(rule_name)
    if len(found_notifications) == 0:
        print 'warning: no new notifications found for rule %s' % rule_name
        sys.exit(0)

    print '\n+ found %d new notifications for rule %s' % (len(found_notifications), rule_name)

    if keep_json:
        print '+ saving json data to %s/notifications.json' % storage_dir
        with open(storage_dir + '/notifications.json', 'w') as fp:
            json.dump(found_notifications, fp, indent=4)

    if do_download:
        hashes = []
        time.sleep(1)
        for entry in found_notifications:
            hashes.append(entry['md5'])
        print '\n%d samples queued for download ...' % len(hashes)
        print '+ downloading: '
        if len(hashes) > 10:
            # farm larger batches out to a pool of 10 download workers
            pool = multiprocessing.Pool(processes=10)
            results = pool.map(download_sample, hashes)
        else:
            for _hash in hashes:
                download_sample(_hash)

    if do_clear:
        print '- feature not yet implemented!'