-
Notifications
You must be signed in to change notification settings - Fork 45
/
getawslog.py
executable file
·101 lines (90 loc) · 2.8 KB
/
getawslog.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
#!/usr/bin/python
#
# Import AWS CloudTrail data to a local flat file
#
# Author: Xavier Mertens <xavier@rootshell.be>
# Copyright: GPLv3 (http://gplv3.fsf.org/)
# Feel free to use the code, but please share the changes you've made
#
# Install: boto is required to connect to S3 (http://code.google.com/p/boto/)
#
import argparse
import ConfigParser
import boto
import gzip
import os
import sys
import signal
import json
from optparse import OptionParser
def handler(signum, frame):
    """SIGINT handler: announce the interrupt and exit with status 1.

    Args:
        signum: delivered signal number (unused; required by the handler contract).
        frame: current stack frame at delivery time (unused).
    """
    # Parameter renamed from 'signal' to 'signum' so it no longer shadows
    # the imported signal module (rename is safe: the runtime calls
    # handlers positionally).
    print("SIGINT received, bye!")
    sys.exit(1)
def main(argv):
    """Download CloudTrail log archives from an S3 bucket and append their
    contents to a local flat file.

    Args:
        argv: unused; optparse reads sys.argv itself (kept for signature
            compatibility with existing callers).

    Exits with status 1 on a missing required option or an unwritable
    local log file. Prints an error (without exiting) when the bucket
    does not exist or a downloaded file cannot be removed.
    """
    parser = OptionParser(usage="usage: %prog [options]", version="%prog 1.0")
    parser.add_option('-b', '--bucket', dest='logBucket', type='string',
                      help='Specify the S3 bucket containing AWS logs')
    parser.add_option('-d', '--debug', action='store_true', dest='debug',
                      help='Increase verbosity')
    parser.add_option('-l', '--log', dest='logFile', type='string',
                      help='Local log file')
    parser.add_option('-j', '--json', action='store_true', dest='dumpJson',
                      help='Reformat JSON message (default: raw)')
    parser.add_option('-D', '--delete', action='store_true', dest='deleteFile',
                      help='Delete processed files from the AWS S3 bucket')
    (options, args) = parser.parse_args()

    if options.debug:
        print('+++ Debug mode on')

    # Both the bucket name and the local log file are mandatory.
    if options.logBucket is None:
        print('ERROR: Missing an AWS S3 bucket! (-b flag)')
        sys.exit(1)
    if options.logFile is None:
        print('ERROR: Missing a local log file! (-l flag)')
        sys.exit(1)

    if options.debug:
        print('+++ Connecting to Amazon S3')
    s3 = boto.connect_s3()

    found = False
    for bucket in s3.get_all_buckets():
        if bucket.name != options.logBucket:
            continue
        found = True
        c = s3.get_bucket(bucket.name)
        for f in c.list():
            newFile = os.path.basename(str(f.key))
            if newFile == "":
                continue  # skip "directory" placeholder keys
            if options.debug:
                print("+++ Found new log: " + newFile)
            # Download the gzipped archive next to the script, then append
            # its (decompressed) contents to the local log file.
            f.get_contents_to_filename(newFile)
            try:
                log = open(options.logFile, 'ab')
            except IOError as e:
                print("ERROR: Cannot open %s (%s)" % (options.logFile, e.strerror))
                sys.exit(1)
            try:
                # 'with' ensures the gzip handle is closed even on parse
                # errors (the original leaked it).
                with gzip.open(newFile, 'rb') as data:
                    if options.dumpJson is None:
                        # Raw mode: dump the decompressed payload verbatim.
                        log.write(data.read())
                        log.write(b"\n")
                    else:
                        # JSON mode: one line per CloudTrail record, fields
                        # rendered as "key":"value" pairs joined by commas
                        # (same output shape as the original flag-based loop).
                        j = json.load(data)
                        for item in j["Records"]:
                            line = ",".join('"%s":"%s"' % (field, item[field])
                                            for field in item)
                            # Log file is binary-mode; encode explicitly.
                            log.write(line.encode("utf-8"))
                            log.write(b"\n")
            finally:
                log.close()
            try:
                os.remove(newFile)
            except OSError as e:
                # os.remove raises OSError (IOError is merely its alias).
                print("ERROR: Cannot delete %s (%s)" % (newFile, e.strerror))
            if options.deleteFile:
                c.delete_key(f.key)
        # Bucket names are globally unique: no point scanning the rest.
        break

    if not found:
        print("ERROR: Bucket %s not found in S3!" % options.logBucket)
# Script entry point: install the Ctrl-C (SIGINT) handler first so a
# long-running S3 sync can be interrupted cleanly, then run the importer.
if __name__ == '__main__':
    signal.signal(signal.SIGINT, handler)
    main(sys.argv[1:])
    sys.exit(0)