-
Notifications
You must be signed in to change notification settings - Fork 1
/
backup.py
70 lines (63 loc) · 2.66 KB
/
backup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
"""This script zip TEMPORARY folders and files (according to an regex pattern)
and upload to Amazon S3 service (AND DELETE FOLDERS/FILES AFTER BACKUP)."""
import os
import re
import zipfile
import json
import time
import shutil
import boto3
# Load the S3 credentials/bucket configuration once at import time.
# NOTE(review): assumes config.json lives in the current working directory
# -- confirm the script is always launched from its own folder.
with open('config.json') as json_data_file:
    CONFIG = json.load(json_data_file)
def zipd(directory, path):
    """Zip the folder *directory* found under *path* and DELETE it afterwards.

    Parameters:
        directory: name of the folder (relative to *path*) to archive.
        path: parent directory; '.' is treated as the current directory.

    Returns:
        The filename of the created zip archive (directory name + '.zip').
    """
    # os.path.join inserts the separator that the original raw string
    # concatenation missed whenever *path* had no trailing slash.
    target = os.path.join('' if path == '.' else path, directory)
    filename = target + '.zip'
    # Context manager guarantees the archive is closed even if os.walk or
    # write() raises, avoiding a leaked handle / truncated zip file.
    with zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for root, _dirs, files in os.walk(target):
            for file in files:
                zipf.write(os.path.join(root, file))
    shutil.rmtree(target)
    return filename
def backupd(path, pattern, prefix=''):
    """Backup (zip + upload to S3 + delete) the top-level folders under
    *path* whose names match the regex *pattern*.

    Parameters:
        path: directory whose direct sub-folders are candidates.
        pattern: regex applied with re.search to each folder name.
        prefix: optional prefix prepended to the uploaded object key.
    """
    # Only the direct children of *path* can be handled: zipd() resolves
    # the folder as a child of *path*, so deeper matches yielded by a full
    # os.walk pointed at non-existent paths (shutil.rmtree then raised
    # FileNotFoundError), and the tree was mutated while being walked.
    # next(os.walk(...)) yields exactly the first level.
    try:
        _root, top_dirs, _files = next(os.walk(path, topdown=True))
    except StopIteration:
        return  # *path* missing or unreadable: nothing to back up
    for directory in top_dirs:
        if re.search(pattern, directory) is not None:
            backupfile = zipd(directory, path)
            upload(backupfile, prefix + directory + '.zip')
            # Delete local backup file after it was uploaded (zip)
            os.remove(backupfile)
def backupf(path, pattern, backup_filename):
    """Backup (zip + upload to S3) every file under *path* whose name
    matches the regex *pattern*, then DELETE the matched files and the
    local archive.

    Parameters:
        path: root directory to scan; '.' is treated as the current dir.
        pattern: regex applied with re.search to each file name.
        backup_filename: base name (without '.zip') of the archive / key.
    """
    base = '' if path == '.' else path
    backupfile = os.path.join(base, backup_filename + '.zip')
    # Context manager closes the archive even if a write/remove raises.
    with zipfile.ZipFile(backupfile, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for root, _dirs, files in os.walk(path, topdown=True):
            for file in files:
                if re.search(pattern, file) is not None:
                    full = os.path.join(root, file)
                    zipf.write(full)
                    # Delete the file at its real on-disk location; the
                    # original removed temppath + file, which ignored
                    # *root* and broke for files in sub-directories.
                    os.remove(full)
    upload(backupfile, backup_filename + '.zip')
    # Delete backup file after uploaded (zip)
    os.remove(backupfile)
def upload(path, key):
    """Upload the local file *path* to the configured Amazon S3 bucket.

    The object key is prefixed with the current date (YYYY/MM/DD/).
    Credentials and bucket name come from the module-level CONFIG.
    """
    credentials = CONFIG['aws']['s3']
    s3 = boto3.resource(
        's3',
        aws_access_key_id=credentials['access-key'],
        aws_secret_access_key=credentials['secret-access-key']
    )
    dated_key = time.strftime("%Y/%m/%d/") + key
    s3.meta.client.upload_file(path, credentials['bucket'], dated_key)
# Folder name examples: Backup_Relatorios_2110_2017_00_00,
# Backup_Imagens_2110_2017_00_00. File Examples: Backup_BACKUPLOG_0611_2017_00_00.log
# backupd('.', '_(.*)_(\\d{2})(\\d{2})_(\\d{4})_(\\d{2})_(\\d{2})')
# backupf('.', '_(.*)_' + time.strftime("%d%m") + '_(\\d{4})_(\\d{2})_(\\d{2})',
# 'BACKUPLOG_' + time.strftime("%d%m_%Y"))