Skip to content

Commit

Permalink
Adding Support for .zst Update (#9)
Browse files Browse the repository at this point in the history
* Adding .zst support and cleaning code

* Update README.md

* Update README.md
  • Loading branch information
JustinTimperio authored Dec 16, 2019
1 parent 269c367 commit a052237
Show file tree
Hide file tree
Showing 4 changed files with 88 additions and 86 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
# Pacback - Alpha 1.5
**TLDR: This project's ultimate goal is to provide flexible and resilient downgrades while maintaining a slim profile and fast performance.**

***Warning: [On 2019/12/27 Arch Linux will be changing its default compression method from .xz to .zst.](https://www.reddit.com/r/archlinux/comments/e7tbce/update_archdevpublic_rfc_devtools_changing) This will undoubtedly break Pacback and god knows how many other tools and scripts. I will do my best to mitigate the effects of this update but because both of these formats will be used at the same time, I'm expecting a lot of weird behavior while I work out the bugs.***
***Warning: [On 2019/12/27 Arch Linux will be changing its default compression method from .xz to .zst.](https://www.reddit.com/r/archlinux/comments/e7tbce/update_archdevpublic_rfc_devtools_changing) I've updated Pacback in advance and everything seems to be working correctly. If you run into any problems please submit an issue.***

### Index:
1. [CLI Commands](https://github.com/JustinTimperio/pacback#pacback-cli-commands-and-flags)
Expand Down
46 changes: 32 additions & 14 deletions core/pac_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
#<#><#><#><#><#><#>#<#>#<#

def pacman_Q(replace_spaces=False):
### Writes the output into /tmp, reads file, then removes file
os.system("pacman -Q > /tmp/pacman_q.meta")
l = read_list('/tmp/pacman_q.meta', typ='set')
rm_file('/tmp/pacman_q.meta', sudo=True)
Expand All @@ -18,61 +19,72 @@ def pacman_Q(replace_spaces=False):
return l

def fetch_paccache(pac_path=None):
    ### Returns the set of package archive paths found across all known caches.
    ### pac_path: optional pacback restore-point directory to scan as well.
    ### NOTE(review): relies on the project helpers search_fs/prWorking defined
    ### elsewhere in this package.

    ### Gather package files from the system cache and the user cache.
    ### Both legacy .xz and the newer .zst pacman archives are accepted.
    pac_cache = search_fs('/var/cache/pacman/pkg', 'set')
    user_cache = {f for f in search_fs('~/.cache', 'set')
                  if f.endswith(('.pkg.tar.xz', '.pkg.tar.zst'))}

    if pac_path is not None:
        ### Also include package versions stored in pacback restore points
        pacback_cache = {f for f in search_fs(pac_path, 'set')
                         if f.endswith(('.pkg.tar.xz', '.pkg.tar.zst'))}
        fs_list = pac_cache.union(user_cache, pacback_cache)
    else:
        fs_list = pac_cache.union(user_cache)

    ### Checks for duplicate packages in fs_list.
    ### Dups can appear anywhere but usually only occur when a pac_path is
    ### defined and full restore points are present.
    unique_pkgs = {p.split('/')[-1] for p in fs_list}
    if len(fs_list) == len(unique_pkgs):
        return fs_list

    prWorking('Filtering Duplicate Packages...')
    ### Keep one path per package file name. All copies are hardlinked, so any
    ### representative points at the same inode; a single pass with a dict is
    ### O(n) instead of the nested-loop O(n^2) scan.
    first_seen = {}
    for path in fs_list:
        first_seen.setdefault(path.split('/')[-1], path)
    return set(first_seen.values())

def search_paccache(pkg_list, fs_list):
    ### Returns every path in fs_list whose lowercased text matches any
    ### package name in pkg_list.
    ### Combining all package names into one alternation pattern gives a
    ### single fast scan per file; names like 'g++' contain regex
    ### metacharacters, so each one must be escaped.
    bulk_search = re.compile('|'.join(re.escape(pkg) for pkg in pkg_list))
    found_pkgs = set()
    for f in fs_list:
        ### search() stops at the first hit; findall() was used only for its
        ### truthiness and built a throwaway list of every match.
        if bulk_search.search(f.lower()):
            found_pkgs.add(f)
    return found_pkgs

def trim_pkg_list(pkg_list):
    ### Reduces full package-archive paths to bare 'name-version-release'
    ### strings so they can be compared against pacman -Q output.
    names = {pkg.split('/')[-1] for pkg in pkg_list}  ### Removes Dir Path
    ### Dropping the last '-' segment removes 'x86_64.pkg.tar.xz',
    ### 'any.pkg.tar.zst', etc.
    return {'-'.join(pkg.split('-')[:-1]) for pkg in names}


#<#><#><#><#><#><#>#<#>#<#
#+# Version Control
#<#><#><#><#><#><#>#<#>#<#

def check_pacback_version(current_version, rp_path, target_version='nil'):
### Failsafe When Meta Is Missing
if target_version == 'nil':
def check_pacback_version(current_version, rp_path, meta_exists, meta):
if meta_exists == False:
### Check for Full RP Created Before V1.5
if os.path.exists(rp_path + '.tar') or os.path.exists(rp_path + '.tar.gz'):
prError('Full Restore Points Generated Before Version 1.5.0 Are No Longer Compatible With Newer Versions of Pacback!')
prError('Without Meta Data Pacback Can\'t Upgrade This Restore Point!')
fail = True
return fail

### Parse Version if Meta Exists
else:
elif meta_exists == True:
### Find version in metadate file
for m in meta:
if m.split(':')[0] == 'Pacback Version':
target_version = m.split(':')[1]
break

### Parse version into vars
cv_major = int(current_version.split('.')[0])
cv_minor = int(current_version.split('.')[1])
cv_patch = int(current_version.split('.')[2])
Expand Down Expand Up @@ -175,6 +187,12 @@ def pacback_hook(install):
prSuccess('Pacback Hook Removed!')


#<#><#><#><#><#><#>#<#>#<#
#+# Single Package Search
#<#><#><#><#><#><#>#<#>#<#
# def find_pkg(pkg, fs_list):
# re_pkg = re.compile() re.escape()

#<#><#><#><#><#><#>#<#>#<#
#+# Better Cache Cleaning
#<#><#><#><#><#><#>#<#>#<#
Expand All @@ -195,7 +213,7 @@ def clean_cache(count, base_dir):
rps = {f for f in search_fs(base_dir + '/restore-points', 'set') if f.endswith(".meta")}

for m in rps:
### Find Create Date in Meta
### Find RP Create Date in Meta File
meta = read_list(m)
for l in meta:
if l.split(':')[0] == 'Date Created':
Expand All @@ -217,7 +235,7 @@ def clean_cache(count, base_dir):
rm_file(m, sudo=True)
rm_dir(m[:-5], sudo=True)
prSuccess('Restore Point Removed!')
prSuccess(m.split('/')[-1] + ' Passed Comparison!')
prSuccess(m.split('/')[-1] + ' Is Only ' + str(days) + ' Days Old!')


#<#><#><#><#><#><#>#<#>#<#
Expand Down
124 changes: 54 additions & 70 deletions core/pacback.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,52 +19,45 @@ def create_restore_point(rp_num, rp_full, dir_list):
rp_path = base_dir + '/restore-points/rp' + str(rp_num).zfill(2)
rp_tar = rp_path + '/' + str(rp_num).zfill(2) + '_dirs.tar'
rp_meta = rp_path + '.meta'
rp_files = set()
found_pkgs = set()
pac_size = 0
dir_size = 0

### Check for Existing Restore Points
if os.path.exists(rp_path) or os.path.exists(rp_meta):
if args.no_confirm == False:
if int(rp_num) != 0:
prWarning('Restore Point #' + str(rp_num).zfill(2) + ' Already Exists!')
if yn_frame('Do You Want to Overwrite It?') == False:
fail = True
return prError('Aborting RP Creation!'), fail
rm_file(rp_meta, sudo=True)
rm_dir(rp_path, sudo=True)
return prError('Aborting RP Creation!')
rm_file(rp_meta, sudo=True)
rm_dir(rp_path, sudo=True)

if rp_full == True:
dir_size = 0
rp_files = set()
###################################
### Find Pkgs for Restore Point ###
###################################
pac_cache = rp_path + '/pac_cache'
print('Building Full Restore Point...')
prWorking('Retrieving Current Packages...')
current_pkgs = pacman_Q(replace_spaces=True)
pkg_search = pacman_Q(replace_spaces=True)

### Search File System for Pkgs
prWorking('Bulk Scanning for ' + str(len(current_pkgs)) + ' Packages...')
found_pkgs = search_paccache(current_pkgs, fetch_paccache())

### Get Size of Pkgs Found
for p in found_pkgs:
try: pac_size += os.path.getsize(p)
except: pass
prWorking('Bulk Scanning for ' + str(len(pkg_search)) + ' Packages...')
found_pkgs = search_paccache(pkg_search, fetch_paccache())
pac_size = size_of_files(found_pkgs)

### Ask About Missing Pkgs
if len(found_pkgs) != len(current_pkgs):
if len(found_pkgs) != len(pkg_search):
if args.no_confirm == False:
pkg_split = trim_pkg_list(found_pkgs)
prError('The Following Packages Where NOT Found!')
for pkg in set(current_pkgs - pkg_split):
prWarning(pkg + ' Was NOT Found!')
if yn_frame('Do You Still Want to Continue?') == True:
pass
else:
fail=True
return prError('Aborting RP Creation!'), fail
if int(rp_num) != 0:
pkg_split = trim_pkg_list(found_pkgs)
prError('The Following Packages Where NOT Found!')
for pkg in set(pkg_search - pkg_split):
prWarning(pkg + ' Was NOT Found!')
if yn_frame('Do You Still Want to Continue?') == False:
return prError('Aborting RP Creation!')

###############################
### HardLink Packages to RP ###
Expand Down Expand Up @@ -118,37 +111,38 @@ def create_restore_point(rp_num, rp_full, dir_list):
if args.notes:
meta_list.append('Notes: ' + args.notes)

if not len(dir_list) == 0:
dir_meta = ['Dirs File Count: '+ str(len(rp_files)),
'Dirs Total Size: '+ str(convert_size(dir_size)),
'',
'========= Dir List =========']
for dir in dir_list:
dir_meta.append(dir)
meta_list.extend(dir_meta)
if len(dir_list) != 0:
meta_list.append('Dirs File Count: ' + str(len(rp_files)))
meta_list.append('Dirs Total Size: ' + convert_size(dir_size))
meta_list.append('')
meta_list.append('========= Dir List =========')
for d in dir_list:
meta_list.append(d)

meta_list.append('')
meta_list.append('======= Pacman List ========')
for pkg in current_pkgs:
meta_list.append(pkg)

### Export Final Meta File
### Export Final Meta Data File
export_list(rp_meta, meta_list)
prSuccess('Restore Point #' + str(rp_num).zfill(2) + ' Successfully Created!')
fail = False
return fail


#<#><#><#><#><#><#>#<#>#<#
#+# Rollback to RP
#<#><#><#><#><#><#>#<#>#<#

def rollback_to_rp(rp_num):
###########################
### Stage Rollback Vars ###
###########################
### Set Base Var
rp_path = base_dir + '/restore-points/rp' + str(rp_num).zfill(2)
rp_tar = rp_path + '/' + str(rp_num).zfill(2) + '_dirs.tar'
rp_meta = rp_path + '.meta'
current_pkgs = pacman_Q()

### Set Full RP Status
if os.path.exists(rp_path):
full_rp = True
Expand All @@ -162,39 +156,32 @@ def rollback_to_rp(rp_num):
meta_dirs = read_between('========= Dir List =========','======= Pacman List ========', meta)[:-1]
meta_old_pkgs = read_between('======= Pacman List ========','<Endless>', meta)

### Failsafe to Find Version
for m in meta:
if m.split(':')[0] == 'Pacback Version':
target_version = m.split(':')[1]
break
### Compare Versions
check_pacback_version(version, rp_path, target_version)
if fail == True:
return prError('Aborting Due to Version Issues!')

### Checking for New and Changed Packages
changed_pkgs = set(set(meta_old_pkgs) - current_pkgs)
meta_old_pkg_strp = {pkg.split(' ')[0] for pkg in meta_old_pkgs} ### Strip Version
current_pkg_strp = {pkg.split(' ')[0] for pkg in current_pkgs} ### Strip Version
added_pkgs = set(current_pkg_strp - meta_old_pkg_strp)

else:
meta_exists = False
added_pkgs = None
meta_old_pkgs = None
changed_pkgs = None
### Check Version When Meta is Missing
check_pacback_version(version, rp_path, target_version)
if fail == True:
return prError('Aborting Due to Version Issues!')

### Abort if No Files Found
meta = None

### Abort If No Files Are Found
if meta_exists == False and full_rp == False:
return prError('Restore Point #' + str(rp_num).zfill(2) + ' Was NOT FOUND!')

elif full_rp == True:
##########################
### Full Restore Point ###
##########################

### Compare Versions
fail = check_pacback_version(version, rp_path, meta_exists, meta)
if fail == True:
return prError('Aborting Due to Version Issues!')

######################
### Start Rollback ###
######################
if full_rp == True:
#~#~#~#~#~#~#~#~#~#~#~#~#~
#~# Full Restore Point #~#
#~#~#~#~#~#~#~#~#~#~#~#~#~
rp_cache = rp_path + '/pac_cache'

if meta_exists == True:
Expand All @@ -211,9 +198,9 @@ def rollback_to_rp(rp_num):
return prError('Skipping Advanced Features!')

elif meta_exists == True and full_rp == False:
###########################
### Light Restore Point ###
###########################
#~#~#~#~#~#~#~#~#~#~#~#~#~#
#~# Light Restore Point #~#
#~#~#~#~#~#~#~#~#~#~#~#~#~#
prWorking('Bulk Scanning for ' + str(len(meta_old_pkgs)) + ' Packages...')
found_pkgs = search_paccache({s.strip().replace(' ', '-') for s in changed_pkgs}, fetch_paccache())

Expand All @@ -233,10 +220,8 @@ def rollback_to_rp(rp_num):
if len(found_pkgs) == len(changed_pkgs):
prSuccess('All Packages Found In Your Local File System!')
os.system('sudo pacman -U ' + ' '.join(found_pkgs))

else:
pkg_split = trim_pkg_list(found_pkgs)
missing_pkg = set({s.strip().replace(' ', '-') for s in changed_pkgs} - pkg_split)
missing_pkg = set({s.strip().replace(' ', '-') for s in changed_pkgs} - trim_pkg_list(found_pkgs))

### Show Missing Pkgs
prWarning('Couldn\'t Find The Following Package Versions:')
Expand Down Expand Up @@ -271,7 +256,7 @@ def rollback_to_rp(rp_num):
def rollback_to_date(date):
### Validate Date Fromat and Build New URL
if not re.findall(r'([12]\d{3}/(0[1-9]|1[0-2])/(0[1-9]|[12]\d|3[01]))', date):
return print('Invalid Date! Date Must be YYYY/MM/DD Format.')
return prError('Invalid Date! Date Must be YYYY/MM/DD Format.')

### Backup Mirrorlist
if len(read_list('/etc/pacman.d/mirrorlist')) > 1:
Expand Down Expand Up @@ -399,9 +384,8 @@ def rollback_packages(pkg_list):
create_restore_point('00', args.full_rp, args.add_dir)

elif args.upgrade:
fail = create_restore_point('00', args.full_rp, args.add_dir)
if fail == False:
os.system('sudo pacman -Syu')
create_restore_point('00', args.full_rp, args.add_dir)
os.system('sudo pacman -Syu')

elif args.snapback:
if os.path.exists(base_dir + '/restore-points/rp00.meta'):
Expand Down
2 changes: 1 addition & 1 deletion core/python_scripts

0 comments on commit a052237

Please sign in to comment.