nyupdate.py
#!/usr/bin/python3
# Author: Jan 'jarainf' Rathner <jan@rathner.net>
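
# nyupdate watches Nyaa-style RSS feeds for new .torrent links and adds them
# to a local Transmission daemon via 'transmission-remote --add'. Feeds are
# polled periodically; entries are identified by the numeric id in their
# torrent URL, and additions that keep failing are kept in a retry queue and
# eventually written to a fail list.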
import feedparser
import re
import subprocess
import time
import sys
import signal
import os
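
# All state lives under ~/.nyupdate/. The 'feeds' and 'queue' files hold one
# "<url> @ <number>" entry per line (lines starting with '#' are comments):
# for a feed the number is the last torrent id seen, for a queued torrent it
# is the number of retry cycles so far. Torrents that exhaust their retries
# are appended to 'fails'.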
BASEDIR = os.path.expanduser('~/.nyupdate/')
FEEDFILE = BASEDIR + 'feeds'
QUEUEFILE = BASEDIR + 'queue'
FAILFILE = BASEDIR + 'fails'

# Torrent links are expected to end in a numeric id followed by '.torrent';
# the id is used to decide whether an entry is new.
NYAAREX = re.compile(r'.+?(\d+)\.torrent')

UPDATEINTERVAL = 600    # seconds between feed checks
RETRYINTERVAL = 5       # seconds between retries of a single add attempt
RETRYATTEMPTS = 5       # add attempts per torrent within one cycle
QUEUERETRIES = 5        # cycles a queued torrent is retried before giving up

# ANSI colour codes for terminal output
ERRORC = '\033[31m'
STATUSC = '\033[34m'
OKC = '\033[32m'
ENDC = '\033[0m'

INVALIDFEED = 'RSS feed %s is not reachable or invalid!'
VALIDFEED = 'RSS feed %s is now being processed!'
INVALIDLINE = 'Line %s in %s is invalid!'

def _err(string):
    return ERRORC + string + ENDC

def _stat(string):
    return STATUSC + string + ENDC

def _ok(string):
    return OKC + string + ENDC

def _get_torrents(url):
    # Return {link: title} for all entries of the feed, or False if
    # feedparser flagged the feed as malformed or unreachable (bozo bit set).
    rssfeed = feedparser.parse(url)
    if not rssfeed.bozo:
        return {entry.link: entry.title for entry in rssfeed.entries}
    else:
        return False

def _check_queue(queue):
    # Retry every torrent that previously failed to be added. Torrents that
    # succeed, or that have exhausted QUEUERETRIES cycles, are dropped from
    # the queue; the latter are recorded in FAILFILE.
    if not queue:
        return queue
    print(_stat('Retrying torrents from queue...'))
    deletions = []
    for torrent, tries in queue.items():
        print('Attempting to add queued torrent \'%s\'...' % torrent)
        if _addtorrent(torrent):
            print(_ok('Success!'))
            deletions.append(torrent)
        else:
            queue[torrent] += 1
            if tries >= QUEUERETRIES:
                print(_err('Failed to queue torrent after %d tries and %d cycles, human intervention required.' % (RETRYATTEMPTS, tries)))
                _append_file(torrent, FAILFILE)
                deletions.append(torrent)
            else:
                print(_err('Failed to queue torrent after %d tries and %d cycles, retrying in the next cycle.' % (RETRYATTEMPTS, tries)))
    for deletion in deletions:
        del queue[deletion]
    _write_file(queue, QUEUEFILE)
    return queue

def _check_rss(feeds):
    # Check every feed for entries newer than the last seen torrent id and
    # hand them to transmission. Failed additions go into the retry queue.
    global _queue
    for feed, last in feeds.items():
        data = _get_torrents(feed)
        if not data:
            print(_err(INVALIDFEED % feed))
            continue
        print(VALIDFEED % feed)
        newlast = last
        # Process entries in ascending id order (numeric, not lexicographic).
        for url, title in sorted(data.items(), key=lambda x: int(NYAAREX.match(x[0]).group(1))):
            tuid = int(NYAAREX.match(url).group(1))
            if tuid <= last:
                continue
            if tuid > newlast:
                newlast = tuid
            print(_ok('Adding %s to queue!' % title))
            if not _addtorrent(url):
                print(_err('Failed to queue torrent after %d tries, skipping.' % RETRYATTEMPTS))
                _queue[url] = 0
                _append_file(url, QUEUEFILE)
        feeds[feed] = newlast
    return feeds

def _addtorrent(url):
    # Ask the local transmission daemon to add the torrent, making up to
    # RETRYATTEMPTS attempts in total. Returns True on success.
    exitcode = subprocess.call(['transmission-remote', '--add', url])
    for _ in range(RETRYATTEMPTS - 1):
        if exitcode == 0:
            break
        print(_err('Failed to queue torrent, retrying in %d seconds.' % RETRYINTERVAL))
        time.sleep(RETRYINTERVAL)
        exitcode = subprocess.call(['transmission-remote', '--add', url])
    return exitcode == 0

def _read_file(dfile):
    # Parse a state file of "<key> @ <value>" lines into a dict, ignoring
    # comments and blank lines. A key without a value defaults to 0.
    data = {}
    with open(dfile, 'r') as f:
        for a_line in f:
            line = ''.join(a_line.split())
            if line.startswith('#') or line == '':
                continue
            parsed = line.split('@')
            if len(parsed) < 2:
                data[parsed[0]] = 0
            elif len(parsed) == 2:
                try:
                    data[parsed[0]] = int(parsed[1])
                except ValueError:
                    print(_err(INVALIDLINE % (line, dfile)))
            else:
                print(_err(INVALIDLINE % (line, dfile)))
    return data

def _append_file(data, dfile):
    with open(dfile, 'a') as f:
        f.write(data + os.linesep)

def _write_file(data, dfile):
    # Rewrite a state file: keep the comment lines of the existing file, then
    # write one "<key> @ <value>" entry per line. The new content goes to a
    # temporary file first, which is then moved over the original.
    with open(dfile, 'r') as f:
        lines = f.read().split(os.linesep)
    with open(dfile + '.new', 'w') as f:
        for line in lines:
            if line.startswith('#'):
                f.write(line + os.linesep)
        f.write(os.linesep)
        for key, value in data.items():
            f.write(key + ' @ ' + str(value) + os.linesep)
    os.rename(dfile + '.new', dfile)

def _signals(signum=None, frame=None):
    # SIGHUP reloads the feed file; SIGTERM/SIGINT/SIGQUIT write the current
    # state back to disk and exit.
    global _parsed_feeds
    _parsed_feeds = _reload_config(_parsed_feeds)
    if signum == signal.SIGHUP:
        print('Reloaded feed information.')
    else:
        print('Program is stopping now.')
        _write_file(_parsed_feeds, FEEDFILE)
        print('Program has successfully terminated!')
        sys.exit(0)

def _reload_config(memfeeds):
    # Re-read FEEDFILE, but keep the in-memory 'last seen id' for feeds that
    # are already known.
    feeds = _read_file(FEEDFILE)
    for feed in feeds:
        if feed in memfeeds:
            feeds[feed] = memfeeds[feed]
    return feeds

def main():
    # drv_libxml2 appears to be broken with Python 3; drop it from feedparser's
    # parser preference list if it is registered.
    if 'drv_libxml2' in feedparser.PREFERRED_XML_PARSERS:
        feedparser.PREFERRED_XML_PARSERS.remove('drv_libxml2')
    global _parsed_feeds
    _parsed_feeds = _read_file(FEEDFILE)
    global _queue
    _queue = _read_file(QUEUEFILE)
    # Reload on SIGHUP, save state and exit on the other signals.
    for sig in [signal.SIGTERM, signal.SIGINT, signal.SIGQUIT, signal.SIGHUP]:
        signal.signal(sig, _signals)
    while True:
        print(_stat('Checking feeds now...'))
        _parsed_feeds = _check_rss(_parsed_feeds)
        _queue = _check_queue(_queue)
        print(_stat('Checking again in %.2f minutes.' % (UPDATEINTERVAL / 60)))
        time.sleep(UPDATEINTERVAL)

if __name__ == '__main__':
    main()