-
Notifications
You must be signed in to change notification settings - Fork 1
/
game_sync.py
735 lines (575 loc) · 28.1 KB
/
game_sync.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
import argparse
import base64
import functools
import gzip
import io
import logging
import os
import re
import shutil
import socket
import stat
import subprocess
import time
import zipfile
import zlib
from pathlib import Path
from typing import Optional, Union
import dateutil.parser
import requests
import ujson
from deepdiff import DeepDiff
import db
import main
import utils
import validation
from constants import admin_user_id
from utils import plural
def run_syncs():
    """Entry point: set up logging, select projects, and run a sync test on each.

    With a positional CLI argument, tests only that project (numeric ID, or a
    name resolved through the DB); with --all, forces every check to run even
    if nothing appears to have changed since the last run. On any uncaught
    error the game is closed and mod/save state cleaned up before re-raising.
    """
    global log, game_sync_hash
    start_time = time.time()
    log = main.create_logger('game_sync')
    parser = argparse.ArgumentParser()
    parser.add_argument('project', help="Only sync test a specific project (ID or name, use quotes if need be)", nargs='?')
    parser.add_argument('--all', action='store_true', help="Run all sync checks", default=False)
    # parse once instead of calling parse_args() twice (original parsed per-option)
    args = parser.parse_args()
    cli_project = args.project
    force_run_all = args.all
    # hash this file so the environment state changes (and tests rerun) whenever the checker itself changes
    with open('game_sync.py', 'rb') as game_sync_py:
        game_sync_hash = zlib.adler32(game_sync_py.read())
    if cli_project:
        if cli_project.isdigit():
            log.info(f"Running sync test for project ID {cli_project} only")
            test_project_ids = (int(cli_project),)
        else:
            test_project_ids = [int(p['project_id']) for p in db.projects.get_by_name_or_id(cli_project)]
            log.info(f"Running sync test for project ID{plural(test_project_ids)} {test_project_ids} only")
    else:
        projects = db.projects.dict()
        test_project_ids = []
        # newest activity first; only projects with validation enabled and a non-empty path cache
        for project_id in sorted(projects, key=lambda x: projects[x]['last_commit_time'], reverse=True):
            project = projects[project_id]
            if project['do_run_validation'] and db.path_caches.get(project_id):
                test_project_ids.append(project_id)
        all_sync_tests = {project_id: projects[project_id]['name'] for project_id in test_project_ids}
        log.info(f"Running all sync tests: {all_sync_tests} (forcing all: {force_run_all})")
    update_everest()
    try:
        for project_id in test_project_ids:
            sync_test(project_id, cli_project or force_run_all)
    except Exception:
        # report the failure, make sure the game and mod state don't linger, then surface the crash
        log_error()
        close_game()
        post_cleanup()
        raise
    post_cleanup()
    log.info(f"All sync checks time: {format_elapsed_time(start_time)}")
def sync_test(project_id: int, force: bool):
    """Run a full sync check for one project: launch the game with the
    project's mods, play every TAS file in its path cache, and record desyncs,
    updated fullgame times, and crash logs to the DB.

    The run's log output is captured in a StringIO (token-redacted) so it can
    be uploaded with the sync result. `force` bypasses the early-out checks
    (validation disabled, unchanged environment state, empty path cache).
    """
    start_time = time.time()
    # capture this run's log output so it can be attached to the sync result
    current_log = io.StringIO()
    stream_handler = logging.StreamHandler(current_log)
    stream_handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s: %(message)s'))
    log.addHandler(stream_handler)
    project = db.projects.get(project_id)
    if not project['do_run_validation'] and not force:
        log.info(f"Abandoning sync test for project \"{project['name']}\" due to it now being disabled")
        consider_disabling_after_inactivity(project, time.time(), True)
        return
    log.info(f"Running sync test for project: {project['name']}")
    mods = project['mods']
    repo = project['repo']
    previous_desyncs = project['desyncs']
    prev_environment_state = project['sync_environment_state']
    filetimes = {}
    desyncs = []
    # tooling mods are always loaded alongside the project's own mods
    mods_to_load = set(mods)
    mods_to_load |= {'CelesteTAS', 'SpeedrunTool', 'AltEnterFullscreen', 'HelperTestMapHider'}
    files_timed = 0
    remove_save_files()
    queued_update_commits = []
    crash_logs_data = {}
    crash_logs_dir = f'{game_dir()}\\CrashLogs'
    # the maingame improvements project gets extra time logging and commit notifications
    project_is_maingame = project_id == 598945702554501130
    get_mod_dependencies.cache_clear()
    for mod in mods:
        mods_to_load |= get_mod_dependencies(mod)
    main.generate_request_headers(project['installation_owner'], 300)
    environment_state = generate_environment_state(project, mods_to_load)
    if environment_state['last_commit_time'] > project['last_commit_time']:
        log.info(f"Last repo commit time is later than improvement channel post ({environment_state['last_commit_time']} > {int(project['last_commit_time'])}), updating project")
        project['last_commit_time'] = environment_state['last_commit_time']
        db.projects.set(project_id, project)
    if environment_state == prev_environment_state and not force:
        log.info(f"Abandoning sync test for project \"{project['name']}\" due to environment state matching previous run")
        consider_disabling_after_inactivity(project, time.time(), True)
        return
    log.info(f"Environment state changes: {DeepDiff(prev_environment_state, environment_state, ignore_order=True, ignore_numeric_type_changes=True, verbose_level=2)}")
    get_mod_everest_yaml.cache_clear()
    generate_blacklist(mods_to_load)
    log.info(f"Created blacklist, launching game with {len(mods_to_load)} mod{plural(mods_to_load)}")
    close_game()
    start_game()
    # make sure path cache is correct while the game is launching
    main.generate_path_cache(project_id)
    path_cache = db.path_caches.get(project_id)
    if not path_cache and not force:
        log.info(f"Abandoning sync test due to path cache now being empty")
        consider_disabling_after_inactivity(project, time.time(), True)
        close_game()
        return
    # clone repo
    repo_cloned = repo.partition('/')[2]
    repo_path = f'{game_dir()}\\repos\\{repo_cloned}'
    if not os.path.isdir(f'{game_dir()}\\repos'):
        os.mkdir(f'{game_dir()}\\repos')
    elif os.path.isdir(repo_path):
        # remove any previous clone; del_rw clears read-only bits on git object files
        shutil.rmtree(repo_path, onexc=del_rw)
        time.sleep(0.1)
    cwd = os.getcwd()
    os.chdir(f'{game_dir()}\\repos')
    clone_time = time.time()
    subprocess.run(f'git clone --depth=1 --recursive https://github.com/{repo}', capture_output=True)
    os.chdir(cwd)
    log.info(f"Cloned repo to {repo_path}")
    asserts_added = {}
    # add asserts for cached SIDs
    try:
        sid_cache = db.sid_caches.get(project_id, consistent_read=False)
        log.info(f'Loaded {len(sid_cache)} cached SIDs')
    except db.DBKeyError:
        sid_cache = {}
        log.info("Created SID cache entry")
    for tas_filename in path_cache:
        file_path_repo = path_cache[tas_filename]
        if file_path_repo in sid_cache:
            with open(f'{repo_path}\\{file_path_repo}'.replace('/', '\\'), 'r+') as tas_file:
                tas_lines = tas_file.readlines()
                sid = sid_cache[file_path_repo]
                for tas_line in enumerate(tas_lines):
                    if tas_line[1].lower() == '#start\n':
                        # insert a CelesteTAS Assert shortly after #Start so a wrong-map run fails fast
                        assert_line = f'Assert,Equal,{sid},{{Level.Session.Area.SID}}\n'
                        assert_line_num = tas_line[0] + 3
                        tas_lines.insert(assert_line_num, assert_line)
                        tas_file.seek(0)
                        tas_file.writelines(tas_lines)
                        asserts_added[file_path_repo] = (assert_line_num, assert_line)
                        break
    if asserts_added:
        log.info(f"Added SID assertions to {len(asserts_added)} file{plural(asserts_added)}: {asserts_added}")
    game_process = wait_for_game_load(mods_to_load, project['name'])
    for tas_filename in path_cache:
        file_path_repo = path_cache[tas_filename]
        file_path_repo_backslashes = file_path_repo.replace('/', '\\')
        file_path = f'{repo_path}\\{file_path_repo_backslashes}'
        # skip lobby-routing files that aren't themselves lobby files, and known-excluded files
        if 'lobby' in file_path_repo.lower() and 'lobby' not in tas_filename.lower():
            log.info(f"Skipping {tas_filename} (lobby)")
            continue
        elif tas_filename in ('translocation.tas', 'mt_celeste_jazz_club.tas'):
            continue
        with open(file_path, 'r', encoding='UTF8') as tas_file:
            tas_lines = tas_file.readlines()
        # set up tas file
        tas_parsed = validation.parse_tas_file(tas_lines, False, False)
        if tas_parsed.found_finaltime:
            finaltime_line_lower = tas_lines[tas_parsed.finaltime_line_num].lower()
            has_filetime = finaltime_line_lower.startswith('filetime')
            finaltime_is_midway = finaltime_line_lower.startswith('midway')
            # blank out the recorded time so the game writes a fresh one to compare against
            finaltime_line_blank = f'{tas_lines[tas_parsed.finaltime_line_num].partition(' ')[0]} \n'
            tas_lines_og = tas_lines.copy()
            tas_lines[tas_parsed.finaltime_line_num] = finaltime_line_blank
            if has_filetime or finaltime_is_midway:
                clear_debug_save()
            if has_filetime:
                has_console_load = [line for line in tas_lines if line.startswith('console load')] != []
                if not has_console_load:
                    # if it starts from begin, then menu there. doesn't change mod
                    tas_lines[:0] = ['unsafe\n', 'console overworld\n', '2\n', '1,J\n', '94\n', '1,J\n', '56\n', 'Repeat 20\n', '1,D\n', '1,F,180\n', 'Endrepeat\n', '1,J\n', '14\n']
        else:
            log.info(f"{tas_filename} has no final time")
            continue
        tas_lines.insert(0, f'Set,CollabUtils2.DisplayEndScreenForAllMaps,{not has_filetime}\n')
        tas_lines.append('\n***')
        with open(file_path, 'w', encoding='UTF8') as tas_file:
            tas_file.truncate()
            tas_file.write(''.join(tas_lines))
        # now run it
        time.sleep(0.5)
        initial_mtime = os.path.getmtime(file_path)
        file_sync_start_time = time.time()
        log.info(f"Sync checking {tas_filename} ({tas_parsed.finaltime_trimmed})")
        tas_started = False
        tas_finished = False
        sid = None
        game_crashed = False
        while not tas_started and not game_crashed:
            try:
                requests.post(f'http://localhost:32270/tas/playtas?filePath={file_path}', timeout=10)
            except requests.RequestException:
                if not game_process.is_running():
                    game_crashed = True
            else:
                # snapshot crash logs now, to detect new ones appearing during the run
                crash_logs = os.listdir(crash_logs_dir)
                tas_started = True
        while not tas_finished and not game_crashed:
            if time.time() - file_sync_start_time > 3600 * 5:
                raise TimeoutError(f"File {tas_filename} in project {project['name']} has frozen after five hours")
            try:
                scaled_sleep(20 if has_filetime else 5)
                session_data = requests.get('http://localhost:32270/tas/info', timeout=2).text
            except requests.RequestException:
                if not game_process.is_running():
                    game_crashed = True
            else:
                tas_finished = 'Running: False' in session_data
                if not has_filetime:
                    sid = session_data.partition('SID: ')[2].partition(' (')[0]
        log.info("TAS has finished")
        files_timed += 1
        scaled_sleep(15 if has_filetime or 'SID: ()' in session_data else 5)
        # wait (with backoff) for CelesteTAS to write the updated time back to the file
        extra_sleeps = 0
        while not game_crashed and os.path.getmtime(file_path) == initial_mtime and extra_sleeps < 5:
            time.sleep(3 + (extra_sleeps ** 2))
            extra_sleeps += 1
            log.info(f"Extra sleeps: {extra_sleeps}")
        updated_crash_logs = os.listdir(crash_logs_dir)
        if game_crashed or len(updated_crash_logs) > len(crash_logs):
            new_crash_logs = [file for file in updated_crash_logs if file not in crash_logs]
            log.warning(f"Game crashed ({new_crash_logs}), restarting and continuing")
            desyncs.append((tas_filename, "Crashed game"))
            scaled_sleep(10)
            close_game()
            scaled_sleep(5)
            start_game()
            for new_crash_log_name in new_crash_logs:
                with open(f'{crash_logs_dir}\\{new_crash_log_name}', 'rb') as new_crash_log:
                    crash_logs_data[f'{new_crash_log_name}.gz'] = gzip.compress(new_crash_log.read())
            game_process = wait_for_game_load(mods_to_load, project['name'])
            continue
        # determine if it synced or not
        with open(file_path, 'rb') as tas_file:
            tas_updated = validation.as_lines(tas_file.read())
        tas_parsed_new = validation.parse_tas_file(tas_updated, False, False, tas_parsed.finaltime_type)
        # for silvers
        if has_filetime:  # or tas_lines[tas_parsed.finaltime_line_num].lower().startswith('midway'):
            clear_debug_save()
        if not tas_parsed_new.found_finaltime:
            log.warning(f"Desynced (no {finaltime_line_blank.partition(':')[0]})")
            log.info(session_data.partition('<pre>')[2].partition('</pre>')[0])
            desyncs.append((tas_filename, None))
            continue
        frame_diff = validation.calculate_time_difference(tas_parsed_new.finaltime, tas_parsed.finaltime)
        time_synced = frame_diff == 0
        if has_filetime or project_is_maingame:
            log.info(f"Time: {tas_parsed_new.finaltime_trimmed}")
        if has_filetime:
            filetimes[tas_filename] = tas_parsed_new.finaltime_trimmed
            if not time_synced:
                new_time_line = tas_updated[tas_parsed_new.finaltime_line_num]
                tas_lines_og[tas_parsed.finaltime_line_num] = f'{new_time_line}\n'
                commit_message = f"{'+' if frame_diff > 0 else ''}{frame_diff}f {tas_filename} ({tas_parsed_new.finaltime_trimmed})"
                queued_update_commits.append((file_path_repo, tas_lines_og, commit_message))
                # don't commit now, since there may be desyncs
        else:
            if not tas_parsed_new.finaltime_frames:
                log_error(f"Couldn't parse FileTime frames for {file_path_repo}")
                continue
            log_command = log.info if time_synced else log.warning
            time_delta = (f"{tas_parsed.finaltime_trimmed}({tas_parsed.finaltime_frames}) -> {tas_parsed_new.finaltime_trimmed}({tas_parsed_new.finaltime_frames}) "
                          f"({'+' if frame_diff > 0 else ''}{frame_diff}f)")
            log_command(f"{'Synced' if time_synced else 'Desynced'}: {time_delta}")
            if time_synced:
                if file_path_repo not in sid_cache and sid:
                    sid_cache[file_path_repo] = sid
                    db.sid_caches.set(project_id, sid_cache)
                    log.info(f"Cached SID for {file_path_repo}: {sid}")
                elif not sid:
                    log.warning(f"Running {file_path_repo} yielded no SID")
            else:
                desyncs.append((tas_filename, time_delta))
    close_game()
    project = db.projects.get(project_id)  # update this, in case it has changed since starting
    project['sync_environment_state'] = environment_state
    project['filetimes'] = filetimes
    project['last_run_validation'] = int(clone_time)
    project['desyncs'] = [desync[0] for desync in desyncs]
    new_desyncs = [d for d in desyncs if d[0] not in previous_desyncs]
    log.info(f"All desyncs: {desyncs}")
    log.info(f"New desyncs: {new_desyncs}")
    report_text = report_log = None
    if new_desyncs:
        new_desyncs_formatted = format_desyncs(new_desyncs)
        desyncs_formatted = format_desyncs(desyncs)
        desyncs_block = '' if desyncs == new_desyncs else f"\nAll desyncs:\n```\n{desyncs_formatted}```"
        report_text = f"Sync check found {len(new_desyncs)} new desync{plural(new_desyncs)} ({files_timed} file{plural(files_timed)} tested):" \
                      f"\n```\n{new_desyncs_formatted}```{desyncs_block}"[:1900]
        stream_handler.flush()
        # redact any GitHub tokens before storing the captured log
        report_log = gzip.compress(re_redact_token.sub("'token': [REDACTED]", current_log.getvalue()).encode('UTF8'))
    disabled_text = consider_disabling_after_inactivity(project, clone_time, False)
    db.projects.set(project_id, project)
    db.sync_results.set(project_id, {'report_text': report_text, 'disabled_text': disabled_text, 'log': report_log, 'crash_logs': crash_logs_data})
    log.info("Wrote sync result to DB")
    # commit updated fullgame files
    for queued_commit in queued_update_commits:
        file_path_repo, lines, commit_message = queued_commit
        lines_joined = ''.join(lines)
        desyncs_found = [d for d in desyncs if d[0][:-4] in lines_joined]
        # but only if all the files in them sync
        if desyncs_found:
            log.info(f"Not committing updated fullgame file {file_path_repo} due to desyncs: {desyncs_found}")
            continue
        main.generate_request_headers(project['installation_owner'], 300)
        commit_data = {'content': base64.b64encode(lines_joined.encode('UTF8')).decode('UTF8'),
                       'sha': main.get_sha(repo, file_path_repo),
                       'message': commit_message}
        log.info(f"Committing updated fullgame file: \"{commit_data['message']}\"")
        r = requests.put(f'https://api.github.com/repos/{repo}/contents/{file_path_repo}', headers=main.headers, data=ujson.dumps(commit_data))
        utils.handle_potential_request_error(r, 200)
        commit_url = ujson.loads(r.content)['commit']['html_url']
        log.info(f"Successfully committed: {commit_url}")
        if project_is_maingame:
            db.sync_results.set(int(time.time()), {'user_ids': (admin_user_id, 234520815658336258), 'message': f"Committed `{commit_message}` <{commit_url}>"})
    log.info(f"Sync check time: {format_elapsed_time(start_time)}")
def clear_debug_save():
    """Wipe the debug save via the CelesteTAS HTTP console.

    Silently gives up if the game isn't reachable (not running / not loaded).
    """
    try:
        for console_command in ('overworld', 'clrsav'):
            requests.post(f'http://localhost:32270/console?command={console_command}', timeout=10)
            scaled_sleep(4)
        log.info("Cleared debug save")
        scaled_sleep(4)
    except (requests.Timeout, requests.ConnectionError):
        pass
def format_desyncs(desyncs: list) -> str:
    """Render (filename, detail) desync pairs as newline-separated text.

    A pair with a falsy detail renders as just the filename.
    """
    return '\n'.join(f'{entry[0]}: {entry[1]}' if entry[1] else entry[0] for entry in desyncs)
def generate_blacklist(mods_to_load: set):
    """Write Mods/blacklist.txt so Everest loads only the mods in `mods_to_load`."""
    zipped_mods = (entry for entry in os.listdir(f'{game_dir()}\\Mods') if entry.endswith('.zip'))
    blacklisted = [mod_zip for mod_zip in zipped_mods if mod_zip.removesuffix('.zip') not in mods_to_load]
    with open(f'{game_dir()}\\Mods\\blacklist.txt', 'w') as blacklist_txt:
        blacklist_txt.write("# This file has been created by the Improvements Tracker\n")
        blacklist_txt.write('\n'.join(blacklisted))
# remove all files related to any save
def remove_save_files():
    """Delete debug saves and nonzero-numbered save slot files from the Saves dir."""
    saves_dir = f'{game_dir()}\\Saves'

    def is_save_file(filename: str) -> bool:
        # debug* files, plus any file whose name starts with a nonzero digit (slot saves)
        return filename.startswith('debug') or (filename[0].isdigit() and filename[0] != '0')

    save_files = [f'{saves_dir}\\{name}' for name in os.listdir(saves_dir) if is_save_file(name)]
    for save_file in save_files:
        os.remove(save_file)
    try:
        log.info(f"Removed {len(save_files)} save files")
    except AttributeError:
        # `log` may still be the pre-init placeholder when this runs outside run_syncs()
        pass
def post_cleanup():
    """Restore a clean game install: blacklist every mod, then wipe save files."""
    generate_blacklist(set())
    remove_save_files()
def del_rw(function, path, excinfo):
    """`shutil.rmtree` onexc handler: clear the read-only bit and retry.

    Git working trees contain read-only object files that rmtree can't delete
    on Windows. Re-invoking the failed os function (rather than hard-coding
    os.remove) also handles the case where the failed operation was a
    directory removal, per the stdlib's documented recipe.
    """
    os.chmod(path, stat.S_IWRITE)
    function(path)
def start_game():
    """Launch Celeste detached from this process."""
    celeste_exe = f'{game_dir()}\\Celeste.exe'
    # creationflags=0x10 (CREATE_NEW_CONSOLE): Popen returns immediately rather than waiting for the game to exit
    subprocess.Popen(celeste_exe, creationflags=0x00000010)
def wait_for_game_load(mods: set, project_name: str):
    """Poll the CelesteTAS HTTP server until the game responds, then return the
    Celeste process handle (set to below-normal priority).

    Raises TimeoutError if the game hasn't responded within an hour.
    """
    import psutil
    game_loaded = False
    last_game_loading_notify = time.perf_counter()
    wait_start_time = time.time()
    while not game_loaded:
        try:
            scaled_sleep(5)
            # the CelesteTAS server on port 32270 only answers once the game is up
            requests.get('http://localhost:32270/', timeout=2)
        except requests.RequestException:
            current_time = time.perf_counter()
            if current_time - last_game_loading_notify > 60:
                last_game_loading_notify = current_time
            if time.time() - wait_start_time > 3600:
                raise TimeoutError(f"Game failed to load after an hour for project {project_name}")
        else:
            game_loaded = True
    mod_versions_start_time = time.perf_counter()
    scaled_sleep(5)
    log.info(f"Game loaded, mod versions: {mod_versions(mods)}")
    # keep total post-load settling time at ~10s regardless of how long the version query took
    scaled_sleep(max(0, 10 - (time.perf_counter() - mod_versions_start_time)))
    for process in psutil.process_iter(['name']):
        if process.name() == 'Celeste.exe':
            # lower priority so sync checking doesn't starve the host machine
            process.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
            log.info("Set game process to low priority")
            return process
def close_game():
    """Kill any running Celeste (and Celeste Studio) processes found via tasklist."""
    import psutil
    closed = False
    try:
        # https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/tasklist
        processes = str(subprocess.check_output('tasklist /fi "STATUS eq running"')).split(r'\r\n')
    except subprocess.CalledProcessError:
        processes = []
        log_error()
    for process_line in processes:
        if '.exe' not in process_line:
            continue
        # tasklist rows look like "Name.exe    PID Session ..." — split on '.exe' for name and PID
        process_name = process_line.split('.exe')[0]
        process_pid = int(process_line.split('.exe')[1].split()[0])
        if process_name == 'Celeste':
            try:
                psutil.Process(process_pid).kill()
                log.info("Closed Celeste")
                closed = True
            except psutil.NoSuchProcess:
                # process exited between listing and kill
                log_error()
        elif 'studio' in process_name.lower() and 'celeste' in process_name.lower():
            try:
                psutil.Process(process_pid).kill()
                log.info("Closed Studio")
            except psutil.NoSuchProcess:
                log_error()
    if not closed:
        log.info("No running game to close")
@functools.cache
def get_mod_dependencies(mod: str) -> set:
    """Recursively collect the names of every mod `mod` depends on.

    Everest/Celeste core entries are excluded; mods with no readable
    everest.yaml contribute nothing.
    """
    yaml_data = get_mod_everest_yaml(mod)
    collected = set()
    if not yaml_data or 'Dependencies' not in yaml_data:
        return collected
    for dependency in yaml_data['Dependencies']:
        dependency_name = dependency['Name']
        if dependency_name in ('Everest', 'EverestCore', 'Celeste'):
            continue
        collected.add(dependency_name)
        collected |= get_mod_dependencies(dependency_name)
    return collected
@functools.cache
def get_mod_everest_yaml(mod: str, zip_path: Optional[Path] = None) -> Optional[dict]:
if not zip_path:
zip_path = mods_dir().joinpath(f'{mod}.zip')
if not zip_path.is_file():
return None
with zipfile.ZipFile(zip_path) as mod_zip:
yaml_name = None
if zipfile.Path(mod_zip, 'everest.yaml').is_file():
yaml_name = 'everest.yaml'
elif zipfile.Path(mod_zip, 'everest.yml').is_file():
yaml_name = 'everest.yml'
if yaml_name:
with mod_zip.open(yaml_name) as everest_yaml:
import yaml
return yaml.safe_load(everest_yaml)[0]
else:
return None
def mod_versions(mods: set) -> str:
    """Return a sorted, comma-separated "mod = version" listing for `mods`."""
    entries = []
    for mod in mods:
        yaml_data = get_mod_everest_yaml(mod)
        version = yaml_data['Version'] if yaml_data else "UNKNOWN"
        entries.append(f"{mod} = {version}")
    return ", ".join(sorted(entries))
def generate_environment_state(project: dict, mods: set) -> dict:
log.info("Generating environment state")
state = {'host': socket.gethostname(), 'last_commit_time': 0, 'everest_version': None, 'mod_versions': {}, 'game_sync_hash': game_sync_hash,
'excluded_items': project['excluded_items'], 'installation_owner': project['installation_owner'], 'is_lobby': project['is_lobby'], 'repo': project['repo'],
'subdir': project['subdir']}
try:
r_commits = requests.get(f'https://api.github.com/repos/{project['repo']}/commits', headers=main.headers, params={'per_page': 1}, timeout=10)
utils.handle_potential_request_error(r_commits, 200)
except requests.RequestException:
log_error()
return project['sync_environment_state']
if r_commits.status_code == 200:
commit = ujson.loads(r_commits.content)
state['last_commit_time'] = int(dateutil.parser.parse(commit[0]['commit']['author']['date']).timestamp())
state['everest_version'] = latest_everest_stable_version()
gb_mods = gb_mod_versions()
if not gb_mods:
gb_mod_versions.cache_clear()
for mod in mods:
if mod in gb_mods:
mod_gb = gb_mods[mod]
else:
mod_gb = gb_mods[mod.replace('_', ' ')]
state['mod_versions'][mod] = mod_gb['Version']
log.info(f"Done: {state}")
assert len(state['mod_versions']) == len(mods)
return state
def update_everest():
latest_everest = latest_everest_stable_version()
last_everests = db.misc.get('sync_check_everest_versions')
host_name = socket.gethostname()
last_everest = last_everests[host_name] if host_name in last_everests else None
if last_everest != latest_everest:
log.info(f"Updating Everest from {last_everest} to {latest_everest}")
everest_install = subprocess.run('mons install itch stable', capture_output=True)
log.info(f"Installed: {everest_install.stderr.partition(b'\r')[0].decode('UTF8')}")
last_everests[host_name] = latest_everest
db.misc.set('sync_check_everest_versions', last_everests)
else:
log.info(f"Everest version: {latest_everest}")
def consider_disabling_after_inactivity(project: dict, reference_time: Union[int, float], from_abandoned: bool) -> Optional[str]:
    """Disable a project's sync checking after ~1 month (2629800 s) without commits.

    Mutates `project` in place. When called from an abandoned sync test
    (`from_abandoned`), also persists the project and a sync result itself and
    returns None; otherwise returns the user-facing notice text for the caller
    to store. Returns None when nothing was disabled.
    """
    inactivity_seconds = int(reference_time) - int(project['last_commit_time'])
    disabled_text = ("Disabled sync checking after a month of no improvements. If you would like to reenable it, rerun the `/register_project` command. "
                     "Otherwise, it will be automatically reenabled on the next valid improvement/draft.")
    if not project['do_run_validation'] or inactivity_seconds <= 2629800:
        return None
    project['do_run_validation'] = False
    project['sync_check_timed_out'] = True
    log.warning(f"Disabled auto sync check after {inactivity_seconds} seconds of inactivity")
    if not from_abandoned:
        # caller already holds the mutated `project` dict; just hand back the notice
        return disabled_text
    db.projects.set(project['project_id'], project)
    db.sync_results.set(project['project_id'], {'report_text': None, 'disabled_text': disabled_text})
def format_elapsed_time(start_time: float) -> str:
    """Return the wall-clock time elapsed since `start_time` as 'Xh Ym'."""
    elapsed = time.time() - start_time
    whole_hours = int(elapsed // 3600)
    whole_minutes = int((elapsed % 3600) / 60)
    return f"{whole_hours}h {whole_minutes}m"
@functools.cache
def latest_everest_stable_version() -> Optional[int]:
    """Return the newest successful stable Everest version, or None if the Azure query fails."""
    query = {'statusFilter': 'completed', 'resultFilter': 'succeeded', 'branchName': 'refs/heads/stable', 'definitions': 3}
    try:
        r_everest = requests.get('https://dev.azure.com/EverestAPI/Everest/_apis/build/builds', headers={'Content-Type': 'application/json'}, params=query, timeout=10)
        utils.handle_potential_request_error(r_everest, 200)
    except requests.RequestException:
        log_error()
        return None
    everest_builds = ujson.loads(r_everest.content)
    # Everest version numbers are the Azure build ID offset by 700
    return everest_builds['value'][0]['id'] + 700
@functools.cache
def gb_mod_versions() -> Optional[dict]:
    """Download and parse the mod updater's version index, or None on failure."""
    try:
        r_mods = requests.get('https://maddie480.ovh/celeste/everest_update.yaml', timeout=60)
        utils.handle_potential_request_error(r_mods, 200)
    except requests.RequestException:
        log_error()
        return None
    else:
        import yaml
        return yaml.safe_load(r_mods.content)
@functools.cache
def mods_dir() -> Path:
    """Locate the Celeste Mods directory on whichever known host this runs on.

    Raises FileNotFoundError when none of the known locations exist.
    """
    candidates = (Path('D:/celeste/Mods'),
                  Path('G:/celeste/Mods'),
                  Path('C:/Users/Administrator/Desktop/mods'),
                  Path('C:/Users/Bob/Documents/Celeste Itch/Mods'),
                  Path('C:/Users/Vamp/Documents/celeste/Mods'))
    located = next((candidate for candidate in candidates if candidate.is_dir()), None)
    if located is None:
        raise FileNotFoundError("ok where'd my mods go")
    return located
@functools.cache
def game_dir() -> Path:
    """Locate the Celeste install directory on whichever known host this runs on.

    Raises FileNotFoundError when none of the known locations exist.
    """
    candidates = (Path('D:/celeste'),
                  Path('G:/celeste'),
                  Path('C:/Users/Bob/Documents/Celeste Itch'),
                  Path('C:/Users/Vamp/Documents/celeste'))
    located = next((candidate for candidate in candidates if candidate.is_dir()), None)
    if located is None:
        raise FileNotFoundError("ok where'd the game go")
    return located
def scaled_sleep(seconds: float):
    """Sleep for 75% of the requested duration (sync-check timings are padded upstream)."""
    time.sleep(0.75 * seconds)
def log_error(message: Optional[str] = None):
    """Log an error (optionally with an explicit message) and store it in
    sync_results so it gets reported.

    Keeps only the last 1950 characters of the error text — presumably to fit
    a downstream message length limit; confirm against the reporting side.
    """
    error = utils.log_error(message)
    db.sync_results.set(int(time.time()), {'reported_error': True, 'error': error[-1950:]})
# module-level state: `log` starts as a placeholder until run_syncs() installs the real logger
log: Union[logging.Logger, utils.LogPlaceholder] = utils.LogPlaceholder()
# matches token entries in logged request headers so they can be redacted from uploaded logs
re_redact_token = re.compile(r"'token': '[^']*'")
# adler32 of this file, set in run_syncs(); included in each project's sync environment state
game_sync_hash = None
if __name__ == '__main__':
    run_syncs()