Skip to content

Commit

Permalink
Test 2
Browse files Browse the repository at this point in the history
  • Loading branch information
AThousandShips committed Sep 7, 2024
1 parent 3fb2c05 commit 358a5be
Show file tree
Hide file tree
Showing 2 changed files with 67 additions and 51 deletions.
3 changes: 2 additions & 1 deletion SConstruct
Original file line number Diff line number Diff line change
Expand Up @@ -1092,7 +1092,6 @@ if "check_c_headers" in env:
env.AppendUnique(CPPDEFINES=[headers[header]])


# FIXME: This method mixes both cosmetic progress stuff and cache handling...
methods.show_progress(env)
# TODO: replace this with `env.Dump(format="json")`
# once we start requiring SCons 4.0 as min version.
Expand Down Expand Up @@ -1124,3 +1123,5 @@ def purge_flaky_files():


atexit.register(purge_flaky_files)

methods.clean_cache(env)
115 changes: 65 additions & 50 deletions methods.py
Original file line number Diff line number Diff line change
Expand Up @@ -892,7 +892,6 @@ def show_progress(env):
# Has its own progress/tracking tool that clashes with ours
return

import atexit
import sys

from SCons.Script import AlwaysBuild, Command, Progress
Expand All @@ -907,21 +906,18 @@ def show_progress(env):
node_count_fname = str(env.Dir("#")) + "/.scons_node_count"

import math
import time

class cache_progress:
# The default is 1 GB cache and 12 hours half life
def __init__(self, path=None, limit=1073741824, half_life=43200):
# The default is 1 GB cache
def __init__(self, path=None, limit=1073741824):
self.path = path
self.limit = limit
self.exponent_scale = math.log(2) / half_life
if env["verbose"] and path is not None:
screen.write(
"Current cache limit is {} (used: {})\n".format(
self.convert_size(limit), self.convert_size(self.get_size(path))
)
)
# self.delete(self.file_list())

def __call__(self, node, *args, **kw):
nonlocal node_count, node_count_max, node_count_interval, node_count_fname, show_progress
Expand All @@ -938,45 +934,6 @@ def __call__(self, node, *args, **kw):
screen.write("\r[Initial build] ")
screen.flush()

def delete(self, files):
    """Remove the given cache files from disk.

    Args:
        files: List of cache file paths to remove; no-op when empty.
    """
    if not files:
        return
    if env["verbose"]:
        # Report how many entries are being purged before touching disk.
        screen.write("\rPurging %d %s from cache...\n" % (len(files), "files" if len(files) > 1 else "file"))
    # Plain loop instead of a side-effect-only list comprehension.
    for f in files:
        os.remove(f)

def file_list(self, limit_mult=1.0):
    """Return the cache files to purge so the cache fits within the limit.

    Entries are ranked by last access time, most recently used first;
    everything past the point where the cumulative size exceeds
    ``self.limit * limit_mult`` is returned for deletion.

    Args:
        limit_mult: Multiplier applied to the size limit before trimming.

    Returns:
        List of file paths to delete (empty when nothing needs purging).
    """
    if self.path is None:
        # No cache directory configured; nothing to do.
        return []
    # Gather (filename, (st_size, st_atime)) for every cache entry.
    file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, "*", "*"))]
    if not file_stat:
        # Empty cache; nothing to do.
        return []
    # Annotate entries as (filename, size, age-since-last-access).
    current_time = time.time()
    file_stat = [(name, size, current_time - atime) for name, (size, atime) in file_stat]
    # Most recently accessed files first: those are the most sensible to keep.
    file_stat.sort(key=lambda entry: entry[2])
    # Find the first entry where the cumulative size exceeds the budget;
    # everything from that point on is eligible for deletion.
    # ("total" instead of "sum" to avoid shadowing the builtin.)
    total, mark = 0, None
    for i, entry in enumerate(file_stat):
        total += entry[1]
        if total > self.limit * limit_mult:
            mark = i
            break
    if mark is None:
        return []
    return [entry[0] for entry in file_stat[mark:]]

def convert_size(self, size_bytes):
if size_bytes == 0:
return "0 bytes"
Expand All @@ -999,7 +956,6 @@ def progress_finish(target, source, env):
try:
with open(node_count_fname, "w", encoding="utf-8", newline="\n") as f:
f.write("%d\n" % node_count)
progressor.delete(progressor.file_list(1.1))
except Exception:
pass

Expand All @@ -1019,14 +975,73 @@ def progress_finish(target, source, env):
progress_finish_command = Command("progress_finish", [], progress_finish)
AlwaysBuild(progress_finish_command)

def progress_finally():
nonlocal node_count, progressor

def clean_cache(env):
    """Register an atexit hook that trims the SCons cache directory.

    Reads the cache location from the ``SCONS_CACHE`` environment variable
    and the size budget (in MiB) from ``SCONS_CACHE_LIMIT`` (default 1024),
    then prunes the least recently used cache entries at process exit until
    the cache fits within the budget.

    Args:
        env: SCons environment; only ``env["verbose"]`` is read, to decide
            whether to report how many files get purged.
    """
    import atexit
    import time

    class cache_clean:
        # The default is a 1 GB cache.
        def __init__(self, path=None, limit=1073741824):
            self.path = path
            self.limit = limit

        def clean(self):
            self.delete(self.file_list())

        def delete(self, files):
            # Remove the given cache entries from disk; no-op for an empty list.
            if not files:
                return
            if env["verbose"]:
                print("Purging %d %s from cache..." % (len(files), "files" if len(files) > 1 else "file"))
            # Plain loop instead of a side-effect-only list comprehension.
            for f in files:
                os.remove(f)

        def file_list(self):
            # Return the cache files to delete so the cache fits self.limit.
            if self.path is None:
                # No cache directory configured; nothing to do.
                return []
            # Gather (filename, (st_size, st_atime)) for each cache entry.
            file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, "*", "*"))]
            if not file_stat:
                # Empty cache; nothing to do.
                return []
            # Annotate entries as (filename, size, age-since-last-access) and
            # keep the most recently accessed files (most sensible to keep) first.
            current_time = time.time()
            file_stat = [(name, size, current_time - atime) for name, (size, atime) in file_stat]
            file_stat.sort(key=lambda entry: entry[2])
            # Find the first entry where the cumulative size exceeds the
            # limit; everything from there on is eligible for deletion.
            total, mark = 0, None
            for i, entry in enumerate(file_stat):
                total += entry[1]
                if total > self.limit:
                    mark = i
                    break
            if mark is None:
                return []
            return [entry[0] for entry in file_stat[mark:]]

    def cache_finally():
        # Best-effort prune at interpreter shutdown; never let cache cleanup
        # abort the exit sequence. (Previously a stale reference to the
        # removed `progressor` raised NameError here, which the bare
        # `except Exception` swallowed, so `cleaner.clean()` never ran.)
        nonlocal cleaner
        try:
            cleaner.clean()
        except Exception:
            pass

    cache_directory = os.environ.get("SCONS_CACHE")
    # Simple cache pruning, run at process exit. Trim the cache directory to
    # a size not larger than cache_limit (SCONS_CACHE_LIMIT is given in MiB).
    cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024
    cleaner = cache_clean(cache_directory, cache_limit)

    atexit.register(cache_finally)


def dump(env):
Expand Down

0 comments on commit 358a5be

Please sign in to comment.