diff --git a/base/loading.jl b/base/loading.jl
index d3e4c0b5353af..51c0ca413b513 100644
--- a/base/loading.jl
+++ b/base/loading.jl
@@ -329,6 +329,7 @@ end

 const project_names = ("JuliaProject.toml", "Project.toml")
 const manifest_names = ("JuliaManifest.toml", "Manifest.toml")
+const preferences_names = ("JuliaLocalPreferences.toml", "LocalPreferences.toml")

 # classify the LOAD_PATH entry to be one of:
 #  - `false`: nonexistant / nothing to see here
@@ -378,31 +379,6 @@ function manifest_deps_get(env::String, where::PkgId, name::String)::Union{Nothi
     return nothing
 end

-function uuid_in_environment(project_file::String, uuid::UUID)
-    # First, check to see if we're looking for the environment itself
-    proj_uuid = get(parsed_toml(project_file), "uuid", nothing)
-    if proj_uuid !== nothing && UUID(proj_uuid) == uuid
-        return true
-    end
-
-    # Check to see if there's a Manifest.toml associated with this project
-    manifest_file = project_file_manifest_path(project_file)
-    if manifest_file === nothing
-        return false
-    end
-    manifest = parsed_toml(manifest_file)
-    for (dep_name, entries) in manifest
-        for entry in entries
-            entry_uuid = get(entry, "uuid", nothing)::Union{String, Nothing}
-            if uuid !== nothing && UUID(entry_uuid) == uuid
-                return true
-            end
-        end
-    end
-    # If all else fails, return `false`
-    return false
-end
-
 function manifest_uuid_path(env::String, pkg::PkgId)::Union{Nothing,String}
     project_file = env_project_file(env)
     if project_file isa String
@@ -1276,7 +1252,12 @@ end
 @assert precompile(create_expr_cache, (PkgId, String, String, typeof(_concrete_dependencies), typeof(stderr), typeof(stdout)))
 @assert precompile(create_expr_cache, (PkgId, String, String, typeof(_concrete_dependencies), typeof(stderr), typeof(stdout)))

-function compilecache_path(pkg::PkgId)::String
+function compilecache_dir(pkg::PkgId)
+    entrypath, entryfile = cache_file_entry(pkg)
+    return joinpath(DEPOT_PATH[1], entrypath)
+end
+
+function compilecache_path(pkg::PkgId, prefs_hash::UInt64)::String
     entrypath, entryfile = cache_file_entry(pkg)
     cachepath = joinpath(DEPOT_PATH[1], entrypath)
     isdir(cachepath) || mkpath(cachepath)
@@ -1286,7 +1267,7 @@ function compilecache_path(pkg::PkgId)::String
     crc = _crc32c(something(Base.active_project(), ""))
     crc = _crc32c(unsafe_string(JLOptions().image_file), crc)
     crc = _crc32c(unsafe_string(JLOptions().julia_bin), crc)
-    crc = _crc32c(get_preferences_hash(pkg.uuid), crc)
+    crc = _crc32c(prefs_hash, crc)
     project_precompile_slug = slug(crc, 5)
     abspath(cachepath, string(entryfile, "_", project_precompile_slug, ".ji"))
 end
@@ -1310,18 +1291,9 @@ const MAX_NUM_PRECOMPILE_FILES = Ref(10)

 function compilecache(pkg::PkgId, path::String, internal_stderr::IO = stderr, internal_stdout::IO = stdout)
     # decide where to put the resulting cache file
-    cachefile = compilecache_path(pkg)
-    cachepath = dirname(cachefile)
-    # prune the directory with cache files
-    if pkg.uuid !== nothing
-        entrypath, entryfile = cache_file_entry(pkg)
-        cachefiles = filter!(x -> startswith(x, entryfile * "_"), readdir(cachepath))
-        if length(cachefiles) >= MAX_NUM_PRECOMPILE_FILES[]
-            idx = findmin(mtime.(joinpath.(cachepath, cachefiles)))[2]
-            rm(joinpath(cachepath, cachefiles[idx]))
-        end
-    end
-    # build up the list of modules that we want the` precompile process to preserve
+    cachepath = compilecache_dir(pkg)
+
+    # build up the list of modules that we want the precompile process to preserve
     concrete_deps = copy(_concrete_dependencies)
     for (key, mod) in loaded_modules
         if !(mod === Main || mod === Core || mod === Base)
@@ -1334,6 +1306,7 @@ function compilecache(pkg::PkgId, path::String, internal_stderr::IO = stderr, in

     # create a temporary file in `cachepath` directory, write the cache in it,
     # write the checksum, _and then_ atomically move the file to `cachefile`.
+    mkpath(cachepath)
     tmppath, tmpio = mktemp(cachepath)
     local p
     try
@@ -1347,6 +1320,21 @@ function compilecache(pkg::PkgId, path::String, internal_stderr::IO = stderr, in

             # inherit permission from the source file
             chmod(tmppath, filemode(path) & 0o777)
+            # Read the preferences hash back from the .ji file (we can't precompute it, because
+            # we don't actually know what the list of compile-time preferences is without compiling)
+            prefs_hash = preferences_hash(tmppath)
+            cachefile = compilecache_path(pkg, prefs_hash)
+
+            # prune the directory with cache files
+            if pkg.uuid !== nothing
+                entrypath, entryfile = cache_file_entry(pkg)
+                cachefiles = filter!(x -> startswith(x, entryfile * "_"), readdir(cachepath))
+                if length(cachefiles) >= MAX_NUM_PRECOMPILE_FILES[]
+                    idx = findmin(mtime.(joinpath.(cachepath, cachefiles)))[2]
+                    rm(joinpath(cachepath, cachefiles[idx]))
+                end
+            end
+
             # this is atomic according to POSIX:
             rename(tmppath, cachefile; force=true)
             return cachefile
@@ -1357,7 +1345,7 @@ function compilecache(pkg::PkgId, path::String, internal_stderr::IO = stderr, in
     if p.exitcode == 125
         return PrecompilableError()
     else
-        error("Failed to precompile $pkg to $cachefile.")
+        error("Failed to precompile $pkg to $tmppath.")
     end
 end

@@ -1383,17 +1371,23 @@ function parse_cache_header(f::IO)
         build_id = read(f, UInt64) # build UUID (mostly just a timestamp)
         push!(modules, PkgId(uuid, sym) => build_id)
     end
-    totbytes = read(f, Int64) # total bytes for file dependencies
+    totbytes = read(f, Int64) # total bytes for file dependencies + preferences
     # read the list of requirements
     # and split the list into include and requires statements
     includes = CacheHeaderIncludes[]
     requires = Pair{PkgId, PkgId}[]
     while true
         n2 = read(f, Int32)
-        n2 == 0 && break
+        totbytes -= 4
+        if n2 == 0
+            break
+        end
         depname = String(read(f, n2))
+        totbytes -= n2
         mtime = read(f, Float64)
+        totbytes -= 8
         n1 = read(f, Int32)
+        totbytes -= 4
         # map ids to keys
PkgId("") : modules[n1].first modpath = String[] @@ -1402,7 +1396,9 @@ function parse_cache_header(f::IO) while true n1 = read(f, Int32) totbytes -= 4 - n1 == 0 && break + if n1 == 0 + break + end push!(modpath, String(read(f, n1))) totbytes -= n1 end @@ -1412,12 +1408,22 @@ function parse_cache_header(f::IO) else push!(includes, CacheHeaderIncludes(modkey, depname, mtime, modpath)) end - totbytes -= 4 + 4 + n2 + 8 + end + prefs = String[] + while true + n2 = read(f, Int32) + totbytes -= 4 + if n2 == 0 + break + end + push!(prefs, String(read(f, n2))) + totbytes -= n2 end prefs_hash = read(f, UInt64) totbytes -= 8 - @assert totbytes == 12 "header of cache file appears to be corrupt" srctextpos = read(f, Int64) + totbytes -= 8 + @assert totbytes == 0 "header of cache file appears to be corrupt (totbytes == $(totbytes))" # read the list of modules that are required to be present during loading required_modules = Vector{Pair{PkgId, UInt64}}() while true @@ -1428,7 +1434,7 @@ function parse_cache_header(f::IO) build_id = read(f, UInt64) # build id push!(required_modules, PkgId(uuid, sym) => build_id) end - return modules, (includes, requires), required_modules, srctextpos, prefs_hash + return modules, (includes, requires), required_modules, srctextpos, prefs, prefs_hash end function parse_cache_header(cachefile::String; srcfiles_only::Bool=false) @@ -1437,21 +1443,37 @@ function parse_cache_header(cachefile::String; srcfiles_only::Bool=false) !isvalid_cache_header(io) && throw(ArgumentError("Invalid header in cache file $cachefile.")) ret = parse_cache_header(io) srcfiles_only || return ret - modules, (includes, requires), required_modules, srctextpos, prefs_hash = ret + modules, (includes, requires), required_modules, srctextpos, prefs, prefs_hash = ret srcfiles = srctext_files(io, srctextpos) delidx = Int[] for (i, chi) in enumerate(includes) chi.filename ∈ srcfiles || push!(delidx, i) end deleteat!(includes, delidx) - return modules, (includes, requires), required_modules, srctextpos, prefs_hash + return modules, (includes, requires), required_modules, srctextpos, prefs, prefs_hash + finally + close(io) + end +end + + + +preferences_hash(f::IO) = parse_cache_header(f)[end] +function preferences_hash(cachefile::String) + io = open(cachefile, "r") + try + if !isvalid_cache_header(io) + throw(ArgumentError("Invalid header in cache file $cachefile.")) + end + return preferences_hash(io) finally close(io) end end + function cache_dependencies(f::IO) - defs, (includes, requires), modules, srctextpos, prefs_hash = parse_cache_header(f) + defs, (includes, requires), modules, srctextpos, prefs, prefs_hash = parse_cache_header(f) return modules, map(chi -> (chi.filename, chi.mtime), includes) # return just filename and mtime end @@ -1466,7 +1488,7 @@ function cache_dependencies(cachefile::String) end function read_dependency_src(io::IO, filename::AbstractString) - modules, (includes, requires), required_modules, srctextpos, prefs_hash = parse_cache_header(io) + modules, (includes, requires), required_modules, srctextpos, prefs, prefs_hash = parse_cache_header(io) srctextpos == 0 && error("no source-text stored in cache file") seek(io, srctextpos) return _read_dependency_src(io, filename) @@ -1511,36 +1533,140 @@ function srctext_files(f::IO, srctextpos::Int64) return files end -# Find the Project.toml that we should load/store to for Preferences -function get_preferences_project_path(uuid::UUID) - for env in load_path() - project_file = env_project_file(env) - if !isa(project_file, String) - continue 
-        end
-        if uuid_in_environment(project_file, uuid)
-            return project_file
+# Find the name under which this UUID appears in this `Project.toml`; either as
+# the top-level UUID (e.g. that of the project itself) or as a dependency.
+function get_uuid_name(project::Dict, uuid::UUID)
+    if haskey(project, "uuid") && haskey(project, "name") &&
+       UUID(project["uuid"]) == uuid
+        return project["name"]
+    elseif haskey(project, "deps")
+        struuid = string(uuid)
+        for (k, v) in project["deps"]
+            if v == struuid
+                return k
+            end
         end
     end
+    return nothing
 end

-function get_preferences(uuid::UUID;
-                         prefs_key::String = "compile-preferences")
-    project_path = get_preferences_project_path(uuid)
-    if project_path !== nothing
-        preferences = get(parsed_toml(project_path), prefs_key, Dict{String,Any}())
-        if haskey(preferences, string(uuid))
-            return preferences[string(uuid)]
+function get_uuid_name(project_toml::String, uuid::UUID)
+    project = parsed_toml(project_toml)
+    return get_uuid_name(project, uuid)
+end
+
+function collect_preferences!(project_toml::String, uuid::UUID)
+    # We'll return a list of dicts to be merged
+    dicts = Dict[]
+
+    # Get the name this UUID is known by within this project; if we can't find it, skip out.
+    project = parsed_toml(project_toml)
+    pkg_name = get_uuid_name(project, uuid)
+    if pkg_name === nothing
+        return Dict[]
+    end
+
+    # Look first inside of `Project.toml` to see if we have preferences embedded within there
+    if haskey(project, "preferences") && isa(project["preferences"], Dict)
+        push!(dicts, get(project["preferences"], pkg_name, Dict()))
+    end
+
+    # Next, look for `(Julia)LocalPreferences.toml` files next to this `Project.toml`
+    project_dir = dirname(project_toml)
+    for name in preferences_names
+        toml_path = joinpath(project_dir, name)
+        if isfile(toml_path)
+            prefs = parsed_toml(toml_path)
+            push!(dicts, get(prefs, pkg_name, Dict()))
+
+            # If we find `JuliaLocalPreferences.toml`, don't look for `LocalPreferences.toml`
+            break
+        end
+    end
+
+    return dicts
+end
+
+"""
+    recursive_prefs_merge(base::Dict, overrides::Dict...)
+
+Helper function to merge preference dicts recursively, honoring overrides in nested
+dictionaries properly.
+"""
+function recursive_prefs_merge(base::Dict, overrides::Dict...)
+    new_base = Base._typeddict(base, overrides...)
+
+    for override in overrides
+        # Clear entries are keys that should be deleted from any previous setting.
+        if haskey(override, "__clear__") && isa(override["__clear__"], Vector)
+            for k in override["__clear__"]
+                delete!(new_base, k)
+            end
+        end
+
+        for (k, v) in override
+            # Note that if `base` has a mapping that is _not_ a `Dict` while `override` has one
+            # (or vice versa), the override simply replaces it; we only merge when both are `Dict`s.
+            if haskey(new_base, k) && isa(new_base[k], Dict) && isa(override[k], Dict)
+                new_base[k] = recursive_prefs_merge(new_base[k], override[k])
+            else
+                new_base[k] = override[k]
+            end
+        end
+    end
+    return new_base
+end
+
+function get_preferences(uuid::UUID)
+    merged_prefs = Dict{String,Any}()
+    for env in reverse(load_path())
+        project_toml = env_project_file(env)
+        if !isa(project_toml, String)
+            continue
+        end
+
+        # Collect all dictionaries from the current point in the load path, then merge them in
+        dicts = collect_preferences!(project_toml, uuid)
+        merged_prefs = recursive_prefs_merge(merged_prefs, dicts...)
+    end
+    return merged_prefs
+end
+
+function get_preferences_hash(uuid::UUID, prefs_list::Vector{String})
+    # Start from the "null" hash
+    h = get_preferences_hash(nothing, prefs_list)
+
+    # Load the preferences
+    prefs = get_preferences(uuid)
+
+    # Walk through each name that's called out as a compile-time preference
+    for name in prefs_list
+        if haskey(prefs, name)
+            h = hash(prefs[name], h)
         end
     end
-    # Fall back to default value of "no preferences".
-    return Dict{String,Any}()
+    return h
 end

-get_preferences_hash(uuid::UUID) = UInt64(hash(get_preferences(uuid)))
-get_preferences_hash(m::Module) = get_preferences_hash(PkgId(m).uuid)
-get_preferences_hash(::Nothing) = UInt64(hash(Dict{String,Any}()))
+get_preferences_hash(m::Module, prefs_list::Vector{String}) = get_preferences_hash(PkgId(m).uuid, prefs_list)
+get_preferences_hash(::Nothing, prefs_list::Vector{String}) = UInt64(0x6e65726566657250)
+# This is how we keep track of who is using what preferences at compile-time
+const COMPILETIME_PREFERENCES = Dict{UUID,Set{String}}()
+
+# In `Preferences.jl`, if someone calls `load_preference(@__MODULE__, key)` while we're precompiling,
+# we mark that usage as a usage at compile-time and call this method, so that at the end of `.ji`
+# generation, we can record the list of compile-time preferences and store that list in the `.ji` header.
+function record_compiletime_preference(uuid::UUID, key::String)
+    if !haskey(COMPILETIME_PREFERENCES, uuid)
+        COMPILETIME_PREFERENCES[uuid] = Set((key,))
+    else
+        push!(COMPILETIME_PREFERENCES[uuid], key)
+    end
+    return nothing
+end
+get_compiletime_preferences(uuid::UUID) = collect(get(COMPILETIME_PREFERENCES, uuid, String[]))
+get_compiletime_preferences(m::Module) = get_compiletime_preferences(PkgId(m).uuid)
+get_compiletime_preferences(::Nothing) = String[]

 # returns true if it "cachefile.ji" is stale relative to "modpath.jl"
 # otherwise returns the list of dependencies to also check
@@ -1551,7 +1677,7 @@ function stale_cachefile(modpath::String, cachefile::String)
         @debug "Rejecting cache file $cachefile due to it containing an invalid cache header"
         return true # invalid cache file
     end
-    modules, (includes, requires), required_modules, srctextpos, prefs_hash = parse_cache_header(io)
+    modules, (includes, requires), required_modules, srctextpos, prefs, prefs_hash = parse_cache_header(io)
     id = isempty(modules) ? nothing : first(modules).first
     modules = Dict{PkgId, UInt64}(modules)

@@ -1627,7 +1753,7 @@ function stale_cachefile(modpath::String, cachefile::String)
         end

         if isa(id, PkgId)
-            curr_prefs_hash = get_preferences_hash(id.uuid)
+            curr_prefs_hash = get_preferences_hash(id.uuid, prefs)
             if prefs_hash != curr_prefs_hash
                 @debug "Rejecting cache file $cachefile because preferences hash does not match 0x$(string(prefs_hash, base=16)) != 0x$(string(curr_prefs_hash, base=16))"
                 return true
diff --git a/src/dump.c b/src/dump.c
index 287bf2d44701b..e1955b6836936 100644
--- a/src/dump.c
+++ b/src/dump.c
@@ -1126,25 +1126,50 @@ static int64_t write_dependency_list(ios_t *s, jl_array_t **udepsp, jl_array_t *
     // Calculate Preferences hash for current package.
     jl_value_t *prefs_hash = NULL;
+    jl_value_t *prefs_list = NULL;
     if (jl_base_module) {
         // Toplevel module is the module we're currently compiling, use it to get our preferences hash
         jl_value_t * toplevel = (jl_value_t*)jl_get_global(jl_base_module, jl_symbol("__toplevel__"));
         jl_value_t * prefs_hash_func = jl_get_global(jl_base_module, jl_symbol("get_preferences_hash"));
+        jl_value_t * get_compiletime_prefs_func = jl_get_global(jl_base_module, jl_symbol("get_compiletime_preferences"));

-        if (toplevel && prefs_hash_func) {
-            // call get_preferences_hash(__toplevel__)
-            jl_value_t *prefs_hash_args[2] = {prefs_hash_func, (jl_value_t*)toplevel};
+        if (toplevel && prefs_hash_func && get_compiletime_prefs_func) {
+            // Temporarily invoke in the newest world age
             size_t last_age = jl_get_ptls_states()->world_age;
             jl_get_ptls_states()->world_age = jl_world_counter;
-            prefs_hash = (jl_value_t*)jl_apply(prefs_hash_args, 2);
+
+            // call get_compiletime_preferences(__toplevel__)
+            jl_value_t *args[3] = {get_compiletime_prefs_func, (jl_value_t*)toplevel, NULL};
+            prefs_list = (jl_value_t*)jl_apply(args, 2);
+
+            // Call get_preferences_hash(__toplevel__, prefs_list)
+            args[0] = prefs_hash_func;
+            args[2] = prefs_list;
+            prefs_hash = (jl_value_t*)jl_apply(args, 3);
+
+            // Reset world age to normal
             jl_get_ptls_states()->world_age = last_age;
         }
     }

     // If we successfully got the preferences, write it out, otherwise write `0` for this `.ji` file.
-    if (prefs_hash != NULL) {
+    if (prefs_hash != NULL && prefs_list != NULL) {
+        size_t i, l = jl_array_len(prefs_list);
+        for (i = 0; i < l; i++) {
+            jl_value_t *pref_name = jl_array_ptr_ref(prefs_list, i);
+            size_t slen = jl_string_len(pref_name);
+            write_int32(s, slen);
+            ios_write(s, jl_string_data(pref_name), slen);
+        }
+        write_int32(s, 0); // terminator
         write_uint64(s, jl_unbox_uint64(prefs_hash));
     } else {
+        // This is an error path, but let's at least generate a valid `.ji` file.
+        // We declare an empty list of preference names, followed by a zero-hash.
+        // The zero-hash is not what would be generated for an empty set of preferences,
+        // and so this `.ji` file will be invalidated by a future non-erroring pass
+        // through this function.
+        write_int32(s, 0);
         write_uint64(s, 0);
     }
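
Illustration (not part of the patch): the merge semantics implemented by `recursive_prefs_merge` and `get_preferences` above can be sketched standalone as below. Nested preference tables merge key-by-key, scalar values are replaced outright, and a `__clear__` list drops keys inherited from lower-priority environments (since `get_preferences` walks `reverse(load_path())`, environments earlier in the load path are merged last and therefore win). The helper name `merge_prefs` and the example values are invented for this sketch, and the real implementation additionally carries the `__clear__` entry itself forward, which is omitted here for brevity.

# Simplified sketch of the preference-merge behavior added in this patch (illustration only).
function merge_prefs(base::Dict, override::Dict)
    out = copy(base)
    # `__clear__` names keys whose previously-set values should be dropped entirely.
    for k in get(override, "__clear__", String[])
        delete!(out, k)
    end
    for (k, v) in override
        k == "__clear__" && continue
        if haskey(out, k) && isa(out[k], Dict) && isa(v, Dict)
            out[k] = merge_prefs(out[k], v)  # both sides are tables: merge recursively
        else
            out[k] = v                       # otherwise the override replaces the old value
        end
    end
    return out
end

base     = Dict{String,Any}("backend" => "CUDA", "opts" => Dict{String,Any}("threads" => 4, "debug" => false))
override = Dict{String,Any}("opts" => Dict{String,Any}("debug" => true), "__clear__" => ["backend"])
merge_prefs(base, override)
# => Dict("opts" => Dict("threads" => 4, "debug" => true))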