Pkg protocol: use if JULIA_PKG_SERVER is set (#1444)

StefanKarpinski authored Nov 28, 2019
1 parent 8f97b18 commit 258d74e
Showing 5 changed files with 157 additions and 60 deletions.
18 changes: 16 additions & 2 deletions src/Artifacts.jl
@@ -6,6 +6,7 @@ import ..GitTools
using ..BinaryPlatforms
import ..TOML
import ..Types: parse_toml, write_env_usage
import ...Pkg: pkg_server
using ..PlatformEngines
using SHA

@@ -718,8 +719,12 @@ Download/install an artifact into the artifact store. Returns `true` on success
!!! compat "Julia 1.3"
This function requires at least Julia 1.3.
"""
function download_artifact(tree_hash::SHA1, tarball_url::String, tarball_hash::String;
verbose::Bool = false)
function download_artifact(
tree_hash::SHA1,
tarball_url::String,
tarball_hash::Union{String, Nothing} = nothing;
verbose::Bool = false,
)
if artifact_exists(tree_hash)
return true
end
@@ -850,6 +855,15 @@ function ensure_artifact_installed(name::String, meta::Dict, artifacts_toml::Str
hash = SHA1(meta["git-tree-sha1"])

if !artifact_exists(hash)
# first try downloading from Pkg server
# TODO: only do this if Pkg server knows about this package
if (server = pkg_server()) !== nothing
url = "$server/artifact/$hash"
if download_artifact(hash, url)
return artifact_path(hash)
end
end

# If this artifact does not exist on-disk already, ensure it has download
# information, then download it!
if !haskey(meta, "download")
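
The fallback added to ensure_artifact_installed boils down to "try $server/artifact/$hash first, then fall back to the download stanzas from Artifacts.toml". A minimal standalone sketch of that ordering, for illustration only: install_artifact_sketch and fetch_tarball are hypothetical names, with fetch_tarball standing in for the internal download_artifact (returning true on success).

    # Sketch of the server-first ordering introduced above (not Pkg's actual code).
    function install_artifact_sketch(tree_hash::String, download_info, fetch_tarball)
        server = get(ENV, "JULIA_PKG_SERVER", nothing)
        if server !== nothing
            # Pkg-server tarballs are addressed by git-tree-sha1; there is no
            # separate tarball checksum, hence `nothing` here (and the newly
            # optional `tarball_hash` argument in download_artifact above).
            fetch_tarball("$server/artifact/$tree_hash", nothing) && return :from_pkg_server
        end
        for (url, sha256) in download_info
            fetch_tarball(url, sha256) && return :from_origin
        end
        return :failed
    end

    # Usage: the Pkg-server URL is attempted before any origin URL.
    attempted = String[]
    fake_fetch(url, _) = (push!(attempted, url); false)
    withenv("JULIA_PKG_SERVER" => "https://pkg.example.org") do
        install_artifact_sketch("0123abcd", [("https://example.com/a.tar.gz", "deadbeef")], fake_fetch)
    end
    @assert first(attempted) == "https://pkg.example.org/artifact/0123abcd"
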
40 changes: 26 additions & 14 deletions src/Operations.jl
@@ -12,8 +12,8 @@ using ..Types, ..Resolve, ..PlatformEngines, ..GitTools, ..Display
import ..depots, ..depots1, ..devdir, ..set_readonly, ..Types.PackageEntry
import ..Artifacts: ensure_all_artifacts_installed, artifact_names, extract_all_hashes, artifact_exists
using ..BinaryPlatforms
import ..Pkg

import ...Pkg
import ...Pkg: pkg_server

#########
# Utils #
@@ -444,6 +444,7 @@ end
########################
# Package installation #
########################

function get_archive_url_for_version(url::String, ref)
if (m = match(r"https://github.com/(.*?)/(.*?).git", url)) !== nothing
return "https://api.github.com/repos/$(m.captures[1])/$(m.captures[2])/tarball/$(ref)"
@@ -453,20 +454,18 @@ end

# Returns if archive successfully installed
function install_archive(
urls::Vector{String},
urls::Vector{Pair{String,Bool}},
hash::SHA1,
version_path::String
)::Bool
tmp_objects = String[]
url_success = false
for url in urls
archive_url = get_archive_url_for_version(url, hash)
archive_url !== nothing || continue
for (url, top) in urls
path = tempname() * randstring(6) * ".tar.gz"
push!(tmp_objects, path) # for cleanup
url_success = true
try
PlatformEngines.download(archive_url, path; verbose=false)
PlatformEngines.download(url, path; verbose=false)
catch e
e isa InterruptException && rethrow()
url_success = false
@@ -479,18 +478,22 @@ function install_archive(
unpack(path, dir; verbose=false)
catch e
e isa InterruptException && rethrow()
@warn "failed to extract archive downloaded from $(archive_url)"
@warn "failed to extract archive downloaded from $(url)"
url_success = false
end
url_success || continue
dirs = readdir(dir)
# 7z on Win might create this spurious file
filter!(x -> x != "pax_global_header", dirs)
@assert length(dirs) == 1
if top
unpacked = dir
else
dirs = readdir(dir)
# 7z on Win might create this spurious file
filter!(x -> x != "pax_global_header", dirs)
@assert length(dirs) == 1
unpacked = joinpath(dir, dirs[1])
end
# Assert that the tarball unpacked to the tree sha we wanted
# TODO: Enable on Windows when tree_hash handles
# executable bits correctly, see JuliaLang/julia #33212.
unpacked = joinpath(dir, dirs[1])
if !Sys.iswindows()
if SHA1(GitTools.tree_hash(unpacked)) != hash
@warn "tarball content does not match git-tree-sha1"
@@ -632,7 +635,16 @@ function download_source(ctx::Context, pkgs::Vector{PackageSpec},
continue
end
try
success = install_archive(urls[pkg.uuid], pkg.tree_hash, path)
archive_urls = Pair{String,Bool}[]
if (server = pkg_server()) !== nothing
url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)"
push!(archive_urls, url => true)
end
for repo_url in urls[pkg.uuid]
url = get_archive_url_for_version(repo_url, pkg.tree_hash)
push!(archive_urls, url => false)
end
success = install_archive(archive_urls, pkg.tree_hash, path)
if success && readonly
set_readonly(path) # In add mode, files should be read-only
end
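
download_source now builds its candidate list in a fixed order: the Pkg server's /package/<uuid>/<tree-sha1> tarball (flagged true, i.e. it unpacks at the top level) comes first, followed by per-repository GitHub archive URLs (flagged false). A self-contained sketch of that construction; the UUID, tree hash, and repository below are placeholders, and candidate_archive_urls is a hypothetical name.

    # Sketch: assemble candidate tarball URLs in the order download_source tries them.
    function candidate_archive_urls(uuid::AbstractString, tree_hash::AbstractString,
                                    repo_urls::Vector{String})
        archive_urls = Pair{String,Bool}[]
        server = get(ENV, "JULIA_PKG_SERVER", nothing)
        if server !== nothing
            # Pkg-server tarball: package tree at the top level (`true`).
            push!(archive_urls, "$server/package/$uuid/$tree_hash" => true)
        end
        for repo_url in repo_urls
            # GitHub archive: tree wrapped in a single directory (`false`).
            m = match(r"https://github\.com/(.*?)/(.*?)\.git", repo_url)
            m === nothing && continue
            push!(archive_urls,
                  "https://api.github.com/repos/$(m.captures[1])/$(m.captures[2])/tarball/$tree_hash" => false)
        end
        return archive_urls
    end

    urls = withenv("JULIA_PKG_SERVER" => "https://pkg.example.org") do
        candidate_archive_urls("11111111-2222-3333-4444-555555555555", "abc123",
                               ["https://github.com/JuliaLang/Example.jl.git"])
    end
    @assert first(urls) == ("https://pkg.example.org/package/11111111-2222-3333-4444-555555555555/abc123" => true)
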
7 changes: 7 additions & 0 deletions src/Pkg.jl
@@ -19,6 +19,13 @@ function depots1()
return d[1]
end

function pkg_server()
server = get(ENV, "JULIA_PKG_SERVER", nothing)
server === nothing && return nothing
startswith(server, r"\w+://") || (server = "https://$server")
return server
end

logdir(depot = depots1()) = joinpath(depot, "logs")
devdir(depot = depots1()) = get(ENV, "JULIA_PKG_DEVDIR", joinpath(depots1(), "dev"))
envdir(depot = depots1()) = joinpath(depot, "environments")
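
pkg_server() is now the single place where JULIA_PKG_SERVER is consulted: an unset variable disables the Pkg protocol, and a bare host name is promoted to an https:// URL. The normalization can be exercised in isolation; the sketch below reproduces the logic rather than calling the internal function, and normalize_pkg_server is a hypothetical name.

    # Sketch of the JULIA_PKG_SERVER normalization performed by pkg_server().
    normalize_pkg_server(s::AbstractString) =
        startswith(s, r"\w+://") ? String(s) : "https://$s"

    @assert normalize_pkg_server("pkg.example.org") == "https://pkg.example.org"
    @assert normalize_pkg_server("http://localhost:8000") == "http://localhost:8000"
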
14 changes: 7 additions & 7 deletions src/PlatformEngines.jl
@@ -593,7 +593,7 @@ function download(url::AbstractString, dest::AbstractString;
end

"""
download_verify(url::AbstractString, hash::AbstractString,
download_verify(url::AbstractString, hash::Union{AbstractString, Nothing},
dest::AbstractString; verbose::Bool = false,
force::Bool = false, quiet_download::Bool = false)
@@ -614,7 +614,7 @@ set to `false`) the downloading process will be completely silent. If
`verbose` is set to `true`, messages about integrity verification will be
printed in addition to messages regarding downloading.
"""
function download_verify(url::AbstractString, hash::AbstractString,
function download_verify(url::AbstractString, hash::Union{AbstractString, Nothing},
dest::AbstractString; verbose::Bool = false,
force::Bool = false, quiet_download::Bool = true)
# Whether the file existed in the first place
@@ -628,7 +628,7 @@ function download_verify(url::AbstractString, hash::AbstractString,

# verify download, if it passes, return happy. If it fails, (and
# `force` is `true`, re-download!)
if verify(dest, hash; verbose=verbose)
if hash !== nothing && verify(dest, hash; verbose=verbose)
return true
elseif !force
error("Verification failed, not overwriting $(dest)")
@@ -640,7 +640,7 @@ function download_verify(url::AbstractString, hash::AbstractString,

# Download the file, optionally continuing
download(url, dest; verbose=verbose || !quiet_download)
if !verify(dest, hash; verbose=verbose)
if hash !== nothing && !verify(dest, hash; verbose=verbose)
# If the file already existed, it's possible the initially downloaded chunk
# was bad. If verification fails after downloading, auto-delete the file
# and start over from scratch.
@@ -652,7 +652,7 @@ function download_verify(url::AbstractString, hash::AbstractString,

# Download and verify from scratch
download(url, dest; verbose=verbose || !quiet_download)
if !verify(dest, hash; verbose=verbose)
if hash !== nothing && !verify(dest, hash; verbose=verbose)
error("Verification failed")
end
else
@@ -790,7 +790,7 @@ function package(src_dir::AbstractString, tarball_path::AbstractString)
end

"""
download_verify_unpack(url::AbstractString, hash::AbstractString,
download_verify_unpack(url::AbstractString, hash::Union{AbstractString, Nothing},
dest::AbstractString; tarball_path = nothing,
verbose::Bool = false, ignore_existence::Bool = false,
force::Bool = false)
@@ -819,7 +819,7 @@ Returns `true` if a tarball was actually unpacked, `false` if nothing was
changed in the destination prefix.
"""
function download_verify_unpack(url::AbstractString,
hash::AbstractString,
hash::Union{AbstractString, Nothing},
dest::AbstractString;
tarball_path = nothing,
ignore_existence::Bool = false,
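
Throughout download_verify and download_verify_unpack the hash may now be nothing, in which case the checksum step is skipped; Pkg-server tarballs are instead validated by git-tree-sha1 after unpacking, as in the Operations.jl hunk above. The guard pattern looks like this, with a deliberately simplified stand-in for PlatformEngines' verify (assumed here to be a SHA-256 comparison of the downloaded file).

    using SHA  # standard library

    # Simplified stand-in for verification: skip the check when no hash is
    # supplied, otherwise compare the file's SHA-256 to the expected hex digest.
    function verified(dest::AbstractString, hash::Union{AbstractString, Nothing})
        hash === nothing && return true   # no checksum available: nothing to verify
        return bytes2hex(open(sha256, dest)) == lowercase(hash)
    end
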
138 changes: 101 additions & 37 deletions src/Types.jl
@@ -11,9 +11,10 @@ import Base.string
using REPL.TerminalMenus

using ..TOML
import ..Pkg, ..UPDATED_REGISTRY_THIS_SESSION, ..DEFAULT_IO
import ..Pkg: GitTools, depots, depots1, logdir, set_readonly, safe_realpath
import ...Pkg, ..UPDATED_REGISTRY_THIS_SESSION, ..DEFAULT_IO
import ...Pkg: GitTools, depots, depots1, logdir, set_readonly, safe_realpath, pkg_server
import ..BinaryPlatforms: Platform
import ..PlatformEngines: probe_platform_engines!, download, download_verify_unpack

import Base: SHA1
using SHA
@@ -895,6 +896,31 @@ function populate_known_registries_with_urls!(registries::Vector{RegistrySpec})
end
end

function pkg_server_registry_url(uuid::UUID)
server = pkg_server()
server === nothing && return nothing
probe_platform_engines!()
hash = nothing
try
mktemp() do tmp_path, io
download("$server/registries", tmp_path, verbose=false)
for line in eachline(io)
if (m = match(r"^/registry/([^/]+)/([^/]+)$", line)) !== nothing
uuid == UUID(m.captures[1]) || continue
hash = String(m.captures[2])
break
end
end
end
catch err
@warn "could not download $server/registries"
end
hash === nothing ? nothing : "$server/registry/$uuid/$hash"
end
pkg_server_registry_url(::Nothing) = nothing

pkg_server_url_hash(url::String) = split(url, '/')[end]

# entry point for `registry add`
clone_or_cp_registries(regs::Vector{RegistrySpec}, depot::String=depots1()) =
clone_or_cp_registries(Context(), regs, depot)
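
pkg_server_registry_url works off the server's plain-text /registries listing, whose lines have the form /registry/<uuid>/<tree-sha1>; pkg_server_url_hash then recovers the hash as the last path segment. A standalone sketch of that parsing (the UUID and hash below are placeholders, and registry_url_from_listing is a hypothetical name):

    # Sketch: resolve a registry UUID to its /registry/<uuid>/<hash> URL from the
    # /registries listing served by a Pkg server.
    function registry_url_from_listing(listing::AbstractString, server::AbstractString,
                                       uuid::AbstractString)
        for line in split(listing, '\n')
            m = match(r"^/registry/([^/]+)/([^/]+)$", line)
            m === nothing && continue
            m.captures[1] == uuid || continue
            return "$server/registry/$uuid/$(m.captures[2])"
        end
        return nothing   # registry not known to this server
    end

    listing = "/registry/11111111-2222-3333-4444-555555555555/abc123\n"
    url = registry_url_from_listing(listing, "https://pkg.example.org",
                                    "11111111-2222-3333-4444-555555555555")
    @assert url == "https://pkg.example.org/registry/11111111-2222-3333-4444-555555555555/abc123"
    @assert split(url, '/')[end] == "abc123"   # what pkg_server_url_hash extracts
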
@@ -905,43 +931,56 @@ function clone_or_cp_registries(ctx::Context, regs::Vector{RegistrySpec}, depot:
pkgerror("ambiguous registry specification; both url and path is set.")
end
# clone to tmpdir first
tmp = mktempdir()
if reg.path !== nothing # copy from local source
printpkgstyle(ctx, :Copying, "registry from `$(Base.contractuser(reg.path))`")
cp(reg.path, tmp; force=true)
elseif reg.url !== nothing # clone from url
LibGit2.with(GitTools.clone(ctx, reg.url, tmp; header = "registry from $(repr(reg.url))")) do repo
mktempdir() do tmp
if (url = pkg_server_registry_url(reg.uuid)) !== nothing
# download from Pkg server
try
download_verify_unpack(url, nothing, tmp, ignore_existence = true)
catch err
pkgerror("could not download $url")
end
tree_info_file = joinpath(tmp, ".tree_info.toml")
ispath(tree_info_file) &&
error("tree info file $tree_info_file already exists")
open(tree_info_file, write=true) do io
hash = pkg_server_url_hash(url)
println(io, "git-tree-sha1 = ", repr(hash))
end
elseif reg.path !== nothing # copy from local source
printpkgstyle(ctx, :Copying, "registry from `$(Base.contractuser(reg.path))`")
cp(reg.path, tmp; force=true)
elseif reg.url !== nothing # clone from url
LibGit2.with(GitTools.clone(ctx, reg.url, tmp; header = "registry from $(repr(reg.url))")) do repo
end
else
pkgerror("no path or url specified for registry")
end
else
pkgerror("no path or url specified for registry")
end
# verify that the clone looks like a registry
if !isfile(joinpath(tmp, "Registry.toml"))
pkgerror("no `Registry.toml` file in cloned registry.")
end
registry = read_registry(joinpath(tmp, "Registry.toml"); cache=false) # don't cache this tmp registry
verify_registry(registry)
# copy to `depot`
# slug = Base.package_slug(UUID(registry["uuid"]))
regpath = joinpath(depot, "registries", registry["name"]#=, slug=#)
ispath(dirname(regpath)) || mkpath(dirname(regpath))
if Pkg.isdir_windows_workaround(regpath)
existing_registry = read_registry(joinpath(regpath, "Registry.toml"))
if registry["uuid"] == existing_registry["uuid"]
println(ctx.io,
"registry `$(registry["name"])` already exist in `$(Base.contractuser(regpath))`.")
# verify that the clone looks like a registry
if !isfile(joinpath(tmp, "Registry.toml"))
pkgerror("no `Registry.toml` file in cloned registry.")
end
registry = read_registry(joinpath(tmp, "Registry.toml"); cache=false) # don't cache this tmp registry
verify_registry(registry)
# copy to `depot`
# slug = Base.package_slug(UUID(registry["uuid"]))
regpath = joinpath(depot, "registries", registry["name"]#=, slug=#)
ispath(dirname(regpath)) || mkpath(dirname(regpath))
if Pkg.isdir_windows_workaround(regpath)
existing_registry = read_registry(joinpath(regpath, "Registry.toml"))
if registry["uuid"] == existing_registry["uuid"]
println(ctx.io,
"registry `$(registry["name"])` already exist in `$(Base.contractuser(regpath))`.")
else
throw(PkgError("registry `$(registry["name"])=\"$(registry["uuid"])\"` conflicts with " *
"existing registry `$(existing_registry["name"])=\"$(existing_registry["uuid"])\"`. " *
"To install it you can clone it manually into e.g. " *
"`$(Base.contractuser(joinpath(depot, "registries", registry["name"]*"-2")))`."))
end
else
throw(PkgError("registry `$(registry["name"])=\"$(registry["uuid"])\"` conflicts with " *
"existing registry `$(existing_registry["name"])=\"$(existing_registry["uuid"])\"`. " *
"To install it you can clone it manually into e.g. " *
"`$(Base.contractuser(joinpath(depot, "registries", registry["name"]*"-2")))`."))
cp(tmp, regpath)
printpkgstyle(ctx, :Added, "registry `$(registry["name"])` to `$(Base.contractuser(regpath))`")
end
else
cp(tmp, regpath)
printpkgstyle(ctx, :Added, "registry `$(registry["name"])` to `$(Base.contractuser(regpath))`")
end
# Clean up
Base.rm(tmp; recursive=true, force=true)
end
return nothing
end
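
A registry installed from a Pkg server is not a git clone, so the new branch above records which tree the unpacked tarball corresponds to in a one-entry .tree_info.toml marker. A minimal sketch of writing that marker and reading it back (the hash is a placeholder; write_tree_info is a hypothetical name):

    # Sketch: write the .tree_info.toml marker used for Pkg-server registries.
    function write_tree_info(registry_dir::AbstractString, tree_hash::AbstractString)
        tree_info_file = joinpath(registry_dir, ".tree_info.toml")
        ispath(tree_info_file) && error("tree info file $tree_info_file already exists")
        open(tree_info_file, write = true) do io
            println(io, "git-tree-sha1 = ", repr(tree_hash))
        end
        return tree_info_file
    end

    mktempdir() do dir
        write_tree_info(dir, "abc123")  # placeholder hash
        @assert read(joinpath(dir, ".tree_info.toml"), String) == "git-tree-sha1 = \"abc123\"\n"
    end
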
@@ -1020,8 +1059,33 @@ function update_registries(ctx::Context, regs::Vector{RegistrySpec} = collect_re
!force && UPDATED_REGISTRY_THIS_SESSION[] && return
errors = Tuple{String, String}[]
for reg in unique(r -> r.uuid, find_installed_registries(ctx, regs))
if isdir(joinpath(reg.path, ".git"))
regpath = pathrepr(reg.path)
regpath = pathrepr(reg.path)
if isfile(joinpath(reg.path, ".tree_info.toml"))
printpkgstyle(ctx, :Updating, "registry at " * regpath)
tree_info = TOML.parsefile(joinpath(reg.path, ".tree_info.toml"))
old_hash = tree_info["git-tree-sha1"]
url = pkg_server_registry_url(reg.uuid)
if url !== nothing && (new_hash = pkg_server_url_hash(url)) != old_hash
# TODO: update faster by using a diff, if available
mktempdir() do tmp
try
download_verify_unpack(url, nothing, tmp, ignore_existence = true)
catch err
@warn "could not download $url"
end
tree_info_file = joinpath(tmp, ".tree_info.toml")
ispath(tree_info_file) &&
error("tree info file $tree_info_file already exists")
open(tree_info_file, write=true) do io
println(io, "git-tree-sha1 = ", repr(new_hash))
end
registry_file = joinpath(tmp, "Registry.toml")
registry = read_registry(registry_file; cache=false)
verify_registry(registry)
mv(tmp, reg.path, force=true)
end
end
elseif isdir(joinpath(reg.path, ".git"))
printpkgstyle(ctx, :Updating, "registry at " * regpath)
# Using LibGit2.with here crashes julia when running the
# tests for PkgDev with "Unreachable reached".
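
For Pkg-server registries, update_registries compares the recorded git-tree-sha1 against the hash embedded in the server's current /registry/<uuid>/<hash> URL and only downloads a fresh tarball when they differ. A sketch of that decision; the marker file is parsed here with a regex rather than Pkg's vendored TOML, purely to keep the example self-contained, and both helper names are hypothetical.

    # Sketch: decide whether a Pkg-server registry needs re-downloading by
    # comparing the recorded tree hash with the one in the server's current URL.
    url_tree_hash(url::AbstractString) = String(split(url, '/')[end])  # like pkg_server_url_hash

    function registry_needs_update(tree_info_path::AbstractString, current_url::AbstractString)
        old_hash = nothing
        for line in eachline(tree_info_path)
            m = match(r"^git-tree-sha1\s*=\s*\"([0-9a-fA-F]+)\"\s*$", line)
            m !== nothing && (old_hash = String(m.captures[1]))
        end
        old_hash === nothing && return true            # no record: treat as stale
        return url_tree_hash(current_url) != old_hash  # changed tree => re-download
    end

When the hashes match, the re-download branch is skipped entirely, so a no-op registry update costs only the /registries request.
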
