diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 890310b3e80..335bc40bc82 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32) XPUSHs(&PL_sv_undef); else XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); - auto s = info->narHash.to_string(base32 ? Base32 : Base16); + auto s = info->narHash.to_string(base32 ? Base::Base32 : Base::Base16); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); @@ -192,7 +192,7 @@ SV * hashPath(char * algo, int base32, char * path) PPCODE: try { Hash h = hashPath(parseHashType(algo), path).first; - auto s = h.to_string(base32 ? Base32 : Base16, false); + auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -203,7 +203,7 @@ SV * hashFile(char * algo, int base32, char * path) PPCODE: try { Hash h = hashFile(parseHashType(algo), path); - auto s = h.to_string(base32 ? Base32 : Base16, false); + auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -214,7 +214,7 @@ SV * hashString(char * algo, int base32, char * s) PPCODE: try { Hash h = hashString(parseHashType(algo), s); - auto s = h.to_string(base32 ? Base32 : Base16, false); + auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -225,7 +225,7 @@ SV * convertHash(char * algo, char * s, int toBase32) PPCODE: try { Hash h(s, parseHashType(algo)); - string s = h.to_string(toBase32 ? Base32 : Base16, false); + string s = h.to_string(toBase32 ? Base::Base32 : Base::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 00340b7872a..58fd4a8ae66 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -184,7 +184,7 @@ static int _main(int argc, char * * argv) try { - Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri)); + Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("connecting to '%s'", bestMachine->storeUri)); Store::Params storeParams; if (hasPrefix(bestMachine->storeUri, "ssh://")) { @@ -222,7 +222,7 @@ static int _main(int argc, char * * argv) AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + escapeUri(storeUri) + ".upload-lock", true); { - Activity act(*logger, lvlTalkative, actUnknown, fmt("waiting for the upload lock to '%s'", storeUri)); + Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("waiting for the upload lock to '%s'", storeUri)); auto old = signal(SIGALRM, handleAlarm); alarm(15 * 60); @@ -235,7 +235,7 @@ static int _main(int argc, char * * argv) auto substitute = settings.buildersUseSubstitutes ? 
Substitute : NoSubstitute; { - Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri)); + Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("copying dependencies to '%s'", storeUri)); copyPaths(store, ref(sshStore), store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute); } @@ -254,7 +254,7 @@ static int _main(int argc, char * * argv) if (!store->isValidPath(store->parseStorePath(path))) missing.insert(store->parseStorePath(path)); if (!missing.empty()) { - Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri)); + Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("copying outputs from '%s'", storeUri)); for (auto & i : missing) store->locksHeld.insert(store->printStorePath(i)); /* FIXME: ugly */ copyPaths(ref(sshStore), store, missing, NoRepair, NoCheckSigs, NoSubstitute); diff --git a/src/cpptoml/cpptoml.h b/src/cpptoml/cpptoml.h index 5a00da3b4cd..fae1f0bc9fb 100644 --- a/src/cpptoml/cpptoml.h +++ b/src/cpptoml/cpptoml.h @@ -51,7 +51,7 @@ using string_to_base_map = std::unordered_map>; #endif -// if defined, `base` will retain type information in form of an enum class +// if defined, `base` will retain type information in form of an enum struct // such that static_cast can be used instead of dynamic_cast // #define CPPTOML_NO_RTTI @@ -405,7 +405,7 @@ inline std::shared_ptr make_table_array(bool is_inline = false); #if defined(CPPTOML_NO_RTTI) /// Base type used to store underlying data type explicitly if RTTI is disabled -enum class base_type +enum struct base_type { NONE, STRING, @@ -2268,7 +2268,7 @@ class parser return key; } - enum class parse_type + enum struct parse_type { STRING = 1, LOCAL_TIME, diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 82eb1582e8a..99c1070cebe 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1669,10 +1669,10 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path) else { auto p = settings.readOnlyMode ? 
store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first - : store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair); + : store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, repair); dstPath = store->printStorePath(p); srcToStore.insert_or_assign(path, std::move(p)); - printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath); + printMsg(Verbosity::Chatty, "copied source '%1%' -> '%2%'", path, dstPath); } context.insert(dstPath); diff --git a/src/libexpr/function-trace.cc b/src/libexpr/function-trace.cc index c6057b3842f..882da99370f 100644 --- a/src/libexpr/function-trace.cc +++ b/src/libexpr/function-trace.cc @@ -6,13 +6,13 @@ namespace nix { FunctionCallTrace::FunctionCallTrace(const Pos & pos) : pos(pos) { auto duration = std::chrono::high_resolution_clock::now().time_since_epoch(); auto ns = std::chrono::duration_cast(duration); - printMsg(lvlInfo, "function-trace entered %1% at %2%", pos, ns.count()); + printMsg(Verbosity::Info, "function-trace entered %1% at %2%", pos, ns.count()); } FunctionCallTrace::~FunctionCallTrace() { auto duration = std::chrono::high_resolution_clock::now().time_since_epoch(); auto ns = std::chrono::duration_cast(duration); - printMsg(lvlInfo, "function-trace exited %1% at %2%", pos, ns.count()); + printMsg(Verbosity::Info, "function-trace exited %1% at %2%", pos, ns.count()); } } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 1993fa6c103..991de24af0a 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -626,7 +626,7 @@ Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath) Expr * EvalState::parseStdin() { - //Activity act(*logger, lvlTalkative, format("parsing standard input")); + //Activity act(*logger, Verbosity::Talkative, format("parsing standard input")); return parseExprFromString(drainFD(0), absPath(".")); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 4c72da44ad5..0d4a6c4cee9 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -718,15 +718,15 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * if (outputs.size() != 1 || *(outputs.begin()) != "out") throw Error(format("multiple outputs are not supported in fixed-output derivations, at %1%") % posDrvName); - HashType ht = outputHashAlgo.empty() ? htUnknown : parseHashType(outputHashAlgo); + std::optional ht = parseHashTypeOpt(outputHashAlgo); Hash h(*outputHash, ht); auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName); if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath); drv.outputs.insert_or_assign("out", DerivationOutput { std::move(outPath), - ingestionMethodPrefix(ingestionMethod) + printHashType(h.type), - h.to_string(Base16, false), + ingestionMethodPrefix(ingestionMethod) + printHashType(*h.type), + h.to_string(Base::Base16, false), }); } @@ -757,7 +757,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * auto drvPath = writeDerivation(state.store, drv, drvName, state.repair); auto drvPathS = state.store->printStorePath(drvPath); - printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS); + printMsg(Verbosity::Chatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS); /* Optimisation, but required in read-only mode! 
because in that case we don't actually write store derivations, so we can't @@ -933,14 +933,14 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Value & v) { string type = state.forceStringNoCtx(*args[0], pos); - HashType ht = parseHashType(type); - if (ht == htUnknown) + std::optional ht = parseHashType(type); + if (!ht) throw Error(format("unknown hash type '%1%', at %2%") % type % pos); PathSet context; // discarded Path p = state.coerceToPath(pos, *args[1], context); - mkString(v, hashFile(ht, state.checkSourcePath(p)).to_string(Base16, false), context); + mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base::Base16, false), context); } /* Read a directory (without . or ..) */ @@ -1076,8 +1076,8 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con Path dstPath; if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { dstPath = state.store->printStorePath(settings.readOnlyMode - ? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first - : state.store->addToStore(name, path, method, htSHA256, filter, state.repair)); + ? state.store->computeStorePathForPath(name, path, method, HashType::SHA256, filter).first + : state.store->addToStore(name, path, method, HashType::SHA256, filter, state.repair)); if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath)) throw Error("store path mismatch in (possibly filtered) path added from '%s'", path); } else @@ -1125,7 +1125,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value } else if (n == "recursive") method = FileIngestionMethod { state.forceBool(*attr.value, *attr.pos) }; else if (n == "sha256") - expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); + expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA256); else throw EvalError(format("unsupported argument '%1%' to 'addPath', at %2%") % attr.name % *attr.pos); } @@ -1811,14 +1811,14 @@ static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, Value & v) { string type = state.forceStringNoCtx(*args[0], pos); - HashType ht = parseHashType(type); - if (ht == htUnknown) + std::optional ht = parseHashType(type); + if (!ht) throw Error(format("unknown hash type '%1%', at %2%") % type % pos); PathSet context; // discarded string s = state.forceString(*args[1], context, pos); - mkString(v, hashString(ht, s).to_string(Base16, false), context); + mkString(v, hashString(*ht, s).to_string(Base::Base16, false), context); } diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index 1a8798fccb0..b199c9e13cc 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -29,7 +29,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va else if (n == "ref") ref = state.forceStringNoCtx(*attr.value, *attr.pos); else if (n == "rev") - rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA1); + rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA1); else if (n == "name") name = state.forceStringNoCtx(*attr.value, *attr.pos); else if (n == "submodules") @@ -67,7 +67,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va mkString(*state.allocAttr(v, state.sOutPath), storePath, 
PathSet({storePath})); // Backward compatibility: set 'rev' to // 0000000000000000000000000000000000000000 for a dirty tree. - auto rev2 = input2->getRev().value_or(Hash(htSHA1)); + auto rev2 = input2->getRev().value_or(Hash(HashType::SHA1)); mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev()); mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev()); // Backward compatibility: set 'revCount' to 0 for a dirty tree. diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 0a1ba49d5e4..913ae172b4b 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar // be both a revision or a branch/tag name. auto value = state.forceStringNoCtx(*attr.value, *attr.pos); if (std::regex_match(value, revRegex)) - rev = Hash(value, htSHA1); + rev = Hash(value, HashType::SHA1); else ref = value; } @@ -71,7 +71,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2->getRef()); // Backward compatibility: set 'rev' to // 0000000000000000000000000000000000000000 for a dirty tree. - auto rev2 = input2->getRev().value_or(Hash(htSHA1)); + auto rev2 = input2->getRev().value_or(Hash(HashType::SHA1)); mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev()); mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12)); if (tree.info.revCount) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index c5a0d988683..0a62a07569c 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -23,7 +23,7 @@ void emitTreeAttrs( assert(tree.info.narHash); mkString(*state.allocAttr(v, state.symbols.create("narHash")), - tree.info.narHash.to_string(SRI)); + tree.info.narHash.to_string(Base::SRI)); if (input->getRev()) { mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev()); @@ -103,7 +103,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, if (n == "url") url = state.forceStringNoCtx(*attr.value, *attr.pos); else if (n == "sha256") - expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); + expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA256); else if (n == "name") name = state.forceStringNoCtx(*attr.value, *attr.pos); else @@ -137,7 +137,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, if (expectedHash) { auto hash = unpack ? 
state.store->queryPathInfo(storePath)->narHash - : hashFile(htSHA256, path); + : hashFile(HashType::SHA256, path); if (hash != *expectedHash) throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s", *url, expectedHash->to_string(), hash.to_string()); diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 94ac30e3800..a13533c3cf8 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -47,7 +47,7 @@ Attrs Input::toAttrs() const { auto attrs = toAttrsInternal(); if (narHash) - attrs.emplace("narHash", narHash->to_string(SRI)); + attrs.emplace("narHash", narHash->to_string(Base::SRI)); attrs.emplace("type", type()); return attrs; } @@ -67,7 +67,7 @@ std::pair> Input::fetchTree(ref store) if (narHash && narHash != input->narHash) throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'", - to_string(), tree.actualPath, narHash->to_string(SRI), input->narHash->to_string(SRI)); + to_string(), tree.actualPath, narHash->to_string(Base::SRI), input->narHash->to_string(Base::SRI)); return {std::move(tree), input}; } diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 17cc602285c..75d70c1b44f 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -95,7 +95,7 @@ struct GitInput : Input auto input = std::make_shared(*this); - assert(!rev || rev->type == htSHA1); + assert(!rev || rev->type == HashType::SHA1); std::string cacheType = "git"; if (shallow) cacheType += "-shallow"; @@ -195,7 +195,7 @@ struct GitInput : Input return files.count(file); }; - auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter); + auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, HashType::SHA256, filter); auto tree = Tree { .actualPath = store->printStorePath(storePath), @@ -225,21 +225,21 @@ struct GitInput : Input if (isLocal) { if (!input->rev) - input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1); + input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), HashType::SHA1); repoDir = actualUrl; } else { if (auto res = getCache()->lookup(store, mutableAttrs)) { - auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1); + auto rev2 = Hash(getStrAttr(res->first, "rev"), HashType::SHA1); if (!rev || rev == rev2) { input->rev = rev2; return makeResult(res->first, std::move(res->second)); } } - Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false); + Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false); repoDir = cacheDir; if (!pathExists(cacheDir)) { @@ -277,12 +277,15 @@ struct GitInput : Input } if (doFetch) { - Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl)); + Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Git repository '%s'", actualUrl)); // FIXME: git stderr messes up our progress indicator, so // we're using --quiet for now. Should process its stderr. try { - runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", *input->ref, *input->ref) }); + auto fetchRef = input->ref->compare(0, 5, "refs/") == 0 + ? 
*input->ref + : "refs/heads/" + *input->ref; + runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }); } catch (Error & e) { if (!pathExists(localRefFile)) throw; warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl); @@ -298,7 +301,7 @@ struct GitInput : Input } if (!input->rev) - input->rev = Hash(chomp(readFile(localRefFile)), htSHA1); + input->rev = Hash(chomp(readFile(localRefFile)), HashType::SHA1); } bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true"; @@ -347,7 +350,7 @@ struct GitInput : Input unpackTarfile(*source, tmpDir); } - auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter); + auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, HashType::SHA256, filter); auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() })); @@ -418,12 +421,12 @@ struct GitInputScheme : InputScheme auto input = std::make_unique(parseURL(getStrAttr(attrs, "url"))); if (auto ref = maybeGetStrAttr(attrs, "ref")) { - if (!std::regex_match(*ref, refRegex)) + if (std::regex_search(*ref, badGitRefRegex)) throw BadURL("invalid Git branch/tag name '%s'", *ref); input->ref = *ref; } if (auto rev = maybeGetStrAttr(attrs, "rev")) - input->rev = Hash(*rev, htSHA1); + input->rev = Hash(*rev, HashType::SHA1); input->shallow = maybeGetBoolAttr(attrs, "shallow").value_or(false); diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 8675a5a662d..99336fc542e 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -45,7 +45,7 @@ struct GitHubInput : Input auto path = owner + "/" + repo; assert(!(ref && rev)); if (ref) path += "/" + *ref; - if (rev) path += "/" + rev->to_string(Base16, false); + if (rev) path += "/" + rev->to_string(Base::Base16, false); return ParsedURL { .scheme = "github", .path = path, @@ -76,7 +76,7 @@ struct GitHubInput : Input readFile( store->toRealPath( downloadFile(store, url, "source", false).storePath))); - rev = Hash(json["sha"], htSHA1); + rev = Hash(json["sha"], HashType::SHA1); debug("HEAD revision for '%s' is %s", url, rev->gitRev()); } @@ -106,7 +106,7 @@ struct GitHubInput : Input // might have stricter rate limits. 
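Reviewer note (not part of the patch): the renames in the hunks above assume that the old unscoped enumerators (lvl*, act*, ht*, Base16/Base32/SRI) have been replaced elsewhere in this PR by scoped enums. A minimal, self-contained sketch of what such definitions could look like follows — the member lists and underlying types are assumptions for illustration, not copied from the tree:

```cpp
// Hypothetical sketch of the scoped enums this patch relies on; the real
// definitions live in the logging/hash headers and may differ in detail.
#include <cstdint>

enum struct Verbosity : uint64_t {
    Error = 0, Warn, Info, Talkative, Chatty, Debug, Vomit,
};

enum struct ActivityType {
    Unknown, CopyPath, Download, Realise, CopyPaths, Builds,
    Build, QueryPathInfo, PostBuildHook, Substitute,
};

// htUnknown is gone in this PR; "unknown" is expressed as std::optional instead.
enum struct HashType { MD5, SHA1, SHA256, SHA512 };

enum struct Base { Base64, Base32, Base16, SRI };

int main() {
    // Scoped enumerators no longer convert implicitly to int, which is why
    // every call site in the diff now spells out the enum name.
    Verbosity v = Verbosity::Talkative;
    return v >= Verbosity::Error ? 0 : 1;
}
```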
auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s", - owner, repo, rev->to_string(Base16, false)); + owner, repo, rev->to_string(Base::Base16, false)); std::string accessToken = settings.githubAccessToken.get(); if (accessToken != "") @@ -140,7 +140,7 @@ struct GitHubInputScheme : InputScheme if (path.size() == 2) { } else if (path.size() == 3) { if (std::regex_match(path[2], revRegex)) - input->rev = Hash(path[2], htSHA1); + input->rev = Hash(path[2], HashType::SHA1); else if (std::regex_match(path[2], refRegex)) input->ref = path[2]; else @@ -152,7 +152,7 @@ struct GitHubInputScheme : InputScheme if (name == "rev") { if (input->rev) throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url); - input->rev = Hash(value, htSHA1); + input->rev = Hash(value, HashType::SHA1); } else if (name == "ref") { if (!std::regex_match(value, refRegex)) @@ -185,7 +185,7 @@ struct GitHubInputScheme : InputScheme input->repo = getStrAttr(attrs, "repo"); input->ref = maybeGetStrAttr(attrs, "ref"); if (auto rev = maybeGetStrAttr(attrs, "rev")) - input->rev = Hash(*rev, htSHA1); + input->rev = Hash(*rev, HashType::SHA1); return input; } }; diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 2e0d4bf4d83..feffc48d630 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -114,7 +114,7 @@ struct MercurialInput : Input return files.count(file); }; - auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter); + auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, HashType::SHA256, filter); return {Tree { .actualPath = store->printStorePath(storePath), @@ -167,14 +167,14 @@ struct MercurialInput : Input }); if (auto res = getCache()->lookup(store, mutableAttrs)) { - auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1); + auto rev2 = Hash(getStrAttr(res->first, "rev"), HashType::SHA1); if (!rev || rev == rev2) { input->rev = rev2; return makeResult(res->first, std::move(res->second)); } } - Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false)); + Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false)); /* If this is a commit hash that we already have, we don't have to pull again. 
*/ @@ -184,7 +184,7 @@ struct MercurialInput : Input RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" }) .killStderr(true)).second == "1")) { - Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl)); + Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Mercurial repository '%s'", actualUrl)); if (pathExists(cacheDir)) { try { @@ -210,7 +210,7 @@ struct MercurialInput : Input runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" })); assert(tokens.size() == 3); - input->rev = Hash(tokens[0], htSHA1); + input->rev = Hash(tokens[0], HashType::SHA1); auto revCount = std::stoull(tokens[1]); input->ref = tokens[2]; @@ -293,7 +293,7 @@ struct MercurialInputScheme : InputScheme input->ref = *ref; } if (auto rev = maybeGetStrAttr(attrs, "rev")) - input->rev = Hash(*rev, htSHA1); + input->rev = Hash(*rev, HashType::SHA1); return input; } }; diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index ba2cc192e54..3f50addd8b6 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -101,7 +101,7 @@ struct PathInputScheme : InputScheme for (auto & [name, value] : url.query) if (name == "rev") - input->rev = Hash(value, htSHA1); + input->rev = Hash(value, HashType::SHA1); else if (name == "revCount") { uint64_t revCount; if (!string2Int(value, revCount)) @@ -129,7 +129,7 @@ struct PathInputScheme : InputScheme for (auto & [name, value] : attrs) if (name == "rev") - input->rev = Hash(getStrAttr(attrs, "rev"), htSHA1); + input->rev = Hash(getStrAttr(attrs, "rev"), HashType::SHA1); else if (name == "revCount") input->revCount = getIntAttr(attrs, "revCount"); else if (name == "lastModified") diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index b6e57379bd0..1030c191a2d 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -66,9 +66,9 @@ DownloadFileResult downloadFile( } else { StringSink sink; dumpString(*res.data, sink); - auto hash = hashString(htSHA256, *res.data); + auto hash = hashString(HashType::SHA256, *res.data); ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name)); - info.narHash = hashString(htSHA256, *sink.s); + info.narHash = hashString(HashType::SHA256, *sink.s); info.narSize = sink.s->size(); info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash); auto source = StringSource { *sink.s }; @@ -142,7 +142,7 @@ Tree downloadTarball( throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url); auto topDir = tmpDir + "/" + members.begin()->name; lastModified = lstat(topDir).st_mtime; - unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair); + unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, NoRepair); } Attrs infoAttrs({ @@ -196,9 +196,9 @@ struct TarballInput : Input // NAR hashes are preferred over file hashes since tar/zip files // don't have a canonical representation. 
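Reviewer note: the `to_string(SRI)` → `to_string(Base::SRI)` and `to_string(Base16, false)` → `to_string(Base::Base16, false)` changes only touch how the base selector is spelled; the signature stays the same. A standalone sketch of the assumed call shape (the `Hash` stand-in and its fields are hypothetical, for illustration only):

```cpp
// Illustrative only: mimics the assumed Hash::to_string(Base, bool) shape.
#include <iostream>
#include <string>

enum struct Base { Base64, Base32, Base16, SRI };

struct Hash {
    std::string hex;  // stand-in for the real digest bytes
    std::string to_string(Base base, bool includePrefix = true) const {
        // The real implementation re-encodes the digest; this sketch only
        // shows how the scoped Base selector is consumed.
        switch (base) {
            case Base::SRI:    return "sha256-" + hex;
            case Base::Base16: return includePrefix ? "sha256:" + hex : hex;
            default:           return hex;  // Base32/Base64 omitted here
        }
    }
};

int main() {
    Hash h{"deadbeef"};
    std::cout << h.to_string(Base::SRI) << "\n";            // e.g. narHash attribute
    std::cout << h.to_string(Base::Base16, false) << "\n";  // e.g. daemon wire format
}
```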
if (narHash) - url2.query.insert_or_assign("narHash", narHash->to_string(SRI)); + url2.query.insert_or_assign("narHash", narHash->to_string(Base::SRI)); else if (hash) - url2.query.insert_or_assign("hash", hash->to_string(SRI)); + url2.query.insert_or_assign("hash", hash->to_string(Base::SRI)); return url2; } @@ -207,7 +207,7 @@ struct TarballInput : Input Attrs attrs; attrs.emplace("url", url.to_string()); if (hash) - attrs.emplace("hash", hash->to_string(SRI)); + attrs.emplace("hash", hash->to_string(Base::SRI)); return attrs; } diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index 51e199ea5ce..2bcad9f7b22 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -10,19 +10,21 @@ MixCommonArgs::MixCommonArgs(const string & programName) .longName = "verbose", .shortName = 'v', .description = "increase verbosity level", - .handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }}, + .handler = {[]() { verbosity = (Verbosity) ((uint64_t) verbosity + 1); }}, }); addFlag({ .longName = "quiet", .description = "decrease verbosity level", - .handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }}, + .handler = {[]() { verbosity = verbosity > Verbosity::Error + ? (Verbosity) ((uint64_t) verbosity - 1) + : Verbosity::Error; }}, }); addFlag({ .longName = "debug", .description = "enable debug output", - .handler = {[]() { verbosity = lvlDebug; }}, + .handler = {[]() { verbosity = Verbosity::Debug; }}, }); addFlag({ diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 70d1f0186c2..d0e7f4eb118 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -256,7 +256,7 @@ void parseCmdLine(const string & programName, const Strings & args, void printVersion(const string & programName) { std::cout << format("%1% (Nix) %2%") % programName % nixVersion << std::endl; - if (verbosity > lvlInfo) { + if (verbosity > Verbosity::Info) { Strings cfg; #if HAVE_BOEHMGC cfg.push_back("gc"); diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index b4957465227..96d001ec389 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -43,11 +43,11 @@ struct StorePathWithOutputs; void printMissing( ref store, const std::vector & paths, - Verbosity lvl = lvlInfo); + Verbosity lvl = Verbosity::Info); void printMissing(ref store, const StorePathSet & willBuild, const StorePathSet & willSubstitute, const StorePathSet & unknown, - unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl = lvlInfo); + unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl = Verbosity::Info); string getArg(const string & opt, Strings::iterator & i, const Strings::iterator & end); diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index e6a59a32095..418b2eff609 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -137,7 +137,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource auto narInfo = make_ref(info); narInfo->narSize = nar->size(); - narInfo->narHash = hashString(htSHA256, *nar); + narInfo->narHash = hashString(HashType::SHA256, *nar); if (info.narHash && info.narHash != narInfo->narHash) throw Error("refusing to copy corrupted path '%1%' to binary cache", printStorePath(info.path)); @@ -172,16 +172,16 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource auto now1 = std::chrono::steady_clock::now(); auto narCompressed = compress(compression, *nar, 
parallelCompression); auto now2 = std::chrono::steady_clock::now(); - narInfo->fileHash = hashString(htSHA256, *narCompressed); + narInfo->fileHash = hashString(HashType::SHA256, *narCompressed); narInfo->fileSize = narCompressed->size(); auto duration = std::chrono::duration_cast(now2 - now1).count(); - printMsg(lvlTalkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache", + printMsg(Verbosity::Talkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache", printStorePath(narInfo->path), narInfo->narSize, ((1.0 - (double) narCompressed->size() / nar->size()) * 100.0), duration); - narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar" + narInfo->url = "nar/" + narInfo->fileHash.to_string(Base::Base32, false) + ".nar" + (compression == "xz" ? ".xz" : compression == "bzip2" ? ".bz2" : compression == "br" ? ".br" : @@ -209,7 +209,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource // to a GC'ed file, so overwriting might be useful... if (fileExists(key)) return; - printMsg(lvlTalkative, "creating debuginfo link from '%s' to '%s'", key, target); + printMsg(Verbosity::Talkative, "creating debuginfo link from '%s' to '%s'", key, target); upsertFile(key, json.dump(), "application/json"); }; @@ -302,7 +302,7 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath, { auto uri = getUri(); auto storePathS = printStorePath(storePath); - auto act = std::make_shared(*logger, lvlTalkative, actQueryPathInfo, + auto act = std::make_shared(*logger, Verbosity::Talkative, ActivityType::QueryPathInfo, fmt("querying info about '%s' on '%s'", storePathS, uri), Logger::Fields{storePathS, uri}); PushActivity pact(act->id); diff --git a/src/libstore/build.cc b/src/libstore/build.cc index f5c132a8397..b93855f79de 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -107,7 +107,13 @@ typedef std::map WeakGoalMap; class Goal : public std::enable_shared_from_this { public: - typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters, ecIncompleteClosure} ExitCode; + enum struct ExitCode { + Busy, + Success, + Failed, + NoSubstituters, + IncompleteClosure, + }; protected: @@ -141,7 +147,7 @@ class Goal : public std::enable_shared_from_this Goal(Worker & worker) : worker(worker) { nrFailed = nrNoSubstituters = nrIncompleteClosure = 0; - exitCode = ecBusy; + exitCode = ExitCode::Busy; } virtual ~Goal() @@ -361,8 +367,8 @@ class Worker { actDerivations.progress(doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds); actSubstitutions.progress(doneSubstitutions, expectedSubstitutions + doneSubstitutions, runningSubstitutions, failedSubstitutions); - act.setExpected(actFileTransfer, expectedDownloadSize + doneDownloadSize); - act.setExpected(actCopyPath, expectedNarSize + doneNarSize); + act.setExpected(ActivityType::Download, expectedDownloadSize + doneDownloadSize); + act.setExpected(ActivityType::CopyPath, expectedNarSize + doneNarSize); } }; @@ -395,13 +401,13 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result) trace(format("waitee '%1%' done; %2% left") % waitee->name % waitees.size()); - if (result == ecFailed || result == ecNoSubstituters || result == ecIncompleteClosure) ++nrFailed; + if (result == ExitCode::Failed || result == ExitCode::NoSubstituters || result == ExitCode::IncompleteClosure) ++nrFailed; - if (result == ecNoSubstituters) ++nrNoSubstituters; + if (result == ExitCode::NoSubstituters) ++nrNoSubstituters; - if (result == 
ecIncompleteClosure) ++nrIncompleteClosure; + if (result == ExitCode::IncompleteClosure) ++nrIncompleteClosure; - if (waitees.empty() || (result == ecFailed && !settings.keepGoing)) { + if (waitees.empty() || (result == ExitCode::Failed && !settings.keepGoing)) { /* If we failed and keepGoing is not set, we remove all remaining waitees. */ @@ -421,8 +427,8 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result) void Goal::amDone(ExitCode result) { trace("done"); - assert(exitCode == ecBusy); - assert(result == ecSuccess || result == ecFailed || result == ecNoSubstituters || result == ecIncompleteClosure); + assert(exitCode == ExitCode::Busy); + assert(result == ExitCode::Success || result == ExitCode::Failed || result == ExitCode::NoSubstituters || result == ExitCode::IncompleteClosure); exitCode = result; for (auto & i : waiters) { GoalPtr goal = i.lock(); @@ -672,7 +678,7 @@ HookInstance::HookInstance() Strings args = { std::string(baseNameOf(settings.buildHook.get())), - std::to_string(verbosity), + std::to_string((uint64_t)verbosity), }; execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data()); @@ -1401,7 +1407,7 @@ void DerivationGoal::started() { "building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds); fmt("building '%s'", worker.store.printStorePath(drvPath)); if (hook) msg += fmt(" on '%s'", machineName); - act = std::make_unique(*logger, lvlInfo, actBuild, msg, + act = std::make_unique(*logger, Verbosity::Info, ActivityType::Build, msg, Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds}); mcRunningBuilds = std::make_unique>(worker.runningBuilds); worker.updateProgress(); @@ -1457,6 +1463,20 @@ void DerivationGoal::tryToBuild() supported for local builds. */ bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(); + auto started = [&]() { + auto msg = fmt( + buildMode == bmRepair ? "repairing outputs of '%s'" : + buildMode == bmCheck ? "checking outputs of '%s'" : + nrRounds > 1 ? "building '%s' (round %d/%d)" : + "building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds); + fmt("building '%s'", worker.store.printStorePath(drvPath)); + if (hook) msg += fmt(" on '%s'", machineName); + act = std::make_unique(*logger, Verbosity::Info, ActivityType::Build, msg, + Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds}); + mcRunningBuilds = std::make_unique>(worker.runningBuilds); + worker.updateProgress(); + }; + /* Is the build hook willing to accept this job? */ if (!buildLocally) { switch (tryBuildHook()) { @@ -1659,7 +1679,7 @@ void DerivationGoal::buildDone() registerOutputs(); if (settings.postBuildHook != "") { - Activity act(*logger, lvlInfo, actPostBuildHook, + Activity act(*logger, Verbosity::Info, ActivityType::PostBuildHook, fmt("running post-build-hook '%s'", settings.postBuildHook), Logger::Fields{worker.store.printStorePath(drvPath)}); PushActivity pact(act.id); @@ -1694,7 +1714,7 @@ void DerivationGoal::buildDone() if (settings.verboseBuild) { printError("post-build-hook: " + currentLine); } else { - act.result(resPostBuildLogLine, currentLine); + act.result(ResultType::PostBuildLogLine, currentLine); } currentLine.clear(); } @@ -2097,7 +2117,7 @@ void DerivationGoal::startBuilder() /* Clean up the chroot directory automatically. 
*/ autoDelChroot = std::make_shared(chrootRootDir); - printMsg(lvlChatty, format("setting up chroot environment in '%1%'") % chrootRootDir); + printMsg(Verbosity::Chatty, format("setting up chroot environment in '%1%'") % chrootRootDir); if (mkdir(chrootRootDir.c_str(), 0750) == -1) throw SysError(format("cannot create '%1%'") % chrootRootDir); @@ -2206,7 +2226,7 @@ void DerivationGoal::startBuilder() } if (useChroot && settings.preBuildHook != "" && dynamic_cast(drv.get())) { - printMsg(lvlChatty, format("executing pre-build hook '%1%'") + printMsg(Verbosity::Chatty, format("executing pre-build hook '%1%'") % settings.preBuildHook); auto args = useChroot ? Strings({worker.store.printStorePath(drvPath), chrootRootDir}) : Strings({ worker.store.printStorePath(drvPath) }); @@ -2248,7 +2268,7 @@ void DerivationGoal::startBuilder() startDaemon(); /* Run the builder. */ - printMsg(lvlChatty, format("executing builder '%1%'") % drv->builder); + printMsg(Verbosity::Chatty, format("executing builder '%1%'") % drv->builder); /* Create the log file. */ Path logFile = openLogFile(); @@ -2484,8 +2504,8 @@ void DerivationGoal::initTmpDir() { if (passAsFile.find(i.first) == passAsFile.end()) { env[i.first] = i.second; } else { - auto hash = hashString(htSHA256, i.first); - string fn = ".attr-" + hash.to_string(Base32, false); + auto hash = hashString(HashType::SHA256, i.first); + string fn = ".attr-" + hash.to_string(Base::Base32, false); Path p = tmpDir + "/" + fn; writeFile(p, rewriteStrings(i.second, inputRewrites)); chownToBuilder(p); @@ -2734,7 +2754,7 @@ struct RestrictedStore : public LocalFSStore { throw Error("queryPathFromHashPart"); } StorePath addToStore(const string & name, const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, + FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override { throw Error("addToStore"); } @@ -2747,7 +2767,7 @@ struct RestrictedStore : public LocalFSStore } StorePath addToStoreFromDump(const string & dump, const string & name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override + FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair) override { auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair); goal.addDependency(path); @@ -3706,8 +3726,8 @@ void DerivationGoal::registerOutputs() /* Check the hash. In hash mode, move the path produced by the derivation to its content-addressed location. */ Hash h2 = outputHashMode == FileIngestionMethod::Recursive - ? hashPath(h.type, actualPath).first - : hashFile(h.type, actualPath); + ? 
hashPath(*h.type, actualPath).first + : hashFile(*h.type, actualPath); auto dest = worker.store.makeFixedOutputPath(outputHashMode, h2, i.second.path.name()); @@ -3718,7 +3738,7 @@ void DerivationGoal::registerOutputs() worker.hashMismatch = true; delayedException = std::make_exception_ptr( BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s", - worker.store.printStorePath(dest), h.to_string(SRI), h2.to_string(SRI))); + worker.store.printStorePath(dest), h.to_string(Base::SRI), h2.to_string(Base::SRI))); Path actualDest = worker.store.Store::toRealPath(dest); @@ -4163,7 +4183,7 @@ void DerivationGoal::flushLine() if (logTail.size() > settings.logLines) logTail.pop_front(); } - act->result(resBuildLogLine, currentLogLine); + act->result(ResultType::BuildLogLine, currentLogLine); } currentLogLine = ""; @@ -4190,7 +4210,7 @@ void DerivationGoal::addHashRewrite(const StorePath & path) auto h1 = std::string(((std::string_view) path.to_string()).substr(0, 32)); auto p = worker.store.makeStorePath( "rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()), - Hash(htSHA256), path.name()); + Hash(HashType::SHA256), path.name()); auto h2 = std::string(((std::string_view) p.to_string()).substr(0, 32)); deletePath(worker.store.printStorePath(p)); inputRewrites[h1] = h2; @@ -4203,7 +4223,7 @@ void DerivationGoal::done(BuildResult::Status status, const string & msg) { result.status = status; result.errorMsg = msg; - amDone(result.success() ? ecSuccess : ecFailed); + amDone(result.success() ? ExitCode::Success : ExitCode::Failed); if (result.status == BuildResult::TimedOut) worker.timedOut = true; if (result.status == BuildResult::PermanentFailure) @@ -4344,7 +4364,7 @@ void SubstitutionGoal::init() /* If the path already exists we're done. */ if (!repair && worker.store.isValidPath(storePath)) { - amDone(ecSuccess); + amDone(ExitCode::Success); return; } @@ -4369,7 +4389,7 @@ void SubstitutionGoal::tryNext() /* Hack: don't indicate failure if there were no substituters. In that case the calling derivation should just do a build. */ - amDone(substituterFailed ? ecFailed : ecNoSubstituters); + amDone(substituterFailed ? ExitCode::Failed : ExitCode::NoSubstituters); if (substituterFailed) { worker.failedSubstitutions++; @@ -4452,7 +4472,7 @@ void SubstitutionGoal::referencesValid() if (nrFailed > 0) { debug("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)); - amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed); + amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ExitCode::IncompleteClosure : ExitCode::Failed); return; } @@ -4490,7 +4510,7 @@ void SubstitutionGoal::tryToRun() /* Wake up the worker loop when we're done. 
*/ Finally updateStats([this]() { outPipe.writeSide = -1; }); - Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()}); + Activity act(*logger, ActivityType::Substitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()}); PushActivity pact(act.id); copyStorePath(ref(sub), ref(worker.store.shared_from_this()), @@ -4539,7 +4559,7 @@ void SubstitutionGoal::finished() worker.markContentsGood(storePath.clone()); - printMsg(lvlChatty, "substitution of path '%s' succeeded", worker.store.printStorePath(storePath)); + printMsg(Verbosity::Chatty, "substitution of path '%s' succeeded", worker.store.printStorePath(storePath)); maintainRunningSubstitutions.reset(); @@ -4557,7 +4577,7 @@ void SubstitutionGoal::finished() worker.updateProgress(); - amDone(ecSuccess); + amDone(ExitCode::Success); } @@ -4576,9 +4596,9 @@ void SubstitutionGoal::handleEOF(int fd) Worker::Worker(LocalStore & store) - : act(*logger, actRealise) - , actDerivations(*logger, actBuilds) - , actSubstitutions(*logger, actCopyPaths) + : act(*logger, ActivityType::Realise) + , actDerivations(*logger, ActivityType::Builds) + , actSubstitutions(*logger, ActivityType::CopyPaths) , store(store) { /* Debugging: prevent recursive workers. */ @@ -4662,7 +4682,7 @@ void Worker::removeGoal(GoalPtr goal) topGoals.erase(goal); /* If a top-level goal failed, then kill all other goals (unless keepGoing was set). */ - if (goal->getExitCode() == Goal::ecFailed && !settings.keepGoing) + if (goal->getExitCode() == Goal::ExitCode::Failed && !settings.keepGoing) topGoals.clear(); } @@ -4806,7 +4826,7 @@ void Worker::run(const Goals & _topGoals) void Worker::waitForInput() { - printMsg(lvlVomit, "waiting for children"); + printMsg(Verbosity::Vomit, "waiting for children"); /* Process output from the file descriptors attached to the children, namely log output and output path creation commands. 
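Reviewer note: the build.cc hunks above replace the old `typedef enum {ecBusy, ...} ExitCode` with a scoped `enum struct ExitCode`, so every comparison and `amDone()` call must now be qualified (`Goal::ExitCode::Success` instead of `ecSuccess`). A compilable sketch of that pattern, using a stub `Goal` rather than the real class:

```cpp
// Standalone illustration of the ExitCode migration; Goal here is a stub,
// not the real goal class from build.cc.
#include <cassert>

struct Goal {
    enum struct ExitCode {
        Busy, Success, Failed, NoSubstituters, IncompleteClosure,
    };
    ExitCode exitCode = ExitCode::Busy;

    void amDone(ExitCode result) {
        assert(exitCode == ExitCode::Busy);   // was: ecBusy
        assert(result != ExitCode::Busy);
        exitCode = result;
    }
    ExitCode getExitCode() const { return exitCode; }
};

int main() {
    Goal g;
    g.amDone(Goal::ExitCode::Success);        // was: amDone(ecSuccess)
    return g.getExitCode() == Goal::ExitCode::Success ? 0 : 1;
}
```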
@@ -4898,7 +4918,7 @@ void Worker::waitForInput() if (errno != EINTR) throw SysError("%s: read failed", goal->getName()); } else { - printMsg(lvlVomit, format("%1%: read %2% bytes") + printMsg(Verbosity::Vomit, format("%1%: read %2% bytes") % goal->getName() % rd); string data((char *) buffer.data(), rd); j->lastOutput = after; @@ -4907,7 +4927,7 @@ void Worker::waitForInput() } } - if (goal->getExitCode() == Goal::ecBusy && + if (goal->getExitCode() == Goal::ExitCode::Busy && 0 != settings.maxSilentTime && j->respectTimeouts && after - j->lastOutput >= std::chrono::seconds(settings.maxSilentTime)) @@ -4918,7 +4938,7 @@ void Worker::waitForInput() goal->timedOut(); } - else if (goal->getExitCode() == Goal::ecBusy && + else if (goal->getExitCode() == Goal::ExitCode::Busy && 0 != settings.buildTimeout && j->respectTimeouts && after - j->timeStarted >= std::chrono::seconds(settings.buildTimeout)) @@ -4979,8 +4999,8 @@ bool Worker::pathContentsGood(const StorePath & path) if (!pathExists(store.printStorePath(path))) res = false; else { - HashResult current = hashPath(info->narHash.type, store.printStorePath(path)); - Hash nullHash(htSHA256); + HashResult current = hashPath(*info->narHash.type, store.printStorePath(path)); + Hash nullHash(HashType::SHA256); res = info->narHash == nullHash || info->narHash == current.first; } pathContentsGoodCache.insert_or_assign(path.clone(), res); @@ -5029,7 +5049,7 @@ void LocalStore::buildPaths(const std::vector & drvPaths, StorePathSet failed; for (auto & i : goals) { - if (i->getExitCode() != Goal::ecSuccess) { + if (i->getExitCode() != Goal::ExitCode::Success) { DerivationGoal * i2 = dynamic_cast(i.get()); if (i2) failed.insert(i2->getDrvPath()); else failed.insert(dynamic_cast(i.get())->getStorePath()); @@ -5074,7 +5094,7 @@ void LocalStore::ensurePath(const StorePath & path) worker.run(goals); - if (goal->getExitCode() != Goal::ecSuccess) + if (goal->getExitCode() != Goal::ExitCode::Success) throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path)); } @@ -5087,7 +5107,7 @@ void LocalStore::repairPath(const StorePath & path) worker.run(goals); - if (goal->getExitCode() != Goal::ecSuccess) { + if (goal->getExitCode() != Goal::ExitCode::Success) { /* Since substituting the path didn't work, if we have a valid deriver, then rebuild the deriver. 
*/ auto info = queryPathInfo(path); diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 486babf1427..770df292711 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -63,9 +63,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) for (auto hashedMirror : settings.hashedMirrors.get()) try { if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; - auto ht = parseHashType(getAttr("outputHashAlgo")); + auto ht = parseHashTypeOpt(getAttr("outputHashAlgo")); auto h = Hash(getAttr("outputHash"), ht); - fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false)); + fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base::Base16, false)); return; } catch (Error & e) { debug(e.what()); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 117ee8c3585..970fc80645f 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -115,7 +115,13 @@ struct TunnelLogger : public Logger } StringSink buf; - buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent; + buf << STDERR_START_ACTIVITY + << act + << (uint64_t) lvl + << (uint64_t) type + << s + << fields + << parent; enqueueMsg(*buf.s); } @@ -131,7 +137,10 @@ struct TunnelLogger : public Logger { if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; StringSink buf; - buf << STDERR_RESULT << act << type << fields; + buf << STDERR_RESULT + << act + << (uint64_t) type + << fields; enqueueMsg(*buf.s); } }; @@ -303,7 +312,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto hash = store->queryPathInfo(path)->narHash; logger->stopWork(); - to << hash.to_string(Base16, false); + to << hash.to_string(Base::Base16, false); break; } @@ -563,7 +572,7 @@ static void performOp(TunnelLogger * logger, ref store, clientSettings.maxBuildJobs = readInt(from); clientSettings.maxSilentTime = readInt(from); readInt(from); // obsolete useBuildHook - clientSettings.verboseBuild = lvlError == (Verbosity) readInt(from); + clientSettings.verboseBuild = Verbosity::Error == (Verbosity) readInt(from); readInt(from); // obsolete logType readInt(from); // obsolete printBuildTrace clientSettings.buildCores = readInt(from); @@ -648,7 +657,7 @@ static void performOp(TunnelLogger * logger, ref store, if (GET_PROTOCOL_MINOR(clientVersion) >= 17) to << 1; to << (info->deriver ? 
store->printStorePath(*info->deriver) : "") - << info->narHash.to_string(Base16, false); + << info->narHash.to_string(Base::Base16, false); writeStorePaths(*store, to, info->references); to << info->registrationTime << info->narSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { @@ -708,7 +717,7 @@ static void performOp(TunnelLogger * logger, ref store, auto deriver = readString(from); if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.narHash = Hash(readString(from), htSHA256); + info.narHash = Hash(readString(from), HashType::SHA256); info.references = readStorePaths(*store, from); from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); @@ -791,7 +800,7 @@ void processConnection( Finally finally([&]() { _isInterrupted = false; - prevLogger->log(lvlDebug, fmt("%d operations", opCount)); + prevLogger->log(Verbosity::Debug, fmt("%d operations", opCount)); }); if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) { diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index acab418b63a..bcfb49c69f0 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -23,8 +23,6 @@ void DerivationOutput::parseHashInfo(FileIngestionMethod & method, Hash & hash) } HashType hashType = parseHashType(algo); - if (hashType == htUnknown) - throw Error("unknown hash algorithm '%s'", algo); hash = Hash(this->hash, hashType); } @@ -367,7 +365,7 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput /* Return a fixed hash for fixed-output derivations. */ if (drv.isFixedOutput()) { DerivationOutputs::const_iterator i = drv.outputs.begin(); - return hashString(htSHA256, "fixed:out:" + return hashString(HashType::SHA256, "fixed:out:" + i->second.hashAlgo + ":" + i->second.hash + ":" + store.printStorePath(i->second.path)); @@ -383,10 +381,10 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput h = drvHashes.insert_or_assign(i.first.clone(), hashDerivationModulo(store, readDerivation(store, store.toRealPath(i.first)), false)).first; } - inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second); + inputs2.insert_or_assign(h->second.to_string(Base::Base16, false), i.second); } - return hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2)); + return hashString(HashType::SHA256, drv.unparse(store, maskOutputs, &inputs2)); } @@ -456,7 +454,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr std::string hashPlaceholder(const std::string & outputName) { // FIXME: memoize? 
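Reviewer note: because scoped enums do not convert implicitly to integers, the daemon protocol and the --verbose/--quiet handlers now cast through an integer type before streaming or doing arithmetic (see the `(uint64_t) lvl`, `(uint64_t) type` and `(uint64_t) verbosity + 1` hunks). A self-contained sketch of that pattern, with a stand-in sink instead of the real StringSink:

```cpp
// Sketch only: 'Sink' stands in for Nix's serialisation sink.
#include <cstdint>
#include <iostream>

enum struct Verbosity : uint64_t { Error = 0, Warn, Info, Talkative, Chatty, Debug, Vomit };

struct Sink {
    Sink & operator<<(uint64_t n) { std::cout << n << ' '; return *this; }
};

int main() {
    Verbosity verbosity = Verbosity::Info;

    // --verbose: bump the level via an explicit round-trip through uint64_t.
    verbosity = (Verbosity) ((uint64_t) verbosity + 1);

    // --quiet: clamp at Error instead of underflowing.
    verbosity = verbosity > Verbosity::Error
        ? (Verbosity) ((uint64_t) verbosity - 1)
        : Verbosity::Error;

    // Wire format: the protocol still sends plain integers, so cast explicitly.
    Sink sink;
    sink << (uint64_t) verbosity;
    std::cout << '\n';
    return 0;
}
```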
- return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false); + return "/" + hashString(HashType::SHA256, "nix-output:" + outputName).to_string(Base::Base32, false); } diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index f0d01a24043..aef5cc6daf9 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -11,7 +11,7 @@ struct HashAndWriteSink : Sink { Sink & writeSink; HashSink hashSink; - HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(htSHA256) + HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(HashType::SHA256) { } virtual void operator () (const unsigned char * data, size_t len) @@ -34,7 +34,7 @@ void Store::exportPaths(const StorePathSet & paths, Sink & sink) //logger->incExpected(doneLabel, sorted.size()); for (auto & path : sorted) { - //Activity act(*logger, lvlInfo, format("exporting path '%s'") % path); + //Activity act(*logger, Verbosity::Info, format("exporting path '%s'") % path); sink << 1; exportPath(path, sink); //logger->incProgress(doneLabel); @@ -55,7 +55,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) filesystem corruption from spreading to other machines. Don't complain if the stored hash is zero (unknown). */ Hash hash = hashAndWriteSink.currentHash(); - if (hash != info->narHash && info->narHash != Hash(info->narHash.type)) + if (hash != info->narHash && info->narHash != Hash(*info->narHash.type)) throw Error("hash of path '%s' has changed from '%s' to '%s'!", printStorePath(path), info->narHash.to_string(), hash.to_string()); @@ -86,7 +86,7 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr acces ValidPathInfo info(parseStorePath(readString(source))); - //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); + //Activity act(*logger, Verbosity::Info, format("importing path '%s'") % info.path); info.references = readStorePaths(*this, source); @@ -94,7 +94,7 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr acces if (deriver != "") info.deriver = parseStorePath(deriver); - info.narHash = hashString(htSHA256, *tee.source.data); + info.narHash = hashString(HashType::SHA256, *tee.source.data); info.narSize = tee.source.data->size(); // Ignore optional legacy signature. diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index e9684b3d495..933c8038d67 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -77,7 +77,7 @@ struct curlFileTransfer : public FileTransfer Callback && callback) : fileTransfer(fileTransfer) , request(request) - , act(*logger, lvlTalkative, actFileTransfer, + , act(*logger, Verbosity::Talkative, ActivityType::Download, fmt(request.data ? 
"uploading '%s'" : "downloading '%s'", request.uri), {request.uri}, request.parentAct) , callback(std::move(callback)) @@ -163,7 +163,7 @@ struct curlFileTransfer : public FileTransfer { size_t realSize = size * nmemb; std::string line((char *) contents, realSize); - printMsg(lvlVomit, format("got header for '%s': %s") % request.uri % trim(line)); + printMsg(Verbosity::Vomit, format("got header for '%s': %s") % request.uri % trim(line)); if (line.compare(0, 5, "HTTP/") == 0) { // new response starts result.etag = ""; auto ss = tokenizeString>(line, " "); @@ -246,7 +246,7 @@ struct curlFileTransfer : public FileTransfer curl_easy_reset(req); - if (verbosity >= lvlVomit) { + if (verbosity >= Verbosity::Vomit) { curl_easy_setopt(req, CURLOPT_VERBOSE, 1); curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback); } diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 95a4bc934a8..f2646f6b347 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -78,7 +78,7 @@ void LocalStore::syncWithGC() void LocalStore::addIndirectRoot(const Path & path) { - string hash = hashString(htSHA1, path).to_string(Base32, false); + string hash = hashString(HashType::SHA1, path).to_string(Base::Base32, false); Path realRoot = canonPath((format("%1%/%2%/auto/%3%") % stateDir % gcRootsDir % hash).str()); makeSymlink(realRoot, path); @@ -637,7 +637,7 @@ void LocalStore::tryToDelete(GCState & state, const Path & path) auto realPath = realStoreDir + "/" + std::string(baseNameOf(path)); if (realPath == linksDir || realPath == trashDir) return; - //Activity act(*logger, lvlDebug, format("considering whether to delete '%1%'") % path); + //Activity act(*logger, Verbosity::Debug, format("considering whether to delete '%1%'") % path); auto storePath = maybeParseStorePath(path); @@ -702,7 +702,7 @@ void LocalStore::removeUnusedLinks(const GCState & state) continue; } - printMsg(lvlTalkative, format("deleting unused link '%1%'") % path); + printMsg(Verbosity::Talkative, format("deleting unused link '%1%'") % path); if (unlink(path.c_str()) == -1) throw SysError(format("deleting '%1%'") % path); diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index af20d389b06..61e7603b799 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -139,7 +139,7 @@ struct LegacySSHStore : public Store << cmdAddToStoreNar << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") - << info.narHash.to_string(Base16, false); + << info.narHash.to_string(Base::Base16, false); writeStorePaths(*this, conn->to, info.references); conn->to << info.registrationTime diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 14c816e7282..cca4ef8095b 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -581,7 +581,7 @@ uint64_t LocalStore::addValidPath(State & state, state.stmtRegisterValidPath.use() (printStorePath(info.path)) - (info.narHash.to_string(Base16)) + (info.narHash.to_string(Base::Base16)) (info.registrationTime == 0 ? time(0) : info.registrationTime) (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver) (info.narSize, info.narSize != 0) @@ -681,7 +681,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info) { state.stmtUpdatePathInfo.use() (info.narSize, info.narSize != 0) - (info.narHash.to_string(Base16)) + (info.narHash.to_string(Base::Base16)) (info.ultimate ? 
1 : 0, info.ultimate) (concatStringsSep(" ", info.sigs), !info.sigs.empty()) (info.ca, !info.ca.empty()) @@ -909,7 +909,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) StorePathSet paths; for (auto & i : infos) { - assert(i.narHash.type == htSHA256); + assert(i.narHash.type == HashType::SHA256); if (isValidPath_(*state, i.path)) updatePathInfo(*state, i); else @@ -1007,9 +1007,9 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, of the NAR. */ std::unique_ptr hashSink; if (info.ca == "" || !info.references.count(info.path)) - hashSink = std::make_unique(htSHA256); + hashSink = std::make_unique(HashType::SHA256); else - hashSink = std::make_unique(htSHA256, storePathToHash(printStorePath(info.path))); + hashSink = std::make_unique(HashType::SHA256, storePathToHash(printStorePath(info.path))); LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t { size_t n = source.read(data, len); @@ -1049,7 +1049,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) { - if (method == FileIngestionMethod::Git && hashAlgo != htSHA1) + if (method == FileIngestionMethod::Git && hashAlgo != HashType::SHA1) throw Error("git ingestion must use sha1 hash"); Hash h = hashString(hashAlgo, dump); @@ -1098,10 +1098,10 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam sha256); otherwise, compute it here. */ HashResult hash; if (method == FileIngestionMethod::Recursive) { - hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump); + hash.first = hashAlgo == HashType::SHA256 ? h : hashString(HashType::SHA256, dump); hash.second = dump.size(); } else - hash = hashPath(htSHA256, realPath); + hash = hashPath(HashType::SHA256, realPath); optimisePath(realPath); // FIXME: combine with hashPath() @@ -1124,7 +1124,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, { Path srcPath(absPath(_srcPath)); - if (method == FileIngestionMethod::Git && hashAlgo != htSHA1) + if (method == FileIngestionMethod::Git && hashAlgo != HashType::SHA1) throw Error("git ingestion must use sha1 hash"); /* Read the whole path into memory. 
This is not a very scalable @@ -1161,7 +1161,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, StorePath LocalStore::addTextToStore(const string & name, const string & s, const StorePathSet & references, RepairFlag repair) { - auto hash = hashString(htSHA256, s); + auto hash = hashString(HashType::SHA256, s); auto dstPath = makeTextPath(name, hash, references); addTempRoot(dstPath); @@ -1185,7 +1185,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s, StringSink sink; dumpString(s, sink); - auto narHash = hashString(htSHA256, *sink.s); + auto narHash = hashString(HashType::SHA256, *sink.s); optimisePath(realPath); @@ -1271,9 +1271,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) printInfo("checking link hashes..."); for (auto & link : readDirectory(linksDir)) { - printMsg(lvlTalkative, "checking contents of '%s'", link.name); + printMsg(Verbosity::Talkative, "checking contents of '%s'", link.name); Path linkPath = linksDir + "/" + link.name; - string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false); + string hash = hashPath(HashType::SHA256, linkPath).first.to_string(Base::Base32, false); if (hash != link.name) { printError( "link '%s' was modified! expected hash '%s', got '%s'", @@ -1291,20 +1291,20 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) printInfo("checking store hashes..."); - Hash nullHash(htSHA256); + Hash nullHash(HashType::SHA256); for (auto & i : validPaths) { try { auto info = std::const_pointer_cast(std::shared_ptr(queryPathInfo(i))); /* Check the content hash (optionally - slow). */ - printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i)); + printMsg(Verbosity::Talkative, "checking contents of '%s'", printStorePath(i)); std::unique_ptr hashSink; if (info->ca == "" || !info->references.count(info->path)) - hashSink = std::make_unique(info->narHash.type); + hashSink = std::make_unique(*info->narHash.type); else - hashSink = std::make_unique(info->narHash.type, storePathToHash(printStorePath(info->path))); + hashSink = std::make_unique(*info->narHash.type, storePathToHash(printStorePath(info->path))); dumpPath(Store::toRealPath(i), *hashSink); auto current = hashSink->finish(); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index c1e75390c5d..bc9a415b302 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -157,7 +157,7 @@ public: true) or simply the contents of a regular file (if recursive == false). 
*/ StorePath addToStoreFromDump(const string & dump, const string & name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override; + FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair) override; StorePath addTextToStore(const string & name, const string & s, const StorePathSet & references, RepairFlag repair) override; diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 9c47fe52454..fb538a1c52d 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -112,7 +112,7 @@ void Store::queryMissing(const std::vector & targets, StorePathSet & willBuild_, StorePathSet & willSubstitute_, StorePathSet & unknown_, unsigned long long & downloadSize_, unsigned long long & narSize_) { - Activity act(*logger, lvlDebug, actUnknown, "querying info about missing paths"); + Activity act(*logger, Verbosity::Debug, ActivityType::Unknown, "querying info about missing paths"); downloadSize_ = narSize_ = 0; diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 1375094b5bf..a35886302f3 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -86,11 +86,11 @@ std::string NarInfo::to_string(const Store & store) const res += "URL: " + url + "\n"; assert(compression != ""); res += "Compression: " + compression + "\n"; - assert(fileHash.type == htSHA256); - res += "FileHash: " + fileHash.to_string(Base32) + "\n"; + assert(fileHash.type == HashType::SHA256); + res += "FileHash: " + fileHash.to_string(Base::Base32) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n"; - assert(narHash.type == htSHA256); - res += "NarHash: " + narHash.to_string(Base32) + "\n"; + assert(narHash.type == HashType::SHA256); + res += "NarHash: " + narHash.to_string(Base::Base32) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n"; res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 8ac382e9dbf..5c01e1b3b28 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -57,7 +57,7 @@ LocalStore::InodeHash LocalStore::loadInodeHash() } if (errno) throw SysError(format("reading directory '%1%'") % linksDir); - printMsg(lvlTalkative, format("loaded %1% hash inodes") % inodeHash.size()); + printMsg(Verbosity::Talkative, format("loaded %1% hash inodes") % inodeHash.size()); return inodeHash; } @@ -149,11 +149,11 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, Also note that if `path' is a symlink, then we're hashing the contents of the symlink (i.e. the result of readlink()), not the contents of the target (which may not even exist). */ - Hash hash = hashPath(htSHA256, path).first; + Hash hash = hashPath(HashType::SHA256, path).first; debug(format("'%1%' has hash '%2%'") % path % hash.to_string()); /* Check if this is a known hash. 
*/ - Path linkPath = linksDir + "/" + hash.to_string(Base32, false); + Path linkPath = linksDir + "/" + hash.to_string(Base::Base32, false); retry: if (!pathExists(linkPath)) { @@ -199,7 +199,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, goto retry; } - printMsg(lvlTalkative, format("linking '%1%' to '%2%'") % path % linkPath); + printMsg(Verbosity::Talkative, format("linking '%1%' to '%2%'") % path % linkPath); /* Make the containing directory writable, but only if it's not the store itself (we don't want or need to mess with its @@ -246,13 +246,13 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, stats.blocksFreed += st.st_blocks; if (act) - act->result(resFileLinked, st.st_size, st.st_blocks); + act->result(ResultType::FileLinked, st.st_size, st.st_blocks); } void LocalStore::optimiseStore(OptimiseStats & stats) { - Activity act(*logger, actOptimiseStore); + Activity act(*logger, ActivityType::OptimiseStore); auto paths = queryAllValidPaths(); InodeHash inodeHash = loadInodeHash(); @@ -265,7 +265,7 @@ void LocalStore::optimiseStore(OptimiseStats & stats) addTempRoot(i); if (!isValidPath(i)) continue; /* path was GC'ed, probably */ { - Activity act(*logger, lvlTalkative, actUnknown, fmt("optimising path '%s'", printStorePath(i))); + Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("optimising path '%s'", printStorePath(i))); optimisePath_(&act, stats, realStoreDir + "/" + std::string(i.to_string()), inodeHash); } done++; diff --git a/src/libstore/references.cc b/src/libstore/references.cc index 102e1592180..6652e1e26ca 100644 --- a/src/libstore/references.cc +++ b/src/libstore/references.cc @@ -54,7 +54,7 @@ struct RefScanSink : Sink string tail; - RefScanSink() : hashSink(htSHA256) { } + RefScanSink() : hashSink(HashType::SHA256) { } void operator () (const unsigned char * data, size_t len); }; @@ -96,7 +96,7 @@ PathSet scanForReferences(const string & path, string s = string(baseName, 0, pos); assert(s.size() == refLength); assert(backMap.find(s) == backMap.end()); - // parseHash(htSHA256, s); + // parseHash(HashType::SHA256, s); sink.hashes.insert(s); backMap[s] = i; } diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index fbc161cc92c..16e94b687e5 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -178,11 +178,11 @@ void RemoteStore::setOptions(Connection & conn) << settings.keepFailed << settings.keepGoing << settings.tryFallback - << verbosity + << (uint64_t) verbosity << settings.maxBuildJobs << settings.maxSilentTime << true - << (settings.verboseBuild ? lvlError : lvlVomit) + << (uint64_t) (settings.verboseBuild ? Verbosity::Error : Verbosity::Vomit) << 0 // obsolete log type << 0 /* obsolete print build trace */ << settings.buildCores @@ -376,7 +376,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path, info = std::make_shared(path.clone()); auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); - info->narHash = Hash(readString(conn->from), htSHA256); + info->narHash = Hash(readString(conn->from), HashType::SHA256); info->references = readStorePaths(*this, conn->from); conn->from >> info->registrationTime >> info->narSize; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { @@ -472,7 +472,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, conn->to << wopAddToStoreNar << printStorePath(info.path) << (info.deriver ? 
printStorePath(*info.deriver) : "") - << info.narHash.to_string(Base16, false); + << info.narHash.to_string(Base::Base16, false); writeStorePaths(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << info.ca @@ -489,7 +489,7 @@ StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath, { if (repair) throw Error("repairing is not supported when building through the Nix daemon"); - if (method == FileIngestionMethod::Git && hashAlgo != htSHA1) + if (method == FileIngestionMethod::Git && hashAlgo != HashType::SHA1) throw Error("git ingestion must use sha1 hash"); Path srcPath(absPath(_srcPath)); @@ -509,7 +509,7 @@ StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath, conn->to << wopAddToStore << name - << ((hashAlgo == htSHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ + << ((hashAlgo == HashType::SHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ << (uint8_t) method << printHashType(hashAlgo); diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 3c86b4524eb..bd541779c14 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -65,7 +65,7 @@ public: std::shared_ptr accessor) override; StorePath addToStore(const string & name, const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, + FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override; StorePath addTextToStore(const string & name, const string & s, diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index b24e7b7d621..f43581bf671 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -68,9 +68,9 @@ static void initAWS() shared.cc), so don't let aws-sdk-cpp override it. */ options.cryptoOptions.initAndCleanupOpenSSL = false; - if (verbosity >= lvlDebug) { + if (verbosity >= Verbosity::Debug) { options.loggingOptions.logLevel = - verbosity == lvlDebug + verbosity == Verbosity::Debug ? 
Aws::Utils::Logging::LogLevel::Debug : Aws::Utils::Logging::LogLevel::Trace; options.loggingOptions.logger_create_fn = [options]() { diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 84548a6e4eb..f61c094a220 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -58,7 +58,7 @@ std::unique_ptr SSHMaster::startCommand(const std::string addCommonSSHOpts(args); if (socketPath != "") args.insert(args.end(), {"-S", socketPath}); - if (verbosity >= lvlChatty) + if (verbosity >= Verbosity::Chatty) args.push_back("-v"); } @@ -110,7 +110,7 @@ Path SSHMaster::startMaster() , "-o", "LocalCommand=echo started" , "-o", "PermitLocalCommand=yes" }; - if (verbosity >= lvlChatty) + if (verbosity >= Verbosity::Chatty) args.push_back("-v"); addCommonSSHOpts(args); execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 39c4009b5a0..7e6358bdab9 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -142,8 +142,8 @@ StorePath Store::makeStorePath(const string & type, const Hash & hash, std::string_view name) const { /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ - string s = type + ":" + hash.to_string(Base16) + ":" + storeDir + ":" + std::string(name); - auto h = compressHash(hashString(htSHA256, s), 20); + string s = type + ":" + hash.to_string(Base::Base16) + ":" + storeDir + ":" + std::string(name); + auto h = compressHash(hashString(HashType::SHA256, s), 20); return StorePath::make(h.hash, name); } @@ -178,18 +178,18 @@ StorePath Store::makeFixedOutputPath( const StorePathSet & references, bool hasSelfReference) const { - if (method == FileIngestionMethod::Git && hash.type != htSHA1) + if (method == FileIngestionMethod::Git && hash.type != HashType::SHA1) throw Error("Git file ingestion must use sha1 hash"); - if (hash.type == htSHA256 && method == FileIngestionMethod::Recursive) { + if (hash.type == HashType::SHA256 && method == FileIngestionMethod::Recursive) { return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name); } else { assert(references.empty()); return makeStorePath("output:out", - hashString(htSHA256, + hashString(HashType::SHA256, "fixed:out:" + ingestionMethodPrefix(method) - + hash.to_string(Base16) + ":"), + + hash.to_string(Base::Base16) + ":"), name); } } @@ -198,7 +198,7 @@ StorePath Store::makeFixedOutputPath( StorePath Store::makeTextPath(std::string_view name, const Hash & hash, const StorePathSet & references) const { - assert(hash.type == htSHA256); + assert(hash.type == HashType::SHA256); /* Stuff the references (if any) into the type. This is a bit hacky, but we can't put them in `s' since that would be ambiguous. 
*/ @@ -231,7 +231,7 @@ std::pair Store::computeStorePathForPath(std::string_view name, StorePath Store::computeStorePathForText(const string & name, const string & s, const StorePathSet & references) const { - return makeTextPath(name, hashString(htSHA256, s), references); + return makeTextPath(name, hashString(HashType::SHA256, s), references); } @@ -444,7 +444,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths, auto info = queryPathInfo(i); if (showHash) { - s += info->narHash.to_string(Base16, false) + "\n"; + s += info->narHash.to_string(Base::Base16, false) + "\n"; s += (format("%1%\n") % info->narSize).str(); } @@ -582,7 +582,7 @@ void copyStorePath(ref srcStore, ref dstStore, auto srcUri = srcStore->getUri(); auto dstUri = dstStore->getUri(); - Activity act(*logger, lvlInfo, actCopyPath, + Activity act(*logger, Verbosity::Info, ActivityType::CopyPath, srcUri == "local" || srcUri == "daemon" ? fmt("copying path '%s' to '%s'", srcStore->printStorePath(storePath), dstUri) : dstUri == "local" || dstUri == "daemon" @@ -599,7 +599,7 @@ void copyStorePath(ref srcStore, ref dstStore, StringSink sink; srcStore->narFromPath({storePath}, sink); auto info2 = make_ref(*info); - info2->narHash = hashString(htSHA256, *sink.s); + info2->narHash = hashString(HashType::SHA256, *sink.s); if (!info->narSize) info2->narSize = sink.s->size(); if (info->ultimate) info2->ultimate = false; info = info2; @@ -641,7 +641,7 @@ void copyPaths(ref srcStore, ref dstStore, const StorePathSet & st if (missing.empty()) return; - Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size())); + Activity act(*logger, Verbosity::Info, ActivityType::CopyPaths, fmt("copying %d paths", missing.size())); std::atomic nrDone{0}; std::atomic nrFailed{0}; @@ -667,7 +667,7 @@ void copyPaths(ref srcStore, ref dstStore, const StorePathSet & st auto info = srcStore->queryPathInfo(srcStore->parseStorePath(storePath)); bytesExpected += info->narSize; - act.setExpected(actCopyPath, bytesExpected); + act.setExpected(ActivityType::CopyPath, bytesExpected); return srcStore->printStorePathSet(info->references); }, @@ -686,7 +686,7 @@ void copyPaths(ref srcStore, ref dstStore, const StorePathSet & st nrFailed++; if (!settings.keepGoing) throw e; - logger->log(lvlError, fmt("could not copy %s: %s", storePathS, e.what())); + logger->log(Verbosity::Error, fmt("could not copy %s: %s", storePathS, e.what())); showProgress(); return; } @@ -732,7 +732,7 @@ std::optional decodeValidPathInfo(const Store & store, std::istre if (hashGiven) { string s; getline(str, s); - info.narHash = Hash(s, htSHA256); + info.narHash = Hash(s, HashType::SHA256); getline(str, s); if (!string2Int(s, info.narSize)) throw Error("number expected"); } @@ -775,7 +775,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const store.printStorePath(path)); return "1;" + store.printStorePath(path) + ";" - + narHash.to_string(Base32) + ";" + + narHash.to_string(Base::Base32) + ";" + std::to_string(narSize) + ";" + concatStringsSep(",", store.printStorePathSet(references)); } @@ -853,7 +853,7 @@ Strings ValidPathInfo::shortRefs() const std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash) { - if (method == FileIngestionMethod::Git && hash.type != htSHA1) + if (method == FileIngestionMethod::Git && hash.type != HashType::SHA1) throw Error("git file ingestion must use sha1 hashes"); return "fixed:" + ingestionMethodPrefix(method) + hash.to_string(); } diff --git a/src/libstore/store-api.hh 
b/src/libstore/store-api.hh index b1e25fc7d66..0c1ca3286f1 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -359,7 +359,7 @@ public: path and the cryptographic hash of the contents of srcPath. */ std::pair computeStorePathForPath(std::string_view name, const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive, - HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const; + HashType hashAlgo = HashType::SHA256, PathFilter & filter = defaultPathFilter) const; /* Preparatory part of addTextToStore(). @@ -457,12 +457,12 @@ public: The function object `filter' can be used to exclude files (see libutil/archive.hh). */ virtual StorePath addToStore(const string & name, const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, + FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0; // FIXME: remove? virtual StorePath addToStoreFromDump(const string & dump, const string & name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) + FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair) { throw Error("addToStoreFromDump() is not supported by this store"); } @@ -556,7 +556,7 @@ public: each path is included. */ void pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths, bool includeImpureInfo, bool showClosureSize, - Base hashBase = Base32, + Base hashBase = Base::Base32, AllowInvalidFlag allowInvalid = DisallowInvalid); /* Return the size of the closure of the specified path, that is, diff --git a/src/libutil/args.cc b/src/libutil/args.cc index f829415d106..4fe9539e4b8 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -162,8 +162,18 @@ Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht) .labels = {"hash-algo"}, .handler = {[ht](std::string s) { *ht = parseHashType(s); - if (*ht == htUnknown) - throw UsageError("unknown hash type '%1%'", s); + }} + }; +} + +Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional * oht) +{ + return Flag { + .longName = std::move(longName), + .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). 
Optional as can also be gotten from SRI hash itself.", + .labels = {"hash-algo"}, + .handler = {[oht](std::string s) { + *oht = std::optional { parseHashType(s) }; }} }; } diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 1932e6a8ae4..f2315f67a7c 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -10,7 +10,7 @@ namespace nix { MakeError(UsageError, Error); -enum HashType : char; +enum struct HashType : char; class Args { @@ -85,6 +85,7 @@ protected: Handler handler; static Flag mkHashTypeFlag(std::string && longName, HashType * ht); + static Flag mkHashTypeOptFlag(std::string && longName, std::optional * oht); }; std::map longFlags; diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 860b04adb79..75e889f41d3 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -314,7 +314,7 @@ struct XzCompressionSink : CompressionSink ret = lzma_stream_encoder_mt(&strm, &mt_options); done = true; #else - printMsg(lvlError, "warning: parallel XZ compression requested but not supported, falling back to single-threaded compression"); + printMsg(Verbosity::Error, "warning: parallel XZ compression requested but not supported, falling back to single-threaded compression"); #endif } diff --git a/src/libutil/git.cc b/src/libutil/git.cc index 3e613949a26..a7d37258d3b 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -58,11 +58,11 @@ static string getString(Source & source, int n) // Unfortunately, no access to libstore headers here. static string getStoreEntry(const Path & storeDir, Hash hash, string name) { - Hash hash1 = hashString(htSHA256, "fixed:out:git:" + hash.to_string(Base16) + ":"); - Hash hash2 = hashString(htSHA256, "output:out:" + hash1.to_string(Base16) + ":" + storeDir + ":" + name); + Hash hash1 = hashString(HashType::SHA256, "fixed:out:git:" + hash.to_string(Base::Base16) + ":"); + Hash hash2 = hashString(HashType::SHA256, "output:out:" + hash1.to_string(Base::Base16) + ":" + storeDir + ":" + name); Hash hash3 = compressHash(hash2, 20); - return hash3.to_string(Base32, false) + "-" + name; + return hash3.to_string(Base::Base32, false) + "-" + name; } static void parse(ParseSink & sink, Source & source, const Path & path, const Path & realStoreDir, const Path & storeDir) @@ -109,7 +109,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path, const Pa string hashs = getString(source, 20); left -= 20; - Hash hash(htSHA1); + Hash hash(HashType::SHA1); std::copy(hashs.begin(), hashs.end(), hash.hash); string entryName = getStoreEntry(storeDir, hash, name); diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index b3f3db23178..ca1cd974c3d 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -4,6 +4,7 @@ #include #include +#include "args.hh" #include "hash.hh" #include "archive.hh" #include "git.hh" @@ -19,11 +20,13 @@ namespace nix { void Hash::init() { - if (type == htMD5) hashSize = md5HashSize; - else if (type == htSHA1) hashSize = sha1HashSize; - else if (type == htSHA256) hashSize = sha256HashSize; - else if (type == htSHA512) hashSize = sha512HashSize; - else abort(); + if (!type) abort(); + switch (*type) { + case HashType::MD5: hashSize = md5HashSize; break; + case HashType::SHA1: hashSize = sha1HashSize; break; + case HashType::SHA256: hashSize = sha256HashSize; break; + case HashType::SHA512: hashSize = sha512HashSize; break; + } assert(hashSize <= maxHashSize); memset(hash, 0, maxHashSize); } @@ -99,34 +102,43 @@ static string printHash32(const Hash & hash) string 
printHash16or32(const Hash & hash) { - return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false); + return hash.to_string(hash.type == HashType::MD5 ? Base::Base16 : Base::Base32, false); } +HashType assertInitHashType(const Hash & h) { + if (h.type) + return *h.type; + else + abort(); +} + std::string Hash::to_string(Base base, bool includeType) const { std::string s; - if (base == SRI || includeType) { - s += printHashType(type); - s += base == SRI ? '-' : ':'; + if (base == Base::SRI || includeType) { + s += printHashType(assertInitHashType(*this)); + s += base == Base::SRI ? '-' : ':'; } switch (base) { - case Base16: + case Base::Base16: s += printHash16(*this); break; - case Base32: + case Base::Base32: s += printHash32(*this); break; - case Base64: - case SRI: + case Base::Base64: + case Base::SRI: s += base64Encode(std::string((const char *) hash, hashSize)); break; } return s; } +Hash::Hash(const std::string & s, HashType type) : Hash(s, std::optional { type }) { } +Hash::Hash(const std::string & s) : Hash(s, std::optional{}) { } -Hash::Hash(const std::string & s, HashType type) +Hash::Hash(const std::string & s, std::optional type) : type(type) { size_t pos = 0; @@ -137,17 +149,17 @@ Hash::Hash(const std::string & s, HashType type) sep = s.find('-'); if (sep != string::npos) { isSRI = true; - } else if (type == htUnknown) + } else if (! type) throw BadHash("hash '%s' does not include a type", s); } if (sep != string::npos) { string hts = string(s, 0, sep); this->type = parseHashType(hts); - if (this->type == htUnknown) + if (!this->type) throw BadHash("unknown hash type '%s'", hts); - if (type != htUnknown && type != this->type) - throw BadHash("hash '%s' should have type '%s'", s, printHashType(type)); + if (type && type != this->type) + throw BadHash("hash '%s' should have type '%s'", s, printHashType(*type)); pos = sep + 1; } @@ -203,7 +215,7 @@ Hash::Hash(const std::string & s, HashType type) } else - throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(type)); + throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(*type)); } @@ -218,29 +230,29 @@ union Ctx static void start(HashType ht, Ctx & ctx) { - if (ht == htMD5) MD5_Init(&ctx.md5); - else if (ht == htSHA1) SHA1_Init(&ctx.sha1); - else if (ht == htSHA256) SHA256_Init(&ctx.sha256); - else if (ht == htSHA512) SHA512_Init(&ctx.sha512); + if (ht == HashType::MD5) MD5_Init(&ctx.md5); + else if (ht == HashType::SHA1) SHA1_Init(&ctx.sha1); + else if (ht == HashType::SHA256) SHA256_Init(&ctx.sha256); + else if (ht == HashType::SHA512) SHA512_Init(&ctx.sha512); } static void update(HashType ht, Ctx & ctx, const unsigned char * bytes, size_t len) { - if (ht == htMD5) MD5_Update(&ctx.md5, bytes, len); - else if (ht == htSHA1) SHA1_Update(&ctx.sha1, bytes, len); - else if (ht == htSHA256) SHA256_Update(&ctx.sha256, bytes, len); - else if (ht == htSHA512) SHA512_Update(&ctx.sha512, bytes, len); + if (ht == HashType::MD5) MD5_Update(&ctx.md5, bytes, len); + else if (ht == HashType::SHA1) SHA1_Update(&ctx.sha1, bytes, len); + else if (ht == HashType::SHA256) SHA256_Update(&ctx.sha256, bytes, len); + else if (ht == HashType::SHA512) SHA512_Update(&ctx.sha512, bytes, len); } static void finish(HashType ht, Ctx & ctx, unsigned char * hash) { - if (ht == htMD5) MD5_Final(hash, &ctx.md5); - else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1); - else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256); - else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512); + if (ht == 
HashType::MD5) MD5_Final(hash, &ctx.md5); + else if (ht == HashType::SHA1) SHA1_Final(hash, &ctx.sha1); + else if (ht == HashType::SHA256) SHA256_Final(hash, &ctx.sha256); + else if (ht == HashType::SHA512) SHA512_Final(hash, &ctx.sha512); } @@ -326,24 +338,34 @@ Hash compressHash(const Hash & hash, unsigned int newSize) } -HashType parseHashType(const string & s) +std::optional parseHashTypeOpt(const string & s) { - if (s == "md5") return htMD5; - else if (s == "sha1") return htSHA1; - else if (s == "sha256") return htSHA256; - else if (s == "sha512") return htSHA512; - else return htUnknown; + if (s == "md5") return HashType::MD5; + else if (s == "sha1") return HashType::SHA1; + else if (s == "sha256") return HashType::SHA256; + else if (s == "sha512") return HashType::SHA512; + else return std::optional {}; } +HashType parseHashType(const string & s) +{ + auto opt_h = parseHashTypeOpt(s); + if (opt_h) + return *opt_h; + else + throw UsageError("unknown hash algorithm '%1%'", s); +} string printHashType(HashType ht) { - if (ht == htMD5) return "md5"; - else if (ht == htSHA1) return "sha1"; - else if (ht == htSHA256) return "sha256"; - else if (ht == htSHA512) return "sha512"; - else abort(); + string ret; + switch (ht) { + case HashType::MD5: ret = "md5"; break; + case HashType::SHA1: ret = "sha1"; break; + case HashType::SHA256: ret = "sha256"; break; + case HashType::SHA512: ret = "sha512"; break; + } + return ret; } - } diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 1af94c2b47c..5b9870966c1 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -10,7 +10,12 @@ namespace nix { MakeError(BadHash, Error); -enum HashType : char { htUnknown, htMD5, htSHA1, htSHA256, htSHA512 }; +enum struct HashType : char { + MD5, + SHA1, + SHA256, + SHA512, +}; const int md5HashSize = 16; @@ -20,7 +25,12 @@ const int sha512HashSize = 64; extern const string base32Chars; -enum Base : int { Base64, Base32, Base16, SRI }; +enum struct Base : int { + Base64, + Base32, + Base16, + SRI, +}; struct Hash @@ -29,7 +39,7 @@ struct Hash unsigned int hashSize = 0; unsigned char hash[maxHashSize] = {}; - HashType type = htUnknown; + std::optional type = {}; /* Create an unset hash object. */ Hash() { }; @@ -40,14 +50,18 @@ struct Hash /* Initialize the hash from a string representation, in the format "[:]" or "-" (a Subresource Integrity hash expression). If the 'type' argument - is htUnknown, then the hash type must be specified in the + is not present, then the hash type must be specified in the string. */ - Hash(const std::string & s, HashType type = htUnknown); + Hash(const std::string & s, std::optional type); + // type must be provided + Hash(const std::string & s, HashType type); + // hash type must be part of string + Hash(const std::string & s); void init(); /* Check whether a hash is set. */ - operator bool () const { return type != htUnknown; } + operator bool () const { return (bool) type; } /* Check whether two hash are equal. */ bool operator == (const Hash & h2) const; @@ -79,18 +93,18 @@ struct Hash /* Return a string representation of the hash, in base-16, base-32 or base-64. By default, this is prefixed by the hash type (e.g. "sha256:"). 
*/ - std::string to_string(Base base = Base32, bool includeType = true) const; + std::string to_string(Base base = Base::Base32, bool includeType = true) const; std::string gitRev() const { - assert(type == htSHA1); - return to_string(Base16, false); + assert(type == HashType::SHA1); + return to_string(Base::Base16, false); } std::string gitShortRev() const { - assert(type == htSHA1); - return std::string(to_string(Base16, false), 0, 7); + assert(type == HashType::SHA1); + return std::string(to_string(Base::Base16, false), 0, 7); } }; @@ -119,6 +133,8 @@ Hash compressHash(const Hash & hash, unsigned int newSize); /* Parse a string representing a hash type. */ HashType parseHashType(const string & s); +/* Will return nothing on parse error */ +std::optional parseHashTypeOpt(const string & s); /* And the reverse. */ string printHashType(HashType ht); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 3cc4ef8f15b..5cd2524c6c7 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -22,7 +22,7 @@ Logger * logger = makeDefaultLogger(); void Logger::warn(const std::string & msg) { - log(lvlWarn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg); + log(Verbosity::Warn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg); } void Logger::writeToStdout(std::string_view s) @@ -51,10 +51,10 @@ class SimpleLogger : public Logger if (systemd) { char c; switch (lvl) { - case lvlError: c = '3'; break; - case lvlWarn: c = '4'; break; - case lvlInfo: c = '5'; break; - case lvlTalkative: case lvlChatty: c = '6'; break; + case Verbosity::Error: c = '3'; break; + case Verbosity::Warn: c = '4'; break; + case Verbosity::Info: c = '5'; break; + case Verbosity::Talkative: case Verbosity::Chatty: c = '6'; break; default: c = '7'; } prefix = std::string("<") + c + ">"; @@ -72,7 +72,7 @@ class SimpleLogger : public Logger } }; -Verbosity verbosity = lvlInfo; +Verbosity verbosity = Verbosity::Info; void warnOnce(bool & haveWarned, const FormatOrString & fs) { @@ -129,7 +129,7 @@ struct JSONLogger : Logger void write(const nlohmann::json & json) { - prevLogger.log(lvlError, "@nix " + json.dump()); + prevLogger.log(Verbosity::Error, "@nix " + json.dump()); } void log(Verbosity lvl, const FormatOrString & fs) override @@ -204,7 +204,7 @@ bool handleJSONLogMessage(const std::string & msg, if (action == "start") { auto type = (ActivityType) json["type"]; - if (trusted || type == actFileTransfer) + if (trusted || type == ActivityType::Download) activities.emplace(std::piecewise_construct, std::forward_as_tuple(json["id"]), std::forward_as_tuple(*logger, (Verbosity) json["level"], type, @@ -222,7 +222,7 @@ bool handleJSONLogMessage(const std::string & msg, else if (action == "setPhase") { std::string phase = json["phase"]; - act.result(resSetPhase, phase); + act.result(ResultType::SetPhase, phase); } else if (action == "msg") { diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 18c24d50877..3449f5080f5 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -4,41 +4,41 @@ namespace nix { -typedef enum { - lvlError = 0, - lvlWarn, - lvlInfo, - lvlTalkative, - lvlChatty, - lvlDebug, - lvlVomit -} Verbosity; - -typedef enum { - actUnknown = 0, - actCopyPath = 100, - actFileTransfer = 101, - actRealise = 102, - actCopyPaths = 103, - actBuilds = 104, - actBuild = 105, - actOptimiseStore = 106, - actVerifyPaths = 107, - actSubstitute = 108, - actQueryPathInfo = 109, - actPostBuildHook = 110, -} ActivityType; - -typedef enum { - resFileLinked = 100, - resBuildLogLine = 101, - 
resUntrustedPath = 102, - resCorruptedPath = 103, - resSetPhase = 104, - resProgress = 105, - resSetExpected = 106, - resPostBuildLogLine = 107, -} ResultType; +enum struct Verbosity : uint64_t { + Error = 0, + Warn, + Info, + Talkative, + Chatty, + Debug, + Vomit, +}; + +enum struct ActivityType : uint64_t { + Unknown = 0, + CopyPath = 100, + Download = 101, + Realise = 102, + CopyPaths = 103, + Builds = 104, + Build = 105, + OptimiseStore = 106, + VerifyPaths = 107, + Substitute = 108, + QueryPathInfo = 109, + PostBuildHook = 110, +}; + +enum struct ResultType : uint64_t { + FileLinked = 100, + BuildLogLine = 101, + UntrustedPath = 102, + CorruptedPath = 103, + SetPhase = 104, + Progress = 105, + SetExpected = 106, + PostBuildLogLine = 107, +}; typedef uint64_t ActivityId; @@ -67,7 +67,7 @@ public: void log(const FormatOrString & fs) { - log(lvlInfo, fs); + log(Verbosity::Info, fs); } virtual void warn(const std::string & msg); @@ -104,17 +104,17 @@ struct Activity Activity(Logger & logger, ActivityType type, const Logger::Fields & fields = {}, ActivityId parent = getCurActivity()) - : Activity(logger, lvlError, type, "", fields, parent) { }; + : Activity(logger, Verbosity::Error, type, "", fields, parent) { }; Activity(const Activity & act) = delete; ~Activity(); void progress(uint64_t done = 0, uint64_t expected = 0, uint64_t running = 0, uint64_t failed = 0) const - { result(resProgress, done, expected, running, failed); } + { result(ResultType::Progress, done, expected, running, failed); } void setExpected(ActivityType type2, uint64_t expected) const - { result(resSetExpected, type2, expected); } + { result(ResultType::SetExpected, (uint64_t)type2, expected); } template void result(ResultType type, const Args & ... args) const @@ -161,11 +161,11 @@ extern Verbosity verbosity; /* suppress msgs > this */ } \ } while (0) -#define printError(args...) printMsg(lvlError, args) -#define printInfo(args...) printMsg(lvlInfo, args) -#define printTalkative(args...) printMsg(lvlTalkative, args) -#define debug(args...) printMsg(lvlDebug, args) -#define vomit(args...) printMsg(lvlVomit, args) +#define printError(args...) printMsg(Verbosity::Error, args) +#define printInfo(args...) printMsg(Verbosity::Info, args) +#define printTalkative(args...) printMsg(Verbosity::Talkative, args) +#define debug(args...) printMsg(Verbosity::Debug, args) +#define vomit(args...) printMsg(Verbosity::Vomit, args) template inline void warn(const std::string & fs, const Args & ... 
args) diff --git a/src/libutil/lru-cache.hh b/src/libutil/lru-cache.hh index 8b83f842c32..6ef4a3e067d 100644 --- a/src/libutil/lru-cache.hh +++ b/src/libutil/lru-cache.hh @@ -1,5 +1,6 @@ #pragma once +#include #include #include #include diff --git a/src/libutil/tests/hash.cc b/src/libutil/tests/hash.cc index 7cb4398178a..ecc0d4a0347 100644 --- a/src/libutil/tests/hash.cc +++ b/src/libutil/tests/hash.cc @@ -10,28 +10,28 @@ namespace nix { TEST(hashString, testKnownMD5Hashes1) { // values taken from: https://tools.ietf.org/html/rfc1321 auto s1 = ""; - auto hash = hashString(HashType::htMD5, s1); + auto hash = hashString(HashType::MD5, s1); ASSERT_EQ(hash.to_string(Base::Base16), "md5:d41d8cd98f00b204e9800998ecf8427e"); } TEST(hashString, testKnownMD5Hashes2) { // values taken from: https://tools.ietf.org/html/rfc1321 auto s2 = "abc"; - auto hash = hashString(HashType::htMD5, s2); + auto hash = hashString(HashType::MD5, s2); ASSERT_EQ(hash.to_string(Base::Base16), "md5:900150983cd24fb0d6963f7d28e17f72"); } TEST(hashString, testKnownSHA1Hashes1) { // values taken from: https://tools.ietf.org/html/rfc3174 auto s = "abc"; - auto hash = hashString(HashType::htSHA1, s); + auto hash = hashString(HashType::SHA1, s); ASSERT_EQ(hash.to_string(Base::Base16),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d"); } TEST(hashString, testKnownSHA1Hashes2) { // values taken from: https://tools.ietf.org/html/rfc3174 auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; - auto hash = hashString(HashType::htSHA1, s); + auto hash = hashString(HashType::SHA1, s); ASSERT_EQ(hash.to_string(Base::Base16),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1"); } @@ -39,7 +39,7 @@ namespace nix { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abc"; - auto hash = hashString(HashType::htSHA256, s); + auto hash = hashString(HashType::SHA256, s); ASSERT_EQ(hash.to_string(Base::Base16), "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"); } @@ -47,7 +47,7 @@ namespace nix { TEST(hashString, testKnownSHA256Hashes2) { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; - auto hash = hashString(HashType::htSHA256, s); + auto hash = hashString(HashType::SHA256, s); ASSERT_EQ(hash.to_string(Base::Base16), "sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"); } @@ -55,7 +55,7 @@ namespace nix { TEST(hashString, testKnownSHA512Hashes1) { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abc"; - auto hash = hashString(HashType::htSHA512, s); + auto hash = hashString(HashType::SHA512, s); ASSERT_EQ(hash.to_string(Base::Base16), "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9" "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd" @@ -66,15 +66,10 @@ namespace nix { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"; - auto hash = hashString(HashType::htSHA512, s); + auto hash = hashString(HashType::SHA512, s); ASSERT_EQ(hash.to_string(Base::Base16), "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1" "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a" "c7d329eeb6dd26545e96e55b874be909"); } - - TEST(hashString, hashingWithUnknownAlgoExits) { - auto s = "unknown"; - ASSERT_DEATH(hashString(HashType::htUnknown, s), ""); - } } diff --git a/src/libutil/tests/lru-cache.cc b/src/libutil/tests/lru-cache.cc new file mode 100644 index 
00000000000..091d3d5ede1 --- /dev/null +++ b/src/libutil/tests/lru-cache.cc @@ -0,0 +1,130 @@ +#include "lru-cache.hh" +#include + +namespace nix { + + /* ---------------------------------------------------------------------------- + * size + * --------------------------------------------------------------------------*/ + + TEST(LRUCache, sizeOfEmptyCacheIsZero) { + LRUCache c(10); + ASSERT_EQ(c.size(), 0); + } + + TEST(LRUCache, sizeOfSingleElementCacheIsOne) { + LRUCache c(10); + c.upsert("foo", "bar"); + ASSERT_EQ(c.size(), 1); + } + + /* ---------------------------------------------------------------------------- + * upsert / get + * --------------------------------------------------------------------------*/ + + TEST(LRUCache, getFromEmptyCache) { + LRUCache c(10); + auto val = c.get("x"); + ASSERT_EQ(val.has_value(), false); + } + + TEST(LRUCache, getExistingValue) { + LRUCache c(10); + c.upsert("foo", "bar"); + auto val = c.get("foo"); + ASSERT_EQ(val, "bar"); + } + + TEST(LRUCache, getNonExistingValueFromNonEmptyCache) { + LRUCache c(10); + c.upsert("foo", "bar"); + auto val = c.get("another"); + ASSERT_EQ(val.has_value(), false); + } + + TEST(LRUCache, upsertOnZeroCapacityCache) { + LRUCache c(0); + c.upsert("foo", "bar"); + auto val = c.get("foo"); + ASSERT_EQ(val.has_value(), false); + } + + TEST(LRUCache, updateExistingValue) { + LRUCache c(1); + c.upsert("foo", "bar"); + + auto val = c.get("foo"); + ASSERT_EQ(val.value_or("error"), "bar"); + ASSERT_EQ(c.size(), 1); + + c.upsert("foo", "changed"); + val = c.get("foo"); + ASSERT_EQ(val.value_or("error"), "changed"); + ASSERT_EQ(c.size(), 1); + } + + TEST(LRUCache, overwriteOldestWhenCapacityIsReached) { + LRUCache c(3); + c.upsert("one", "eins"); + c.upsert("two", "zwei"); + c.upsert("three", "drei"); + + ASSERT_EQ(c.size(), 3); + ASSERT_EQ(c.get("one").value_or("error"), "eins"); + + // exceed capacity + c.upsert("another", "whatever"); + + ASSERT_EQ(c.size(), 3); + // Retrieving "one" makes it the most recent element thus + // two will be the oldest one and thus replaced. 
+ ASSERT_EQ(c.get("two").has_value(), false); + ASSERT_EQ(c.get("another").value(), "whatever"); + } + + /* ---------------------------------------------------------------------------- + * clear + * --------------------------------------------------------------------------*/ + + TEST(LRUCache, clearEmptyCache) { + LRUCache c(10); + c.clear(); + ASSERT_EQ(c.size(), 0); + } + + TEST(LRUCache, clearNonEmptyCache) { + LRUCache c(10); + c.upsert("one", "eins"); + c.upsert("two", "zwei"); + c.upsert("three", "drei"); + ASSERT_EQ(c.size(), 3); + c.clear(); + ASSERT_EQ(c.size(), 0); + } + + /* ---------------------------------------------------------------------------- + * erase + * --------------------------------------------------------------------------*/ + + TEST(LRUCache, eraseFromEmptyCache) { + LRUCache c(10); + ASSERT_EQ(c.erase("foo"), false); + ASSERT_EQ(c.size(), 0); + } + + TEST(LRUCache, eraseMissingFromNonEmptyCache) { + LRUCache c(10); + c.upsert("one", "eins"); + ASSERT_EQ(c.erase("foo"), false); + ASSERT_EQ(c.size(), 1); + ASSERT_EQ(c.get("one").value_or("error"), "eins"); + } + + TEST(LRUCache, eraseFromNonEmptyCache) { + LRUCache c(10); + c.upsert("one", "eins"); + ASSERT_EQ(c.erase("one"), true); + ASSERT_EQ(c.size(), 0); + ASSERT_EQ(c.get("one").value_or("empty"), "empty"); + } +} diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 5d5328e5de6..88c09eef9d7 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -4,6 +4,7 @@ namespace nix { std::regex refRegex(refRegexS, std::regex::ECMAScript); +std::regex badGitRefRegex(badGitRefRegexS, std::regex::ECMAScript); std::regex revRegex(revRegexS, std::regex::ECMAScript); std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript); diff --git a/src/libutil/url.hh b/src/libutil/url.hh index 1503023a2fc..4a0d4071ba6 100644 --- a/src/libutil/url.hh +++ b/src/libutil/url.hh @@ -49,6 +49,12 @@ const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRege const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check extern std::regex refRegex; +// Instead of defining what a good Git Ref is, we define what a bad Git Ref is +// This is because of the definition of a ref in refs.c in https://github.com/git/git +// See tests/fetchGitRefs.sh for the full definition +const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$"; +extern std::regex badGitRefRegex; + // A Git revision (a SHA-1 commit hash). 
const static std::string revRegexS = "[0-9a-fA-F]{40}"; extern std::regex revRegex; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 71db92d772a..d17e952ae20 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -459,7 +459,7 @@ void deletePath(const Path & path) void deletePath(const Path & path, unsigned long long & bytesFreed) { - //Activity act(*logger, lvlDebug, format("recursively deleting path '%1%'") % path); + //Activity act(*logger, Verbosity::Debug, format("recursively deleting path '%1%'") % path); bytesFreed = 0; _deletePath(path, bytesFreed); } diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index b10184718d1..711074e56ad 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -22,7 +22,7 @@ static int _main(int argc, char ** argv) printVersion("nix-copy-closure"); else if (*arg == "--gzip" || *arg == "--bzip2" || *arg == "--xz") { if (*arg != "--gzip") - printMsg(lvlError, format("Warning: '%1%' is not implemented, falling back to gzip") % *arg); + printMsg(Verbosity::Error, format("Warning: '%1%' is not implemented, falling back to gzip") % *arg); gzip = true; } else if (*arg == "--from") toMode = false; @@ -31,7 +31,7 @@ static int _main(int argc, char ** argv) else if (*arg == "--include-outputs") includeOutputs = true; else if (*arg == "--show-progress") - printMsg(lvlError, "Warning: '--show-progress' is not implemented"); + printMsg(Verbosity::Error, "Warning: '--show-progress' is not implemented"); else if (*arg == "--dry-run") dryRun = true; else if (*arg == "--use-substitutes" || *arg == "-s") diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index d62febaff53..b7d3f433760 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -972,7 +972,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) try { paths.insert(globals.state->store->parseStorePath(i.queryOutPath())); } catch (AssertionError & e) { - printMsg(lvlTalkative, "skipping derivation named '%s' which gives an assertion failure", i.queryName()); + printMsg(Verbosity::Talkative, "skipping derivation named '%s' which gives an assertion failure", i.queryName()); i.setFailed(); } validPaths = globals.state->store->queryValidPaths(paths); @@ -998,7 +998,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) try { if (i.hasFailed()) continue; - //Activity act(*logger, lvlDebug, format("outputting query result '%1%'") % i.attrPath); + //Activity act(*logger, Verbosity::Debug, format("outputting query result '%1%'") % i.attrPath); if (globals.prebuiltOnly && !validPaths.count(globals.state->store->parseStorePath(i.queryOutPath())) && @@ -1174,7 +1174,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) cout.flush(); } catch (AssertionError & e) { 
- printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName()); + printMsg(Verbosity::Talkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName()); } catch (Error & e) { e.addPrefix(fmt("while querying the derivation named '%1%':\n", i.queryName())); throw; diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc index 5a686c8cd67..e60c6615d1c 100644 --- a/src/nix-prefetch-url/nix-prefetch-url.cc +++ b/src/nix-prefetch-url/nix-prefetch-url.cc @@ -51,7 +51,7 @@ string resolveMirrorUri(EvalState & state, string uri) static int _main(int argc, char * * argv) { { - HashType ht = htSHA256; + HashType ht = HashType::SHA256; std::vector args; bool printPath = getEnv("PRINT_PATH") == "1"; bool fromExpr = false; @@ -72,8 +72,6 @@ static int _main(int argc, char * * argv) else if (*arg == "--type") { string s = getArg(*arg, arg, end); ht = parseHashType(s); - if (ht == htUnknown) - throw UsageError(format("unknown hash type '%1%'") % s); } else if (*arg == "--print-path") printPath = true; diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 68fe6f72735..fe7b776ae9c 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs) for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { auto info = store->queryPathInfo(j); if (query == qHash) { - assert(info->narHash.type == htSHA256); - cout << fmt("%s\n", info->narHash.to_string(Base32)); + assert(info->narHash.type == HashType::SHA256); + cout << fmt("%s\n", info->narHash.to_string(Base::Base32)); } else if (query == qSize) cout << fmt("%d\n", info->narSize); } @@ -502,7 +502,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) if (canonicalise) canonicalisePathMetaData(store->printStorePath(info->path), -1); if (!hashGiven) { - HashResult hash = hashPath(htSHA256, store->printStorePath(info->path)); + HashResult hash = hashPath(HashType::SHA256, store->printStorePath(info->path)); info->narHash = hash.first; info->narSize = hash.second; } @@ -720,9 +720,9 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) for (auto & i : opArgs) { auto path = store->followLinksToStorePath(i); - printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path)); + printMsg(Verbosity::Talkative, "checking path '%s'...", store->printStorePath(path)); auto info = store->queryPathInfo(path); - HashSink sink(info->narHash.type); + HashSink sink(*info->narHash.type); store->narFromPath(path, sink); auto current = sink.finish(); if (current.first != info->narHash) { @@ -781,7 +781,7 @@ static void opServe(Strings opFlags, Strings opArgs) auto getBuildSettings = [&]() { // FIXME: changing options here doesn't work if we're // building through the daemon. 
- verbosity = lvlError; + verbosity = Verbosity::Error; settings.keepLog = false; settings.useSubstitutes = false; settings.maxSilentTime = readInt(in); @@ -940,7 +940,7 @@ static void opServe(Strings opFlags, Strings opArgs) auto deriver = readString(in); if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.narHash = Hash(readString(in), htSHA256); + info.narHash = Hash(readString(in), HashType::SHA256); info.references = readStorePaths(*store, in); in >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(in); diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index c96a16cebc8..31cd241649b 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -53,8 +53,8 @@ struct CmdAddToStore : MixDryRun, StoreCommand StringSink sink; dumpPath(path, sink); - auto narHash = hashString(htSHA256, *sink.s); - auto hash = git ? dumpGitHash(htSHA1, path) : narHash; + auto narHash = hashString(HashType::SHA256, *sink.s); + auto hash = git ? dumpGitHash(HashType::SHA1, path) : narHash; ValidPathInfo info(store->makeFixedOutputPath(ingestionMethod, hash, *namePart)); info.narHash = narHash; @@ -62,7 +62,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand info.ca = makeFixedOutputCA(ingestionMethod, hash); if (!dryRun) { - auto addedPath = store->addToStore(*namePart, path, ingestionMethod, git ? htSHA1 : htSHA256); + auto addedPath = store->addToStore(*namePart, path, ingestionMethod, git ? HashType::SHA1 : HashType::SHA256); if (addedPath != info.path) throw Error(format("Added path %s does not match calculated path %s; something has changed") % addedPath.to_string() % info.path.to_string()); } diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 41a66676967..48d84a0c189 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -11,18 +11,18 @@ using namespace nix; struct CmdHash : Command { FileIngestionMethod mode; - Base base = SRI; + Base base = Base::SRI; bool truncate = false; - HashType ht = htSHA256; + HashType ht = HashType::SHA256; std::vector paths; std::optional modulus; CmdHash(FileIngestionMethod mode) : mode(mode) { - mkFlag(0, "sri", "print hash in SRI format", &base, SRI); - mkFlag(0, "base64", "print hash in base-64", &base, Base64); - mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base32); - mkFlag(0, "base16", "print hash in base-16", &base, Base16); + mkFlag(0, "sri", "print hash in SRI format", &base, Base::SRI); + mkFlag(0, "base64", "print hash in base-64", &base, Base::Base64); + mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base::Base32); + mkFlag(0, "base16", "print hash in base-16", &base, Base::Base16); addFlag(Flag::mkHashTypeFlag("type", &ht)); #if 0 mkFlag() @@ -81,7 +81,7 @@ struct CmdHash : Command h = hashSink->finish().first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); - logger->stdout(h.to_string(base, base == SRI)); + logger->stdout(h.to_string(base, base == Base::SRI)); } } }; @@ -93,22 +93,22 @@ static RegisterCommand r3("hash-git", [](){ return make_ref(FileIngesti struct CmdToBase : Command { Base base; - HashType ht = htUnknown; + std::optional ht; std::vector args; CmdToBase(Base base) : base(base) { - addFlag(Flag::mkHashTypeFlag("type", &ht)); + addFlag(Flag::mkHashTypeOptFlag("type", &ht)); expectArgs("strings", &args); } std::string description() override { return fmt("convert a hash to %s representation", - base == Base16 ? "base-16" : - base == Base32 ? "base-32" : - base == Base64 ? "base-64" : - "SRI"); + base == Base::Base16 ? 
"base-16" : + base == Base::Base32 ? "base-32" : + base == Base::Base64 ? "base-64" : + "Base::SRI"); } Category category() override { return catUtility; } @@ -116,19 +116,19 @@ struct CmdToBase : Command void run() override { for (auto s : args) - logger->stdout(Hash(s, ht).to_string(base, base == SRI)); + logger->stdout(Hash(s, ht).to_string(base, base == Base::SRI)); } }; -static RegisterCommand r4("to-base16", [](){ return make_ref(Base16); }); -static RegisterCommand r5("to-base32", [](){ return make_ref(Base32); }); -static RegisterCommand r6("to-base64", [](){ return make_ref(Base64); }); -static RegisterCommand r7("to-sri", [](){ return make_ref(SRI); }); +static RegisterCommand r4("to-base16", [](){ return make_ref(Base::Base16); }); +static RegisterCommand r5("to-base32", [](){ return make_ref(Base::Base32); }); +static RegisterCommand r6("to-base64", [](){ return make_ref(Base::Base64); }); +static RegisterCommand r7("to-sri", [](){ return make_ref(Base::SRI); }); /* Legacy nix-hash command. */ static int compatNixHash(int argc, char * * argv) { - HashType ht = htMD5; + HashType ht = HashType::MD5; bool flat = false; bool base32 = false; bool truncate = false; @@ -146,8 +146,6 @@ static int compatNixHash(int argc, char * * argv) else if (*arg == "--type") { string s = getArg(*arg, arg, end); ht = parseHashType(s); - if (ht == htUnknown) - throw UsageError(format("unknown hash type '%1%'") % s); } else if (*arg == "--to-base16") op = opTo16; else if (*arg == "--to-base32") op = opTo32; @@ -161,14 +159,14 @@ static int compatNixHash(int argc, char * * argv) if (op == opHash) { CmdHash cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive); cmd.ht = ht; - cmd.base = base32 ? Base32 : Base16; + cmd.base = base32 ? Base::Base32 : Base::Base16; cmd.truncate = truncate; cmd.paths = ss; cmd.run(); } else { - CmdToBase cmd(op == opTo32 ? Base32 : Base16); + CmdToBase cmd(op == opTo32 ? 
Base::Base32 : Base::Base16); cmd.args = ss; cmd.ht = ht; cmd.run(); diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 937d692063b..17d15f5eecc 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -279,7 +279,7 @@ Buildables build(ref store, RealiseMode mode, } if (mode == DryRun) - printMissing(store, pathsToBuild, lvlError); + printMissing(store, pathsToBuild, Verbosity::Error); else if (mode == Build) store->buildPaths(pathsToBuild); diff --git a/src/nix/main.cc b/src/nix/main.cc index ef301580ab9..fa0bb5b51be 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -160,7 +160,7 @@ void mainWrapped(int argc, char * * argv) if (legacy) return legacy(argc, argv); } - verbosity = lvlWarn; + verbosity = Verbosity::Warn; settings.verboseBuild = false; NixArgs args; diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc index 3e7ff544d6d..bd948a9831e 100644 --- a/src/nix/make-content-addressable.cc +++ b/src/nix/make-content-addressable.cc @@ -72,7 +72,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON *sink.s = rewriteStrings(*sink.s, rewrites); - HashModuloSink hashModuloSink(htSHA256, oldHashPart); + HashModuloSink hashModuloSink(HashType::SHA256, oldHashPart); hashModuloSink((unsigned char *) sink.s->data(), sink.s->size()); auto narHash = hashModuloSink.finish().first; diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 88d7fffd445..91d62bcec1b 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -91,7 +91,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON store->pathInfoToJSON(jsonRoot, // FIXME: preserve order? storePathsToSet(storePaths), - true, showClosureSize, SRI, AllowInvalid); + true, showClosureSize, Base::SRI, AllowInvalid); } else { diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc index 8e7ba95a341..76c6123f0f5 100644 --- a/src/nix/progress-bar.cc +++ b/src/nix/progress-bar.cc @@ -39,7 +39,7 @@ class ProgressBar : public Logger struct ActInfo { std::string s, lastLine, phase; - ActivityType type = actUnknown; + ActivityType type = ActivityType::Unknown; uint64_t done = 0; uint64_t expected = 0; uint64_t running = 0; @@ -153,7 +153,7 @@ class ProgressBar : public Logger state->its.emplace(act, i); state->activitiesByType[type].its.emplace(act, i); - if (type == actBuild) { + if (type == ActivityType::Build) { auto name = storePathToName(getS(fields, 0)); if (hasSuffix(name, ".drv")) name = name.substr(0, name.size() - 4); @@ -168,7 +168,7 @@ class ProgressBar : public Logger i->name = DrvName(name).name; } - if (type == actSubstitute) { + if (type == ActivityType::Substitute) { auto name = storePathToName(getS(fields, 0)); auto sub = getS(fields, 1); i->s = fmt( @@ -178,7 +178,7 @@ class ProgressBar : public Logger name, sub); } - if (type == actPostBuildHook) { + if (type == ActivityType::PostBuildHook) { auto name = storePathToName(getS(fields, 0)); if (hasSuffix(name, ".drv")) name = name.substr(0, name.size() - 4); @@ -186,14 +186,14 @@ class ProgressBar : public Logger i->name = DrvName(name).name; } - if (type == actQueryPathInfo) { + if (type == ActivityType::QueryPathInfo) { auto name = storePathToName(getS(fields, 0)); i->s = fmt("querying " ANSI_BOLD "%s" ANSI_NORMAL " on %s", name, getS(fields, 1)); } - if ((type == actFileTransfer && hasAncestor(*state, actCopyPath, parent)) - || (type == actFileTransfer && hasAncestor(*state, actQueryPathInfo, parent)) - || (type == actCopyPath && hasAncestor(*state, actSubstitute, parent))) + if ((type == 
ActivityType::Download && hasAncestor(*state, ActivityType::CopyPath, parent)) + || (type == ActivityType::Download && hasAncestor(*state, ActivityType::QueryPathInfo, parent)) + || (type == ActivityType::CopyPath && hasAncestor(*state, ActivityType::Substitute, parent))) i->visible = false; update(*state); @@ -238,13 +238,13 @@ class ProgressBar : public Logger { auto state(state_.lock()); - if (type == resFileLinked) { + if (type == ResultType::FileLinked) { state->filesLinked++; state->bytesLinked += getI(fields, 0); update(*state); } - else if (type == resBuildLogLine || type == resPostBuildLogLine) { + else if (type == ResultType::BuildLogLine || type == ResultType::PostBuildLogLine) { auto lastLine = trim(getS(fields, 0)); if (!lastLine.empty()) { auto i = state->its.find(act); @@ -252,10 +252,10 @@ class ProgressBar : public Logger ActInfo info = *i->second; if (printBuildLogs) { auto suffix = "> "; - if (type == resPostBuildLogLine) { + if (type == ResultType::PostBuildLogLine) { suffix = " (post)> "; } - log(*state, lvlInfo, ANSI_FAINT + info.name.value_or("unnamed") + suffix + ANSI_NORMAL + lastLine); + log(*state, Verbosity::Info, ANSI_FAINT + info.name.value_or("unnamed") + suffix + ANSI_NORMAL + lastLine); } else { state->activities.erase(i->second); info.lastLine = lastLine; @@ -266,24 +266,24 @@ class ProgressBar : public Logger } } - else if (type == resUntrustedPath) { + else if (type == ResultType::UntrustedPath) { state->untrustedPaths++; update(*state); } - else if (type == resCorruptedPath) { + else if (type == ResultType::CorruptedPath) { state->corruptedPaths++; update(*state); } - else if (type == resSetPhase) { + else if (type == ResultType::SetPhase) { auto i = state->its.find(act); assert(i != state->its.end()); i->second->phase = getS(fields, 0); update(*state); } - else if (type == resProgress) { + else if (type == ResultType::Progress) { auto i = state->its.find(act); assert(i != state->its.end()); ActInfo & actInfo = *i->second; @@ -294,7 +294,7 @@ class ProgressBar : public Logger update(*state); } - else if (type == resSetExpected) { + else if (type == ResultType::SetExpected) { auto i = state->its.find(act); assert(i != state->its.end()); ActInfo & actInfo = *i->second; @@ -406,10 +406,10 @@ class ProgressBar : public Logger res += s; }; - showActivity(actBuilds, "%s built"); + showActivity(ActivityType::Builds, "%s built"); - auto s1 = renderActivity(actCopyPaths, "%s copied"); - auto s2 = renderActivity(actCopyPath, "%s MiB", "%.1f", MiB); + auto s1 = renderActivity(ActivityType::CopyPaths, "%s copied"); + auto s2 = renderActivity(ActivityType::CopyPath, "%s MiB", "%.1f", MiB); if (!s1.empty() || !s2.empty()) { if (!res.empty()) res += ", "; @@ -417,10 +417,10 @@ class ProgressBar : public Logger if (!s2.empty()) { res += " ("; res += s2; res += ')'; } } - showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB); + showActivity(ActivityType::Download, "%s MiB DL", "%.1f", MiB); { - auto s = renderActivity(actOptimiseStore, "%s paths optimised"); + auto s = renderActivity(ActivityType::OptimiseStore, "%s paths optimised"); if (s != "") { s += fmt(", %.1f MiB / %d inodes freed", state.bytesLinked / MiB, state.filesLinked); if (!res.empty()) res += ", "; @@ -429,7 +429,7 @@ class ProgressBar : public Logger } // FIXME: don't show "done" paths in green. 
- showActivity(actVerifyPaths, "%s paths verified"); + showActivity(ActivityType::VerifyPaths, "%s paths verified"); if (state.corruptedPaths) { if (!res.empty()) res += ", "; diff --git a/src/nix/repl.cc b/src/nix/repl.cc index ea8ff1553b5..7d66419bd64 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -218,12 +218,12 @@ void NixRepl::mainLoop(const std::vector & files) // input without clearing the input so far. continue; } else { - printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); + printMsg(Verbosity::Error, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } } catch (Error & e) { - printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); + printMsg(Verbosity::Error, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } catch (Interrupted & e) { - printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); + printMsg(Verbosity::Error, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } // We handled the current input fully, so we should clear it diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index 6c9b9a79286..311817d1fba 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -47,7 +47,7 @@ struct CmdCopySigs : StorePathsCommand //logger->setExpected(doneLabel, storePaths.size()); auto doPath = [&](const Path & storePathS) { - //Activity act(*logger, lvlInfo, format("getting signatures for '%s'") % storePath); + //Activity act(*logger, Verbosity::Info, format("getting signatures for '%s'") % storePath); checkInterrupt(); diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 678780f333f..9018e69b3d3 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -73,12 +73,12 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand } { - Activity act(*logger, lvlInfo, actUnknown, fmt("downloading '%s'...", store->printStorePath(storePath))); + Activity act(*logger, Verbosity::Info, ActivityType::Unknown, fmt("downloading '%s'...", store->printStorePath(storePath))); store->ensurePath(storePath); } { - Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", store->printStorePath(storePath))); + Activity act(*logger, Verbosity::Info, ActivityType::Unknown, fmt("verifying that '%s' works...", store->printStorePath(storePath))); auto program = store->printStorePath(storePath) + "/bin/nix-env"; auto s = runProgram(program, false, {"--version"}); if (s.find("Nix") == std::string::npos) @@ -88,7 +88,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand stopProgressBar(); { - Activity act(*logger, lvlInfo, actUnknown, + Activity act(*logger, Verbosity::Info, ActivityType::Unknown, fmt("installing '%s' into profile '%s'...", store->printStorePath(storePath), profileDir)); runProgram(settings.nixBinDir + "/nix-env", false, {"--profile", profileDir, "-i", store->printStorePath(storePath), "--no-sandbox"}); @@ -139,7 +139,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand /* Return the store path of the latest stable Nix. */ StorePath getLatestNix(ref store) { - Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version"); + Activity act(*logger, Verbosity::Info, ActivityType::Unknown, "querying latest Nix version"); // FIXME: use nixos.org? 
auto req = FileTransferRequest(storePathsUrl); diff --git a/src/nix/verify.cc b/src/nix/verify.cc index cf1fa6a990c..fa05e73530b 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -59,7 +59,7 @@ struct CmdVerify : StorePathsCommand auto publicKeys = getDefaultPublicKeys(); - Activity act(*logger, actVerifyPaths); + Activity act(*logger, ActivityType::VerifyPaths); std::atomic done{0}; std::atomic untrusted{0}; @@ -77,7 +77,7 @@ struct CmdVerify : StorePathsCommand try { checkInterrupt(); - Activity act2(*logger, lvlInfo, actUnknown, fmt("checking '%s'", storePath)); + Activity act2(*logger, Verbosity::Info, ActivityType::Unknown, fmt("checking '%s'", storePath)); MaintainCount> mcActive(active); update(); @@ -88,9 +88,9 @@ struct CmdVerify : StorePathsCommand std::unique_ptr hashSink; if (info->ca == "") - hashSink = std::make_unique(info->narHash.type); + hashSink = std::make_unique(*info->narHash.type); else - hashSink = std::make_unique(info->narHash.type, storePathToHash(store->printStorePath(info->path))); + hashSink = std::make_unique(*info->narHash.type, storePathToHash(store->printStorePath(info->path))); store->narFromPath(info->path, *hashSink); @@ -98,7 +98,7 @@ struct CmdVerify : StorePathsCommand if (hash.first != info->narHash) { corrupted++; - act2.result(resCorruptedPath, store->printStorePath(info->path)); + act2.result(ResultType::CorruptedPath, store->printStorePath(info->path)); printError( "path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(info->path), info->narHash.to_string(), hash.first.to_string()); @@ -149,7 +149,7 @@ struct CmdVerify : StorePathsCommand if (!good) { untrusted++; - act2.result(resUntrustedPath, store->printStorePath(info->path)); + act2.result(ResultType::UntrustedPath, store->printStorePath(info->path)); printError("path '%s' is untrusted", store->printStorePath(info->path)); } diff --git a/tests/fetchGitRefs.sh b/tests/fetchGitRefs.sh new file mode 100644 index 00000000000..93993ae9098 --- /dev/null +++ b/tests/fetchGitRefs.sh @@ -0,0 +1,111 @@ +source common.sh + +if [[ -z $(type -p git) ]]; then + echo "Git not installed; skipping Git tests" + exit 99 +fi + +clearStore + +repo="$TEST_ROOT/git" + +rm -rf "$repo" "${repo}-tmp" "$TEST_HOME/.cache/nix" + +git init "$repo" +git -C "$repo" config user.email "foobar@example.com" +git -C "$repo" config user.name "Foobar" + +echo utrecht > "$repo"/hello +git -C "$repo" add hello +git -C "$repo" commit -m 'Bla1' + +path=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath") + +# Test various combinations of ref names +# (taken from the git project) + +# git help check-ref-format +# Git imposes the following rules on how references are named: +# +# 1. They can include slash / for hierarchical (directory) grouping, but no slash-separated component can begin with a dot . or end with the sequence .lock. +# 2. They must contain at least one /. This enforces the presence of a category like heads/, tags/ etc. but the actual names are not restricted. If the --allow-onelevel option is used, this rule is waived. +# 3. They cannot have two consecutive dots .. anywhere. +# 4. They cannot have ASCII control characters (i.e. bytes whose values are lower than \040, or \177 DEL), space, tilde ~, caret ^, or colon : anywhere. +# 5. They cannot have question-mark ?, asterisk *, or open bracket [ anywhere. See the --refspec-pattern option below for an exception to this rule. +# 6. 
They cannot begin or end with a slash / or contain multiple consecutive slashes (see the --normalize option below for an exception to this rule) +# 7. They cannot end with a dot .. +# 8. They cannot contain a sequence @{. +# 9. They cannot be the single character @. +# 10. They cannot contain a \. + +valid_ref() { + { set +x; printf >&2 '\n>>>>>>>>>> valid_ref %s\b <<<<<<<<<<\n' $(printf %s "$1" | sed -n -e l); set -x; } + git check-ref-format --branch "$1" >/dev/null + git -C "$repo" branch "$1" master >/dev/null + path1=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath") + [[ $path1 = $path ]] + git -C "$repo" branch -D "$1" >/dev/null +} + +invalid_ref() { + { set +x; printf >&2 '\n>>>>>>>>>> invalid_ref %s\b <<<<<<<<<<\n' $(printf %s "$1" | sed -n -e l); set -x; } + # special case for a sole @: + # --branch @ will try to interpret @ as a branch reference and not fail. Thus we need --allow-onelevel + if [ "$1" = "@" ]; then + (! git check-ref-format --allow-onelevel "$1" >/dev/null 2>&1) + else + (! git check-ref-format --branch "$1" >/dev/null 2>&1) + fi + nix --debug eval --raw "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath" 2>&1 | grep 'error: invalid Git branch/tag name' >/dev/null +} + + +valid_ref 'foox' +valid_ref '1337' +valid_ref 'foo.baz' +valid_ref 'foo/bar/baz' +valid_ref 'foo./bar' +valid_ref 'heads/foo@bar' +valid_ref "$(printf 'heads/fu\303\237')" +valid_ref 'foo-bar-baz' +valid_ref '$1' +valid_ref 'foo.locke' + +invalid_ref 'refs///heads/foo' +invalid_ref 'heads/foo/' +invalid_ref '///heads/foo' +invalid_ref '.foo' +invalid_ref './foo' +invalid_ref './foo/bar' +invalid_ref 'foo/./bar' +invalid_ref 'foo/bar/.' +invalid_ref 'foo bar' +invalid_ref 'foo?bar' +invalid_ref 'foo^bar' +invalid_ref 'foo~bar' +invalid_ref 'foo:bar' +invalid_ref 'foo[bar' +invalid_ref 'foo/bar/.' +invalid_ref '.refs/foo' +invalid_ref 'refs/heads/foo.' +invalid_ref 'heads/foo..bar' +invalid_ref 'heads/foo?bar' +invalid_ref 'heads/foo.lock' +invalid_ref 'heads///foo.lock' +invalid_ref 'foo.lock/bar' +invalid_ref 'foo.lock///bar' +invalid_ref 'heads/v@{ation' +invalid_ref 'heads/foo\.ar' # should fail due to \ +invalid_ref 'heads/foo\bar' # should fail due to \ +invalid_ref "$(printf 'heads/foo\t')" # should fail because it has a TAB +invalid_ref "$(printf 'heads/foo\177')" +invalid_ref '@' + +invalid_ref 'foo/*' +invalid_ref '*/foo' +invalid_ref 'foo/*/bar' +invalid_ref '*' +invalid_ref 'foo/*/*' +invalid_ref '*/foo/*' +invalid_ref '/foo' +invalid_ref '' diff --git a/tests/local.mk b/tests/local.mk index 35a80a16a5a..536661af88e 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -18,6 +18,7 @@ nix_tests = \ nar-access.sh \ structured-attrs.sh \ fetchGit.sh \ + fetchGitRefs.sh \ fetchGitSubmodules.sh \ fetchMercurial.sh \ signing.sh \ @@ -30,8 +31,7 @@ nix_tests = \ nix-copy-ssh.sh \ post-hook.sh \ function-trace.sh \ - recursive.sh \ - git.sh + recursive.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x))