From d29786f25854c910e08d9a8438d589f02adc9569 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra
Date: Fri, 5 Apr 2024 16:35:12 +0200
Subject: [PATCH] downloadFile(): Remove the "locked" (aka "immutable") flag

This was used in only one place, namely builtins.fetchurl with an
expected hash. Since this can cause issues similar to those described
in #9814 and #9905 for the "locked" flag of fetchTarball and fetchTree,
let's just remove it.

Note that if an expected hash is given and the hash algorithm is
SHA-256, then we will never do a download anyway if the resulting
store path already exists. So removing the "locked" flag will only
cause potentially unnecessary HTTP requests (subject to the tarball
TTL) for non-SHA-256 hashes.
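For reference, the only affected call site pattern is builtins.fetchurl
with an expected hash, roughly like this (the URL and hash below are
placeholders, not taken from this change):

    builtins.fetchurl {
      url = "https://example.org/source.tar.gz";
      # With a SHA-256 hash, the fixed-output store path can be computed
      # up front, so the download is skipped if that path already exists.
      sha256 = "0000000000000000000000000000000000000000000000000000";
    }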
---
 src/libexpr/primops/fetchTree.cc | 2 +-
 src/libfetchers/github.cc        | 8 ++++----
 src/libfetchers/registry.cc      | 2 +-
 src/libfetchers/tarball.cc       | 5 ++---
 src/libfetchers/tarball.hh       | 1 -
 src/nix-channel/nix-channel.cc   | 6 +++---
 6 files changed, 11 insertions(+), 13 deletions(-)

diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 7906f8ac3..0190aca3d 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -473,7 +473,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
     auto storePath =
         unpack
         ? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name)
-        : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
+        : fetchers::downloadFile(state.store, *url, name).storePath;
 
     if (expectedHash) {
         auto hash = unpack
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 60e323464..985f2e479 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -357,7 +357,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
         auto json = nlohmann::json::parse(
             readFile(
                 store->toRealPath(
-                    downloadFile(store, url, "source", false, headers).storePath)));
+                    downloadFile(store, url, "source", headers).storePath)));
 
         return RefInfo {
             .rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1),
@@ -431,7 +431,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
         auto json = nlohmann::json::parse(
             readFile(
                 store->toRealPath(
-                    downloadFile(store, url, "source", false, headers).storePath)));
+                    downloadFile(store, url, "source", headers).storePath)));
 
         return RefInfo {
             .rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)
@@ -495,7 +495,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
         std::string refUri;
         if (ref == "HEAD") {
             auto file = store->toRealPath(
-                downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
+                downloadFile(store, fmt("%s/HEAD", base_url), "source", headers).storePath);
             std::ifstream is(file);
             std::string line;
             getline(is, line);
@@ -511,7 +511,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
         std::regex refRegex(refUri);
 
         auto file = store->toRealPath(
-            downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
+            downloadFile(store, fmt("%s/info/refs", base_url), "source", headers).storePath);
         std::ifstream is(file);
 
         std::string line;
diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc
index 9c7bc0cfe..e00b9de46 100644
--- a/src/libfetchers/registry.cc
+++ b/src/libfetchers/registry.cc
@@ -158,7 +158,7 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
             }
 
             if (!hasPrefix(path, "/")) {
-                auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
+                auto storePath = downloadFile(store, path, "flake-registry.json").storePath;
                 if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
                     store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json");
                 path = store->toRealPath(storePath);
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index f08509cb7..a1f934c35 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -19,7 +19,6 @@ DownloadFileResult downloadFile(
     ref<Store> store,
     const std::string & url,
     const std::string & name,
-    bool locked,
     const Headers & headers)
 {
     // FIXME: check store
@@ -101,7 +100,7 @@ DownloadFileResult downloadFile(
             inAttrs,
             infoAttrs,
             *storePath,
-            locked);
+            false);
     }
 
     return {
@@ -306,7 +305,7 @@ struct FileInputScheme : CurlInputScheme
            the Nix store directly, since there is little deduplication
           benefit in using the Git cache for single big files like
           tarballs. */
-        auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
+        auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName());
         auto narHash = store->queryPathInfo(file.storePath)->narHash;
         input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
 
diff --git a/src/libfetchers/tarball.hh b/src/libfetchers/tarball.hh
index 77ad3bf09..bcb5dcc5e 100644
--- a/src/libfetchers/tarball.hh
+++ b/src/libfetchers/tarball.hh
@@ -25,7 +25,6 @@ DownloadFileResult downloadFile(
     ref<Store> store,
     const std::string & url,
     const std::string & name,
-    bool locked,
     const Headers & headers = {});
 
 struct DownloadTarballResult
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 48553fa31..9f7f557b5 100644
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -112,7 +112,7 @@ static void update(const StringSet & channelNames)
         // We want to download the url to a file to see if it's a tarball while also checking if we
         // got redirected in the process, so that we can grab the various parts of a nix channel
         // definition from a consistent location if the redirect changes mid-download.
-        auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false);
+        auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)));
         auto filename = store->toRealPath(result.storePath);
         url = result.effectiveUrl;
 
@@ -126,9 +126,9 @@ static void update(const StringSet & channelNames)
         if (!unpacked) {
             // Download the channel tarball.
             try {
-                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath);
+                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz").storePath);
             } catch (FileTransferError & e) {
-                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath);
+                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2").storePath);
             }
         }
         // Regardless of where it came from, add the expression representing this channel to accumulated expression