primops: add builtins.convertHash #7708

Merged 8 commits on Oct 19, 2023
2 changes: 2 additions & 0 deletions doc/manual/src/release-notes/rl-next.md
@@ -8,4 +8,6 @@

- Introduce a new flake installable syntax `flakeref#.attrPath`, where the `.` prefix indicates that default attribute prefixes such as `packages.<SYSTEM>` or `legacyPackages.<SYSTEM>` are not searched.

- Introduce a new built-in function [`builtins.convertHash`](@docroot@/language/builtins.md#builtins-convertHash).

- `builtins.fetchTree` is now marked as stable.
12 changes: 6 additions & 6 deletions perl/lib/Nix/Store.xs
@@ -78,7 +78,7 @@ SV * queryReferences(char * path)
SV * queryPathHash(char * path)
PPCODE:
try {
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -104,7 +104,7 @@ SV * queryPathInfo(char * path, int base32)
XPUSHs(&PL_sv_undef);
else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize);
@@ -206,7 +206,7 @@ SV * hashPath(char * algo, int base32, char * path)
PPCODE:
try {
Hash h = hashPath(parseHashType(algo), path).first;
auto s = h.to_string(base32 ? Base32 : Base16, false);
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -217,7 +217,7 @@ SV * hashFile(char * algo, int base32, char * path)
PPCODE:
try {
Hash h = hashFile(parseHashType(algo), path);
auto s = h.to_string(base32 ? Base32 : Base16, false);
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -228,7 +228,7 @@ SV * hashString(char * algo, int base32, char * s)
PPCODE:
try {
Hash h = hashString(parseHashType(algo), s);
auto s = h.to_string(base32 ? Base32 : Base16, false);
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -239,7 +239,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
PPCODE:
try {
auto h = Hash::parseAny(s, parseHashType(algo));
auto s = h.to_string(toBase32 ? Base32 : Base16, false);
auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
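The `Base32` → `HashFormat::Base32` substitutions in this file, and throughout the rest of the diff, come from replacing the old unscoped base-encoding enum with a scoped `HashFormat` enum. Its definition sits outside these hunks; the following is only a sketch of what the libutil declaration is assumed to look like:

```cpp
// Sketch only — the real declaration lives in src/libutil/hash.hh and may differ.
// A scoped enum keeps the old bare names (Base16, Base32, ...) out of every
// namespace, which is why call sites now write HashFormat::Base32 explicitly.
enum struct HashFormat : int {
    Base64,  // RFC 4648 base64
    Base32,  // Nix-specific base-32 alphabet used in store paths
    Base16,  // lowercase hexadecimal
    SRI,     // Subresource Integrity string: "<algo>-<base64>"
};
```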
2 changes: 1 addition & 1 deletion src/libexpr/eval-cache.cc
@@ -50,7 +50,7 @@ struct AttrDb
Path cacheDir = getCacheDir() + "/nix/eval-cache-v5";
createDirs(cacheDir);

Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
Path dbPath = cacheDir + "/" + fingerprint.to_string(HashFormat::Base16, false) + ".sqlite";

state->db = SQLite(dbPath);
state->db.isCache();
99 changes: 97 additions & 2 deletions src/libexpr/primops.cc
@@ -1758,7 +1758,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V

auto path = realisePath(state, pos, *args[1]);

v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
}

static RegisterPrimOp primop_hashFile({
@@ -3760,7 +3760,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
NixStringContext context; // discarded
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");

v.mkString(hashString(*ht, s).to_string(Base16, false));
v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
}

static RegisterPrimOp primop_hashString({
@@ -3774,6 +3774,101 @@ static RegisterPrimOp primop_hashString({
.fun = prim_hashString,
});

static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.convertHash");
auto &inputAttrs = args[0]->attrs;

Bindings::iterator iteratorHash = getAttr(state, state.symbols.create("hash"), inputAttrs, "while locating the attribute 'hash'");
auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");

Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo"));
std::optional<HashType> ht = std::nullopt;
if (iteratorHashAlgo != inputAttrs->end()) {
ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
}

Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'");
HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));

v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI));
}
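`prim_convertHash` leans on a `parseHashFormat` helper whose definition is not part of this hunk. A minimal sketch of how such a helper could map the accepted strings onto the enum, given purely as an assumption about its shape:

```cpp
// Sketch only — the actual helper belongs in libutil and may differ in
// naming and error wording.
HashFormat parseHashFormat(const std::string & s)
{
    if (s == "base16") return HashFormat::Base16;
    if (s == "base32") return HashFormat::Base32;
    if (s == "base64") return HashFormat::Base64;
    if (s == "sri")    return HashFormat::SRI;
    throw UsageError("unknown hash format '%s', expect 'base16', 'base32', 'base64', or 'sri'", s);
}
```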

static RegisterPrimOp primop_convertHash({
.name = "__convertHash",
.args = {"args"},
.doc = R"(
Return the specified representation of a hash string, based on the attributes presented in *args*:

- `hash`

The hash to be converted.
The hash format is detected automatically.

- `hashAlgo`

The algorithm used to create the hash. Must be one of
- `"md5"`
- `"sha1"`
- `"sha256"`
- `"sha512"`

The attribute may be omitted when `hash` is an [SRI hash](https://www.w3.org/TR/SRI/#the-integrity-attribute) or when the hash is prefixed with the hash algorithm name followed by a colon.
That `<hashAlgo>:<hashBody>` syntax is supported for backwards compatibility with existing tooling.

- `toHashFormat`

The format of the resulting hash. Must be one of
- `"base16"`
- `"base32"`
- `"base64"`
- `"sri"`

The returned string is the *toHashFormat* representation of *hash*.

> **Example**
>
> Convert a SHA256 hash in Base16 to SRI:
>
> ```nix
> builtins.convertHash {
> hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
> toHashFormat = "sri";
> hashAlgo = "sha256";
> }
> ```
>
> "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="

> **Example**
>
> Convert a SHA256 hash in SRI to Base16:
>
> ```nix
> builtins.convertHash {
> hash = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=";
> toHashFormat = "base16";
> }
> ```
>
> "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"

> **Example**
>
> Convert a hash in the form `<hashAlgo>:<hashBody>` in Base16 to SRI:
>
> ```nix
> builtins.convertHash {
> hash = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
> toHashFormat = "sri";
> }
> ```
>
> "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="
)",
.fun = prim_convertHash,
});
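The `hf == HashFormat::SRI` flag passed to `to_string` above decides whether the algorithm prefix is emitted, which is what makes the SRI output self-describing. A small stand-alone sketch of the same conversion path, mirroring the first documentation example and assuming the usual `Hash` API from libutil:

```cpp
// Sketch only — exercises Hash::parseAny/to_string the same way prim_convertHash does.
#include <iostream>
#include "hash.hh"

int main()
{
    using namespace nix;
    auto h = Hash::parseAny(
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
        htSHA256);
    // Pass true so the SRI rendering carries its "sha256-" algorithm prefix.
    std::cout << h.to_string(HashFormat::SRI, true) << std::endl;
    // Expected output: sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=
}
```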

struct RegexCache
{
// TODO use C++20 transparent comparison when available
4 changes: 2 additions & 2 deletions src/libexpr/primops/fetchTree.cc
@@ -31,7 +31,7 @@ void emitTreeAttrs(

auto narHash = input.getNarHash();
assert(narHash);
attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true));

if (input.getType() == "git")
attrs.alloc("submodules").mkBool(
@@ -297,7 +297,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
: hashFile(htSHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true)));
*url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)));
}

state.allowAndSetStorePathString(storePath, v);
2 changes: 1 addition & 1 deletion src/libexpr/tests/error_traces.cc
@@ -1084,7 +1084,7 @@ namespace nix {

ASSERT_TRACE1("hashString \"foo\" \"content\"",
UsageError,
hintfmt("unknown hash algorithm '%s'", "foo"));
hintfmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo"));

ASSERT_TRACE2("hashString \"sha256\" {}",
TypeError,
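The updated expectation reflects a friendlier message from `parseHashType`, whose definition is outside this diff. A sketch of how that helper might phrase the error, stated as an assumption rather than the actual libutil code:

```cpp
// Sketch only — the real parseHashType lives in src/libutil/hash.cc.
HashType parseHashType(std::string_view s)
{
    if (auto ht = parseHashTypeOpt(s))
        return *ht;
    throw UsageError("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", s);
}
```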
4 changes: 2 additions & 2 deletions src/libfetchers/fetchers.cc
@@ -147,12 +147,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
};

auto narHash = store->queryPathInfo(tree.storePath)->narHash;
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));

if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
to_string(), tree.actualPath, prevNarHash->to_string(HashFormat::SRI, true), narHash.to_string(HashFormat::SRI, true));
}

if (auto prevLastModified = getLastModified()) {
4 changes: 2 additions & 2 deletions src/libfetchers/git.cc
@@ -46,7 +46,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
Path getCachePath(std::string_view key)
{
return getCacheDir() + "/nix/gitv3/" +
hashString(htSHA256, key).to_string(Base32, false);
hashString(htSHA256, key).to_string(HashFormat::Base32, false);
}

// Returns the name of the HEAD branch.
@@ -417,7 +417,7 @@ struct GitInputScheme : InputScheme
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
};

auto getLockedAttrs = [&]()
8 changes: 4 additions & 4 deletions src/libfetchers/github.cc
@@ -125,7 +125,7 @@ struct GitArchiveInputScheme : InputScheme
auto path = owner + "/" + repo;
assert(!(ref && rev));
if (ref) path += "/" + *ref;
if (rev) path += "/" + rev->to_string(Base16, false);
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
return ParsedURL {
.scheme = type(),
.path = path,
@@ -296,7 +296,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
: "https://api.%s/repos/%s/%s/tarball/%s";

const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));

return DownloadUrl { url, headers };
}
@@ -362,7 +362,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));

Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
@@ -449,7 +449,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));

Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
4 changes: 2 additions & 2 deletions src/libfetchers/mercurial.cc
@@ -206,7 +206,7 @@ struct MercurialInputScheme : InputScheme
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && hash->type != htSHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true));
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
};


@@ -252,7 +252,7 @@ struct MercurialInputScheme : InputScheme
}
}

Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));

/* If this is a commit hash that we already have, we don't
have to pull again. */
2 changes: 1 addition & 1 deletion src/libfetchers/tarball.cc
@@ -250,7 +250,7 @@ struct CurlInputScheme : InputScheme
// NAR hashes are preferred over file hashes since tar/zip
// files don't have a canonical representation.
if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
return url;
}

2 changes: 1 addition & 1 deletion src/libstore/binary-cache-store.cc
@@ -164,7 +164,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
auto [fileHash, fileSize] = fileHashSink.finish();
narInfo->fileHash = fileHash;
narInfo->fileSize = fileSize;
narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "zstd" ? ".zst" :
6 changes: 3 additions & 3 deletions src/libstore/build/local-derivation-goal.cc
@@ -1066,7 +1066,7 @@ void LocalDerivationGoal::initTmpDir() {
env[i.first] = i.second;
} else {
auto hash = hashString(htSHA256, i.first);
std::string fn = ".attr-" + hash.to_string(Base32, false);
std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false);
Path p = tmpDir + "/" + fn;
writeFile(p, rewriteStrings(i.second, inputRewrites));
chownToBuilder(p);
@@ -2583,8 +2583,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
delayedException = std::make_exception_ptr(
BuildError("hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s",
worker.store.printStorePath(drvPath),
wanted.to_string(SRI, true),
got.to_string(SRI, true)));
wanted.to_string(HashFormat::SRI, true),
got.to_string(HashFormat::SRI, true)));
}
if (!newInfo0.references.empty())
delayedException = std::make_exception_ptr(
2 changes: 1 addition & 1 deletion src/libstore/builtins/fetchurl.cc
@@ -65,7 +65,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false));
return;
} catch (Error & e) {
debug(e.what());
2 changes: 1 addition & 1 deletion src/libstore/content-address.cc
@@ -60,7 +60,7 @@ std::string ContentAddress::render() const
+ makeFileIngestionPrefix(method);
},
}, method.raw)
+ this->hash.to_string(Base32, true);
+ this->hash.to_string(HashFormat::Base32, true);
}

/**
2 changes: 1 addition & 1 deletion src/libstore/daemon.cc
@@ -334,7 +334,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto hash = store->queryPathInfo(path)->narHash;
logger->stopWork();
to << hash.to_string(Base16, false);
to << hash.to_string(HashFormat::Base16, false);
break;
}
