lix/src/libstore/references.cc

#include "references.hh"
#include "hash.hh"
#include "util.hh"
#include "archive.hh"
#include <map>
#include <cstdlib>

namespace nix {

static unsigned int refLength = 32; /* characters */
static void search(const unsigned char * s, size_t len,
    StringSet & hashes, StringSet & seen)
{
    static bool initialised = false;
    static bool isBase32[256];
    if (!initialised) {
        for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false;
        for (unsigned int i = 0; i < base32Chars.size(); ++i)
            isBase32[(unsigned char) base32Chars[i]] = true;
        initialised = true;
    }

    for (size_t i = 0; i + refLength <= len; ) {
        int j;
        bool match = true;
        for (j = refLength - 1; j >= 0; --j)
            if (!isBase32[(unsigned char) s[i + j]]) {
                i += j + 1;
                match = false;
                break;
            }
        if (!match) continue;

        string ref((const char *) s + i, refLength);
        if (hashes.erase(ref)) {
            debug(format("found reference to '%1%' at offset '%2%'")
                % ref % i);
            seen.insert(ref);
        }
        ++i;
    }
}
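
/* Illustrative sketch, not part of the original source: how search() is
   used.  The hash part below is just an example glibc store path hash;
   any 32-character base-32 hash part behaves the same way.

       StringSet hashes = { "g1g31ah55xdia1jdqabv1imf6mcw0nb1" };
       StringSet seen;

       std::string buf =
           "/nix/store/g1g31ah55xdia1jdqabv1imf6mcw0nb1-glibc-2.25-49/lib";
       search((const unsigned char *) buf.data(), buf.size(), hashes, seen);

   Afterwards `seen` contains the hash and `hashes` no longer does, so a
   caller can tell which candidate references actually occur in the data. */
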
struct RefScanSink : Sink
{
    HashSink hashSink;
    StringSet hashes;
    StringSet seen;

    string tail;

    RefScanSink() : hashSink(htSHA256) { }

    void operator () (const unsigned char * data, size_t len);
};

void RefScanSink::operator () (const unsigned char * data, size_t len)
{
    hashSink(data, len);

    /* It's possible that a reference spans the previous and current
       fragment, so search in the concatenation of the tail of the
       previous fragment and the start of the current fragment. */
    string s = tail + string((const char *) data, len > refLength ? refLength : len);
    search((const unsigned char *) s.data(), s.size(), hashes, seen);

    search(data, len, hashes, seen);

    size_t tailLen = len <= refLength ? len : refLength;
    tail =
        string(tail, tail.size() < refLength - tailLen ? 0 : tail.size() - (refLength - tailLen)) +
        string((const char *) data + len - tailLen, tailLen);
}
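
/* Worked example, added for clarity (not in the original source): with
   refLength = 32, suppose a hash has its first 10 characters at the end of
   chunk A and the remaining 22 at the start of chunk B.  After chunk A,
   `tail` ends with those 10 characters, so on chunk B the first search()
   call above scans

       s = tail + first 32 bytes of chunk B

   which contains the complete 32-character hash; the second search() call
   covers occurrences lying entirely within chunk B.  Keeping at most
   refLength bytes of tail suffices because a reference can overlap a chunk
   boundary by at most refLength - 1 characters. */
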
PathSet scanForReferences(const string & path,
    const PathSet & refs, HashResult & hash)
{
    RefScanSink sink;
    std::map<string, Path> backMap;

    /* For efficiency (and a higher hit rate), just search for the
       hash part of the file name. (This assumes that all references
       have the form `HASH-bla'). */
    for (auto & i : refs) {
        auto baseName = std::string(baseNameOf(i));
        string::size_type pos = baseName.find('-');
        if (pos == string::npos)
            throw Error("bad reference '%1%'", i);
        string s = string(baseName, 0, pos);
        assert(s.size() == refLength);
        assert(backMap.find(s) == backMap.end());
        // parseHash(htSHA256, s);
        sink.hashes.insert(s);
        backMap[s] = i;
    }

    /* Look for the hashes in the NAR dump of the path. */
    dumpPath(path, sink);

    /* Map the hashes found back to their store paths. */
    PathSet found;
    for (auto & i : sink.seen) {
        std::map<string, Path>::iterator j;
        if ((j = backMap.find(i)) == backMap.end()) abort();
        found.insert(j->second);
    }

    hash = sink.hashSink.finish();

    return found;
}
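
/* Usage sketch, not part of the original source; the store paths below are
   illustrative examples standing in for whatever the caller is scanning:

       PathSet candidates = {
           "/nix/store/g1g31ah55xdia1jdqabv1imf6mcw0nb1-glibc-2.25-49",
       };
       HashResult hash;
       PathSet used = scanForReferences(
           "/nix/store/iq6g2x4q62xp7y7493bibx0qn5w7xz67-cowsay-3.03+dfsg1-16",
           candidates, hash);

   `used` is the subset of `candidates` whose hash parts occur in the NAR
   serialisation of the scanned path; `hash` is the SHA-256 NAR hash plus
   the NAR size, computed in the same pass by the embedded HashSink. */
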
RewritingSink::RewritingSink(const std::string & from, const std::string & to, Sink & nextSink)
    : from(from), to(to), nextSink(nextSink)
{
    assert(from.size() == to.size());
}

void RewritingSink::operator () (const unsigned char * data, size_t len)
{
    std::string s(prev);
    s.append((const char *) data, len);

    size_t j = 0;
    while ((j = s.find(from, j)) != string::npos) {
        matches.push_back(pos + j);
        s.replace(j, from.size(), to);
    }

    prev = s.size() < from.size() ? s : std::string(s, s.size() - from.size() + 1, from.size() - 1);

    auto consumed = s.size() - prev.size();

    pos += consumed;

    if (consumed) nextSink((unsigned char *) s.data(), consumed);
}
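
/* Illustrative sketch, not part of the original source: RewritingSink
   replaces every occurrence of `from` with the equal-length `to`, even when
   an occurrence straddles two chunks, and records each match's byte offset.
   Assuming a StringSink from serialise.hh as the destination:

       StringSink out;
       RewritingSink rw("oldhash", "newhash", out);
       rw((const unsigned char *) "foo old", 7);
       rw((const unsigned char *) "hash bar", 8);
       rw.flush();

   The bytes forwarded to `out` spell "foo newhash bar" and rw.matches holds
   the single offset 4.  Holding back the last from.size() - 1 bytes in
   `prev` is what lets the second call complete the match begun in the
   first. */
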
void RewritingSink::flush()
{
    if (prev.empty()) return;
    pos += prev.size();
    nextSink((unsigned char *) prev.data(), prev.size());
    prev.clear();
}

HashModuloSink::HashModuloSink(HashType ht, const std::string & modulus)
    : hashSink(ht)
    , rewritingSink(modulus, std::string(modulus.size(), 0), hashSink)
{
}

void HashModuloSink::operator () (const unsigned char * data, size_t len)
{
    rewritingSink(data, len);
}

HashResult HashModuloSink::finish()
{
    rewritingSink.flush();

    /* Hash the positions of the self-references. This ensures that a
       NAR with self-references and a NAR with some of the
       self-references already zeroed out do not produce a hash
       collision. FIXME: proof. */
    for (auto & pos : rewritingSink.matches) {
        auto s = fmt("|%d", pos);
        hashSink((unsigned char *) s.data(), s.size());
    }

    auto h = hashSink.finish();
    return {h.first, rewritingSink.pos};
}
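
/* Usage sketch, not part of the original source: hashing a store path
   "modulo" its self-references.  The modulus is assumed here to be the hash
   part of the path being hashed, and the path is an illustrative example:

       HashModuloSink sink(htSHA256, "iq6g2x4q62xp7y7493bibx0qn5w7xz67");
       dumpPath("/nix/store/iq6g2x4q62xp7y7493bibx0qn5w7xz67-cowsay-3.03+dfsg1-16", sink);
       HashResult h = sink.finish();

   h.first is the NAR hash computed as if every self-reference were zeroed
   out (with the match offsets mixed back in above); h.second is the number
   of bytes consumed, i.e. the NAR size. */
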
}