#include "references.hh"
#include "hash.hh"
#include "util.hh"
#include "archive.hh"

#include <map>
#include <cstdlib>
#include <mutex>


namespace nix {

static size_t refLength = 32; /* characters */


/* Scan `s` for 32-character base-32 substrings that occur in `hashes`.
   Every hash part found is removed from `hashes` and added to `seen`, so
   each candidate is reported at most once. */
static void search(
    std::string_view s,
    StringSet & hashes,
    StringSet & seen)
{
    static std::once_flag initialised;
    static bool isBase32[256];
    std::call_once(initialised, [](){
        for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false;
        for (unsigned int i = 0; i < base32Chars.size(); ++i)
            isBase32[(unsigned char) base32Chars[i]] = true;
    });

    for (size_t i = 0; i + refLength <= s.size(); ) {
        int j;
        bool match = true;
        for (j = refLength - 1; j >= 0; --j)
            if (!isBase32[(unsigned char) s[i + j]]) {
                i += j + 1;
                match = false;
                break;
            }
        if (!match) continue;
        std::string ref(s.substr(i, refLength));
        if (hashes.erase(ref)) {
            debug("found reference to '%1%' at offset '%2%'", ref, i);
            seen.insert(ref);
        }
        ++i;
    }
}


void RefScanSink::operator () (std::string_view data)
{
    /* It's possible that a reference spans the previous and current
       fragment, so search in the concatenation of the tail of the
       previous fragment and the start of the current fragment. */
    auto s = tail;
    auto tailLen = std::min(data.size(), refLength);
    s.append(data.data(), tailLen);
    search(s, hashes, seen);

    search(data, hashes, seen);

    auto rest = refLength - tailLen;
    if (rest < tail.size())
        tail = tail.substr(tail.size() - rest);
    tail.append(data.data() + data.size() - tailLen, tailLen);
}
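
/* A minimal usage sketch (assuming the RefScanSink interface declared in
   references.hh: a StringSet constructor and getResult()). The hash part
   below is a hypothetical 32-character base-32 string; it is found even
   though it straddles the boundary between the two fragments:

       StringSet hashes{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q"};
       RefScanSink sink(std::move(hashes));
       sink("...leading data ending in g1w7hy");
       sink("3qg1w7hy3qg1w7hy3qg1w7hy3q trailing data...");
       assert(sink.getResult().count("g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q"));
*/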


PathRefScanSink::PathRefScanSink(StringSet && hashes, std::map<std::string, StorePath> && backMap)
    : RefScanSink(std::move(hashes))
    , backMap(std::move(backMap))
{ }

PathRefScanSink PathRefScanSink::fromPaths(const StorePathSet & refs)
{
    StringSet hashes;
    std::map<std::string, StorePath> backMap;

    for (auto & i : refs) {
        std::string hashPart(i.hashPart());
        auto inserted = backMap.emplace(hashPart, i).second;
        assert(inserted);
        hashes.insert(hashPart);
    }

    return PathRefScanSink(std::move(hashes), std::move(backMap));
}

StorePathSet PathRefScanSink::getResultPaths()
{
    /* Map the hashes found back to their store paths. */
    StorePathSet found;
    for (auto & i : getResult()) {
        auto j = backMap.find(i);
        assert(j != backMap.end());
        found.insert(j->second);
    }

    return found;
}
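
/* A minimal usage sketch: scan an arbitrary byte stream for the hash parts
   of a set of candidate store paths and map the hits back to StorePaths.
   `refs` and `data` are hypothetical.

       PathRefScanSink sink = PathRefScanSink::fromPaths(refs);
       sink(data);                       // feed as many fragments as needed
       StorePathSet found = sink.getResultPaths();
*/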


std::pair<StorePathSet, HashResult> scanForReferences(
    const std::string & path,
    const StorePathSet & refs)
{
    HashSink hashSink { htSHA256 };
    auto found = scanForReferences(hashSink, path, refs);
    auto hash = hashSink.finish();
    return std::pair<StorePathSet, HashResult>(found, hash);
}
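
/* A minimal usage sketch: scan the NAR serialisation of a path for
   references to the candidate paths in `refs`, obtaining its SHA-256 NAR
   hash and size in the same pass. The path and `refs` are hypothetical.

       auto [found, hashResult] = scanForReferences("/nix/store/example-path", refs);
*/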

StorePathSet scanForReferences(
    Sink & toTee,
    const Path & path,
    const StorePathSet & refs)
{
    PathRefScanSink refsSink = PathRefScanSink::fromPaths(refs);
    TeeSink sink { refsSink, toTee };

    /* Look for the hashes in the NAR dump of the path. */
    dumpPath(path, sink);

    return refsSink.getResultPaths();
}
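
/* A minimal usage sketch (assuming StringSink from serialise.hh collects
   everything written to it in its `s` member): scan for references while
   also capturing the NAR dump of the path. The path and `refs` are
   hypothetical.

       StringSink nar;
       StorePathSet found = scanForReferences(nar, "/nix/store/example-path", refs);
       // nar.s now holds the NAR serialisation of the path.
*/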


RewritingSink::RewritingSink(const std::string & from, const std::string & to, Sink & nextSink)
    : from(from), to(to), nextSink(nextSink)
{
    assert(from.size() == to.size());
}

void RewritingSink::operator () (std::string_view data)
{
    std::string s(prev);
    s.append(data);

    size_t j = 0;
    while ((j = s.find(from, j)) != std::string::npos) {
        matches.push_back(pos + j);
        s.replace(j, from.size(), to);
    }

    prev = s.size() < from.size() ? s : std::string(s, s.size() - from.size() + 1, from.size() - 1);

    auto consumed = s.size() - prev.size();

    pos += consumed;

    if (consumed) nextSink(s.substr(0, consumed));
}

void RewritingSink::flush()
{
    if (prev.empty()) return;
    pos += prev.size();
    nextSink(prev);
    prev.clear();
}
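
/* A minimal usage sketch (assuming StringSink from serialise.hh collects
   the rewritten output in its `s` member). `from` and `to` must have the
   same length; matches that straddle fragment boundaries are still
   rewritten:

       StringSink out;
       RewritingSink rewriter("oldhash", "newhash", out);
       rewriter("prefix old");
       rewriter("hash suffix");
       rewriter.flush();
       // out.s == "prefix newhash suffix", rewriter.matches == {7}
*/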


HashModuloSink::HashModuloSink(HashType ht, const std::string & modulus)
    : hashSink(ht)
    , rewritingSink(modulus, std::string(modulus.size(), 0), hashSink)
{
}

void HashModuloSink::operator () (std::string_view data)
{
    rewritingSink(data);
}

HashResult HashModuloSink::finish()
{
    rewritingSink.flush();

    /* Hash the positions of the self-references. This ensures that a
       NAR with self-references and a NAR with some of the
       self-references already zeroed out do not produce a hash
       collision. FIXME: proof. */
    for (auto & pos : rewritingSink.matches)
        hashSink(fmt("|%d", pos));

    auto h = hashSink.finish();
    return {h.first, rewritingSink.pos};
}
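
/* A minimal usage sketch: hash a stream as if every occurrence of
   `modulus` (typically the hash part of the path's own store path) were
   zeroed out, while still mixing the positions of those occurrences into
   the hash. The hash part and `narData` below are hypothetical.

       HashModuloSink sink(htSHA256, "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q");
       sink(narData);
       auto [hash, size] = sink.finish();
*/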

}