Merge branch 'master' of github.com:NixOS/nix into add-body-to-network-errors
commit 2d2a10e79a
@@ -1,6 +1,7 @@
 ((c++-mode . (
   (c-file-style . "k&r")
   (c-basic-offset . 4)
+  (c-block-comment-prefix . " ")
   (indent-tabs-mode . nil)
   (tab-width . 4)
   (show-trailing-whitespace . t)
.github/workflows/test.yml
@@ -12,3 +12,13 @@ jobs:
     - uses: actions/checkout@v2
     - uses: cachix/install-nix-action@v10
     - run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings
+  macos_perf_test:
+    runs-on: macos-latest
+    steps:
+    - name: Disable syspolicy assessments
+      run: |
+        spctl --status
+        sudo spctl --master-disable
+    - uses: actions/checkout@v2
+    - uses: cachix/install-nix-action@v10
+    - run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings
@@ -170,15 +170,5 @@ $channelsBucket->add_key(
 chdir("/home/eelco/Dev/nix-pristine") or die;
 system("git remote update origin") == 0 or die;
 system("git tag --force --sign $version $nixRev -m 'Tagging release $version'") == 0 or die;
+system("git push --tags") == 0 or die;
-# Update the website.
+system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die;
-my $siteDir = "/home/eelco/Dev/nixos-homepage-pristine";
-
-system("cd $siteDir && git pull") == 0 or die;
-
-write_file("$siteDir/nix-release.tt",
-    "[%-\n" .
-    "latestNixVersion = \"$version\"\n" .
-    "-%]\n");
-
-system("cd $siteDir && git commit -a -m 'Nix $version released'") == 0 or die;
@@ -182,7 +182,7 @@ void importPaths(int fd, int dontCheckSigs)
     PPCODE:
         try {
             FdSource source(fd);
-            store()->importPaths(source, nullptr, dontCheckSigs ? NoCheckSigs : CheckSigs);
+            store()->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs);
         } catch (Error & e) {
             croak("%s", e.what());
         }
@@ -366,7 +366,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)

         if (store->isInStore(r.second)) {
             StorePathSet closure;
-            store->computeFSClosure(store->parseStorePath(store->toStorePath(r.second)), closure);
+            store->computeFSClosure(store->toStorePath(r.second).first, closure);
             for (auto & path : closure)
                 allowedPaths->insert(store->printStorePath(path));
         } else
@@ -883,10 +883,10 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
             .hint = hintfmt("path '%1%' is not in the Nix store", path),
             .errPos = pos
         });
-    Path path2 = state.store->toStorePath(path);
+    auto path2 = state.store->toStorePath(path).first;
     if (!settings.readOnlyMode)
-        state.store->ensurePath(state.store->parseStorePath(path2));
+        state.store->ensurePath(path2);
-    context.insert(path2);
+    context.insert(state.store->printStorePath(path2));
     mkString(v, path, context);
 }

@@ -15,6 +15,7 @@
 #include <chrono>
 #include <future>
 #include <regex>
+#include <fstream>

 #include <nlohmann/json.hpp>

@@ -57,6 +58,13 @@ void BinaryCacheStore::init()
     }
 }

+void BinaryCacheStore::upsertFile(const std::string & path,
+    std::string && data,
+    const std::string & mimeType)
+{
+    upsertFile(path, std::make_shared<std::stringstream>(std::move(data)), mimeType);
+}
+
 void BinaryCacheStore::getFile(const std::string & path,
     Callback<std::shared_ptr<std::string>> callback) noexcept
 {
@@ -113,13 +121,74 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
     diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
 }

-void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
-    RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr<FSAccessor> accessor)
+AutoCloseFD openFile(const Path & path)
 {
-    // FIXME: See if we can use the original source to reduce memory usage.
-    auto nar = make_ref<std::string>(narSource.drain());
-
-    if (!repair && isValidPath(info.path)) return;
+    auto fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
+    if (!fd)
+        throw SysError("opening file '%1%'", path);
+    return fd;
+}
+
+struct FileSource : FdSource
+{
+    AutoCloseFD fd2;
+
+    FileSource(const Path & path)
+        : fd2(openFile(path))
+    {
+        fd = fd2.get();
+    }
+};
+
+void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
+    RepairFlag repair, CheckSigsFlag checkSigs)
+{
+    assert(info.narHash && info.narSize);
+
+    if (!repair && isValidPath(info.path)) {
+        // FIXME: copyNAR -> null sink
+        narSource.drain();
+        return;
+    }
+
+    auto [fdTemp, fnTemp] = createTempFile();
+
+    auto now1 = std::chrono::steady_clock::now();
+
+    /* Read the NAR simultaneously into a CompressionSink+FileSink (to
+       write the compressed NAR to disk), into a HashSink (to get the
+       NAR hash), and into a NarAccessor (to get the NAR listing). */
+    HashSink fileHashSink(htSHA256);
+    std::shared_ptr<FSAccessor> narAccessor;
+    {
+        FdSink fileSink(fdTemp.get());
+        TeeSink teeSink(fileSink, fileHashSink);
+        auto compressionSink = makeCompressionSink(compression, teeSink);
+        TeeSource teeSource(narSource, *compressionSink);
+        narAccessor = makeNarAccessor(teeSource);
+        compressionSink->finish();
+    }
+
+    auto now2 = std::chrono::steady_clock::now();
+
+    auto narInfo = make_ref<NarInfo>(info);
+    narInfo->narSize = info.narSize;
+    narInfo->narHash = info.narHash;
+    narInfo->compression = compression;
+    auto [fileHash, fileSize] = fileHashSink.finish();
+    narInfo->fileHash = fileHash;
+    narInfo->fileSize = fileSize;
+    narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
+        + (compression == "xz" ? ".xz" :
+           compression == "bzip2" ? ".bz2" :
+           compression == "br" ? ".br" :
+           "");
+
+    auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1).count();
+    printMsg(lvlTalkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache",
+        printStorePath(narInfo->path), info.narSize,
+        ((1.0 - (double) fileSize / info.narSize) * 100.0),
+        duration);

     /* Verify that all references are valid. This may do some .narinfo
        reads, but typically they'll already be cached. */
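The hunk above replaces the old drain-the-whole-NAR-into-a-string approach with a single streaming pass that feeds the file writer, the hash, and the NAR indexer at the same time. A minimal standalone sketch of that fan-out ("tee") idea follows; the ByteSink alias and FanOutSink struct are simplified stand-ins, not the actual nix::Sink/TeeSink/TeeSource classes used in the diff:

    // Sketch: fan one byte stream out to several consumers in a single pass.
    #include <cstddef>
    #include <functional>
    #include <vector>

    using ByteSink = std::function<void(const unsigned char *, size_t)>;

    struct FanOutSink
    {
        std::vector<ByteSink> sinks;

        void operator()(const unsigned char * data, size_t len)
        {
            // Every consumer (file writer, hasher, indexer, ...) sees each chunk once.
            for (auto & sink : sinks)
                sink(data, len);
        }
    };

Each consumer sees every chunk exactly once, so nothing beyond the current chunk has to be held in memory.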
@@ -132,23 +201,6 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
                 printStorePath(info.path), printStorePath(ref));
         }

-    assert(nar->compare(0, narMagic.size(), narMagic) == 0);
-
-    auto narInfo = make_ref<NarInfo>(info);
-
-    narInfo->narSize = nar->size();
-    narInfo->narHash = hashString(htSHA256, *nar);
-
-    if (info.narHash && info.narHash != narInfo->narHash)
-        throw Error("refusing to copy corrupted path '%1%' to binary cache", printStorePath(info.path));
-
-    auto accessor_ = std::dynamic_pointer_cast<RemoteFSAccessor>(accessor);
-
-    auto narAccessor = makeNarAccessor(nar);
-
-    if (accessor_)
-        accessor_->addToCache(printStorePath(info.path), *nar, narAccessor);
-
     /* Optionally write a JSON file containing a listing of the
        contents of the NAR. */
     if (writeNARListing) {
@@ -160,33 +212,13 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource

         {
             auto res = jsonRoot.placeholder("root");
-            listNar(res, narAccessor, "", true);
+            listNar(res, ref<FSAccessor>(narAccessor), "", true);
         }
     }

     upsertFile(std::string(info.path.to_string()) + ".ls", jsonOut.str(), "application/json");
     }

-    /* Compress the NAR. */
-    narInfo->compression = compression;
-    auto now1 = std::chrono::steady_clock::now();
-    auto narCompressed = compress(compression, *nar, parallelCompression);
-    auto now2 = std::chrono::steady_clock::now();
-    narInfo->fileHash = hashString(htSHA256, *narCompressed);
-    narInfo->fileSize = narCompressed->size();
-
-    auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1).count();
-    printMsg(lvlTalkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache",
-        printStorePath(narInfo->path), narInfo->narSize,
-        ((1.0 - (double) narCompressed->size() / nar->size()) * 100.0),
-        duration);
-
-    narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
-        + (compression == "xz" ? ".xz" :
-           compression == "bzip2" ? ".bz2" :
-           compression == "br" ? ".br" :
-           "");
-
     /* Optionally maintain an index of DWARF debug info files
        consisting of JSON files named 'debuginfo/<build-id>' that
        specify the NAR file and member containing the debug info. */
@@ -247,12 +279,14 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
     /* Atomically write the NAR file. */
     if (repair || !fileExists(narInfo->url)) {
         stats.narWrite++;
-        upsertFile(narInfo->url, *narCompressed, "application/x-nix-nar");
+        upsertFile(narInfo->url,
+            std::make_shared<std::fstream>(fnTemp, std::ios_base::in),
+            "application/x-nix-nar");
     } else
         stats.narWriteAverted++;

-    stats.narWriteBytes += nar->size();
+    stats.narWriteBytes += info.narSize;
-    stats.narWriteCompressedBytes += narCompressed->size();
+    stats.narWriteCompressedBytes += fileSize;
     stats.narWriteCompressionTimeMs += duration;

     /* Atomically write the NAR info file.*/
@@ -351,7 +385,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
     ValidPathInfo info(makeFixedOutputPath(method, h, name));

     auto source = StringSource { *sink.s };
-    addToStore(info, source, repair, CheckSigs, nullptr);
+    addToStore(info, source, repair, CheckSigs);

     return std::move(info.path);
 }
@@ -366,7 +400,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
         StringSink sink;
         dumpString(s, sink);
         auto source = StringSource { *sink.s };
-        addToStore(info, source, repair, CheckSigs, nullptr);
+        addToStore(info, source, repair, CheckSigs);
     }

     return std::move(info.path);
@@ -36,9 +36,13 @@ public:
     virtual bool fileExists(const std::string & path) = 0;

     virtual void upsertFile(const std::string & path,
-        const std::string & data,
+        std::shared_ptr<std::basic_iostream<char>> istream,
         const std::string & mimeType) = 0;

+    void upsertFile(const std::string & path,
+        std::string && data,
+        const std::string & mimeType);
+
     /* Note: subclasses must implement at least one of the two
        following getFile() methods. */

@@ -75,8 +79,7 @@ public:
     { unsupported("queryPathFromHashPart"); }

     void addToStore(const ValidPathInfo & info, Source & narSource,
-        RepairFlag repair, CheckSigsFlag checkSigs,
-        std::shared_ptr<FSAccessor> accessor) override;
+        RepairFlag repair, CheckSigsFlag checkSigs) override;

     StorePath addToStore(const string & name, const Path & srcPath,
         FileIngestionMethod method, HashType hashAlgo,
@@ -2041,7 +2041,10 @@ void DerivationGoal::startBuilder()
             if (!std::regex_match(fileName, regex))
                 throw Error("invalid file name '%s' in 'exportReferencesGraph'", fileName);

-            auto storePath = worker.store.parseStorePath(*i++);
+            auto storePathS = *i++;
+            if (!worker.store.isInStore(storePathS))
+                throw BuildError("'exportReferencesGraph' contains a non-store path '%1%'", storePathS);
+            auto storePath = worker.store.toStorePath(storePathS).first;

             /* Write closure info to <fileName>. */
             writeFile(tmpDir + "/" + fileName,
@@ -2080,7 +2083,7 @@ void DerivationGoal::startBuilder()
         for (auto & i : dirsInChroot)
             try {
                 if (worker.store.isInStore(i.second.source))
-                    worker.store.computeFSClosure(worker.store.parseStorePath(worker.store.toStorePath(i.second.source)), closure);
+                    worker.store.computeFSClosure(worker.store.toStorePath(i.second.source).first, closure);
             } catch (InvalidPath & e) {
             } catch (Error & e) {
                 throw Error("while processing 'sandbox-paths': %s", e.what());
@@ -2765,10 +2768,9 @@ struct RestrictedStore : public LocalFSStore
     { throw Error("addToStore"); }

     void addToStore(const ValidPathInfo & info, Source & narSource,
-        RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs,
-        std::shared_ptr<FSAccessor> accessor = 0) override
+        RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) override
     {
-        next->addToStore(info, narSource, repair, checkSigs, accessor);
+        next->addToStore(info, narSource, repair, checkSigs);
         goal.addDependency(info.path);
     }

@@ -82,4 +82,16 @@ std::string renderContentAddress(std::optional<ContentAddress> ca) {
     return ca ? renderContentAddress(*ca) : "";
 }

+Hash getContentAddressHash(const ContentAddress & ca)
+{
+    return std::visit(overloaded {
+        [](TextHash th) {
+            return th.hash;
+        },
+        [](FixedOutputHash fsh) {
+            return fsh.hash;
+        }
+    }, ca);
+}
+
 }
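getContentAddressHash above relies on the common "overloaded" visitor idiom for std::variant. A self-contained sketch of that idiom with plain standard types — the variant and the lambdas here are illustrative only, not Nix's ContentAddress:

    // Sketch: visit a std::variant with one lambda per alternative (C++17).
    #include <iostream>
    #include <string>
    #include <variant>

    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    int main()
    {
        std::variant<int, std::string> v = std::string("text hash");
        std::visit(overloaded {
            [](int i) { std::cout << "fixed-output: " << i << "\n"; },
            [](const std::string & s) { std::cout << "text: " << s << "\n"; },
        }, v);
    }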
@@ -53,4 +53,6 @@ ContentAddress parseContentAddress(std::string_view rawCa);

 std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);

+Hash getContentAddressHash(const ContentAddress & ca);
+
 }
@@ -81,7 +81,7 @@ struct TunnelLogger : public Logger
         showErrorInfo(oss, ei, false);

         StringSink buf;
-        buf << STDERR_NEXT << oss.str() << "\n";
+        buf << STDERR_NEXT << oss.str();
         enqueueMsg(*buf.s);
     }

@@ -391,7 +391,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         }
         HashType hashAlgo = parseHashType(s);

-        TeeSource savedNAR(from);
+        StringSink savedNAR;
+        TeeSource savedNARSource(from, savedNAR);
         RetrieveRegularNARSink savedRegular;

         if (method == FileIngestionMethod::Recursive) {
@@ -399,7 +400,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
                a string so that we can pass it to
                addToStoreFromDump(). */
             ParseSink sink; /* null sink; just parse the NAR */
-            parseDump(sink, savedNAR);
+            parseDump(sink, savedNARSource);
         } else
             parseDump(savedRegular, from);

@@ -407,7 +408,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         if (!savedRegular.regular) throw Error("regular file expected");

         auto path = store->addToStoreFromDump(
-            method == FileIngestionMethod::Recursive ? *savedNAR.data : savedRegular.s,
+            method == FileIngestionMethod::Recursive ? *savedNAR.s : savedRegular.s,
             baseName,
             method,
             hashAlgo);
@@ -442,7 +443,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
     case wopImportPaths: {
         logger->startWork();
         TunnelSource source(from, to);
-        auto paths = store->importPaths(source, nullptr,
+        auto paths = store->importPaths(source,
             trusted ? NoCheckSigs : CheckSigs);
         logger->stopWork();
         Strings paths2;
@@ -731,10 +732,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
             source = std::make_unique<TunnelSource>(from, to);
         else {
-            TeeSource tee(from);
-            ParseSink sink;
-            parseDump(sink, tee);
-            saved = std::move(*tee.data);
+            TeeParseSink tee(from);
+            parseDump(tee, tee.source);
+            saved = std::move(*tee.saved.s);
             source = std::make_unique<StringSource>(saved);
         }

@@ -742,7 +742,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,

         // FIXME: race if addToStore doesn't read source?
         store->addToStore(info, *source, (RepairFlag) repair,
-            dontCheckSigs ? NoCheckSigs : CheckSigs, nullptr);
+            dontCheckSigs ? NoCheckSigs : CheckSigs);

         logger->stopWork();
         break;
@@ -4,7 +4,6 @@
 #include "util.hh"
 #include "worker-protocol.hh"
 #include "fs-accessor.hh"
-#include "istringstream_nocopy.hh"

 namespace nix {

@@ -101,7 +100,7 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
 }


-static DerivationOutput parseDerivationOutput(const Store & store, istringstream_nocopy & str)
+static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
 {
     expect(str, ","); auto path = store.parseStorePath(parsePath(str));
     expect(str, ","); auto hashAlgo = parseString(str);
@@ -129,10 +128,10 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
 }


-static Derivation parseDerivation(const Store & store, const string & s)
+static Derivation parseDerivation(const Store & store, std::string && s)
 {
     Derivation drv;
-    istringstream_nocopy str(s);
+    std::istringstream str(std::move(s));
     expect(str, "Derive([");

     /* Parse the list of outputs. */
@@ -7,24 +7,6 @@

 namespace nix {

-struct HashAndWriteSink : Sink
-{
-    Sink & writeSink;
-    HashSink hashSink;
-    HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(htSHA256)
-    {
-    }
-    virtual void operator () (const unsigned char * data, size_t len)
-    {
-        writeSink(data, len);
-        hashSink(data, len);
-    }
-    Hash currentHash()
-    {
-        return hashSink.currentHash().first;
-    }
-};
-
 void Store::exportPaths(const StorePathSet & paths, Sink & sink)
 {
     auto sorted = topoSortPaths(paths);
@@ -47,28 +29,29 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 {
     auto info = queryPathInfo(path);

-    HashAndWriteSink hashAndWriteSink(sink);
+    HashSink hashSink(htSHA256);
+    TeeSink teeSink(sink, hashSink);

-    narFromPath(path, hashAndWriteSink);
+    narFromPath(path, teeSink);

     /* Refuse to export paths that have changed. This prevents
        filesystem corruption from spreading to other machines.
        Don't complain if the stored hash is zero (unknown). */
-    Hash hash = hashAndWriteSink.currentHash();
+    Hash hash = hashSink.currentHash().first;
     if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
         throw Error("hash of path '%s' has changed from '%s' to '%s'!",
             printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));

-    hashAndWriteSink
+    teeSink
         << exportMagic
         << printStorePath(path);
-    writeStorePaths(*this, hashAndWriteSink, info->references);
+    writeStorePaths(*this, teeSink, info->references);
-    hashAndWriteSink
+    teeSink
         << (info->deriver ? printStorePath(*info->deriver) : "")
         << 0;
 }

-StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> accessor, CheckSigsFlag checkSigs)
+StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
 {
     StorePaths res;
     while (true) {
@@ -77,9 +60,8 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
         if (n != 1) throw Error("input doesn't look like something created by 'nix-store --export'");

         /* Extract the NAR from the source. */
-        TeeSource tee(source);
-        ParseSink sink;
-        parseDump(sink, tee);
+        TeeParseSink tee(source);
+        parseDump(tee, tee.source);

         uint32_t magic = readInt(source);
         if (magic != exportMagic)
@@ -95,16 +77,16 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
         if (deriver != "")
             info.deriver = parseStorePath(deriver);

-        info.narHash = hashString(htSHA256, *tee.data);
+        info.narHash = hashString(htSHA256, *tee.saved.s);
-        info.narSize = tee.data->size();
+        info.narSize = tee.saved.s->size();

         // Ignore optional legacy signature.
         if (readInt(source) == 1)
             readString(source);

         // Can't use underlying source, which would have been exhausted
-        auto source = StringSource { *tee.data };
+        auto source = StringSource { *tee.saved.s };
-        addToStore(info, source, NoRepair, checkSigs, accessor);
+        addToStore(info, source, NoRepair, checkSigs);

         res.push_back(info.path);
     }
@@ -143,6 +143,7 @@ struct curlFileTransfer : public FileTransfer

         LambdaSink finalSink;
         std::shared_ptr<CompressionSink> decompressionSink;
+        std::optional<StringSink> errorSink;

         std::exception_ptr writeException;

@@ -159,12 +160,12 @@ struct curlFileTransfer : public FileTransfer
                 // the response around (which we figure won't be big
                 // like an actual download should be) to improve error
                 // messages.
-                decompressionSink = std::make_shared<TeeSink<ref<CompressionSink>>>(
-                    ref<CompressionSink>{ decompressionSink }
-                );
+                errorSink = StringSink { };
             }
         }

+        if (errorSink)
+            (*errorSink)((unsigned char *) contents, realSize);
         (*decompressionSink)((unsigned char *) contents, realSize);

         return realSize;
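The change above keeps a copy of the (usually small) response body in an errorSink while it streams to the normal consumer, so a later failure can quote the body — which is the point of this branch. A standalone sketch of the same capture pattern, using a plain std::string rather than Nix's StringSink; the class and member names are made up for illustration:

    // Sketch: retain a copy of an error response body while streaming it onward.
    #include <cstddef>
    #include <optional>
    #include <string>

    struct ResponseCapture
    {
        std::optional<std::string> errorSink;

        void onHeaders(long httpStatus)
        {
            // Only capture bodies of failed requests.
            if (httpStatus >= 400)
                errorSink = std::string();
        }

        void onData(const char * contents, size_t realSize)
        {
            if (errorSink)
                errorSink->append(contents, realSize);
            // ... pass `contents` on to the real sink as before ...
        }
    };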
@@ -419,9 +420,8 @@ struct curlFileTransfer : public FileTransfer
                 attempt++;

                 std::shared_ptr<std::string> response;
-                if (decompressionSink)
-                    if (auto teeSink = std::dynamic_pointer_cast<TeeSink<ref<CompressionSink>>>(decompressionSink))
-                        response = teeSink->data;
+                if (errorSink)
+                    response = errorSink->s;
                 auto exc =
                     code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
                     ? FileTransferError(Interrupted, response, "%s of '%s' was interrupted", request.verb(), request.uri)
@@ -262,11 +262,13 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
 void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots)
 {
     auto foundRoot = [&](const Path & path, const Path & target) {
-        auto storePath = maybeParseStorePath(toStorePath(target));
-        if (storePath && isValidPath(*storePath))
-            roots[std::move(*storePath)].emplace(path);
-        else
-            printInfo("skipping invalid root from '%1%' to '%2%'", path, target);
+        try {
+            auto storePath = toStorePath(target).first;
+            if (isValidPath(storePath))
+                roots[std::move(storePath)].emplace(path);
+            else
+                printInfo("skipping invalid root from '%1%' to '%2%'", path, target);
+        } catch (BadStorePath &) { }
     };

     try {
@@ -472,15 +474,15 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)

     for (auto & [target, links] : unchecked) {
         if (!isInStore(target)) continue;
-        Path pathS = toStorePath(target);
-        if (!isStorePath(pathS)) continue;
-        auto path = parseStorePath(pathS);
-        if (!isValidPath(path)) continue;
-        debug("got additional root '%1%'", pathS);
-        if (censor)
-            roots[path].insert(censored);
-        else
-            roots[path].insert(links.begin(), links.end());
+        try {
+            auto path = toStorePath(target).first;
+            if (!isValidPath(path)) continue;
+            debug("got additional root '%1%'", printStorePath(path));
+            if (censor)
+                roots[path].insert(censored);
+            else
+                roots[path].insert(links.begin(), links.end());
+        } catch (BadStorePath &) { }
     }
 }

@@ -365,6 +365,9 @@ public:

     Setting<bool> warnDirty{this, true, "warn-dirty",
         "Whether to warn about dirty Git/Mercurial trees."};

+    Setting<size_t> narBufferSize{this, 32 * 1024 * 1024, "nar-buffer-size",
+        "Maximum size of NARs before spilling them to disk."};
+
 };

@@ -100,11 +100,11 @@ protected:
     }

     void upsertFile(const std::string & path,
-        const std::string & data,
+        std::shared_ptr<std::basic_iostream<char>> istream,
         const std::string & mimeType) override
     {
         auto req = FileTransferRequest(cacheUri + "/" + path);
-        req.data = std::make_shared<string>(data); // FIXME: inefficient
+        req.data = std::make_shared<string>(StreamToSourceAdapter(istream).drain());
         req.mimeType = mimeType;
         try {
             getFileTransfer()->upload(req);
@@ -126,8 +126,7 @@ struct LegacySSHStore : public Store
     }

     void addToStore(const ValidPathInfo & info, Source & source,
-        RepairFlag repair, CheckSigsFlag checkSigs,
-        std::shared_ptr<FSAccessor> accessor) override
+        RepairFlag repair, CheckSigsFlag checkSigs) override
     {
         debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host);

@@ -31,8 +31,18 @@ protected:
     bool fileExists(const std::string & path) override;

     void upsertFile(const std::string & path,
-        const std::string & data,
-        const std::string & mimeType) override;
+        std::shared_ptr<std::basic_iostream<char>> istream,
+        const std::string & mimeType) override
+    {
+        auto path2 = binaryCacheDir + "/" + path;
+        Path tmp = path2 + ".tmp." + std::to_string(getpid());
+        AutoDelete del(tmp, false);
+        StreamToSourceAdapter source(istream);
+        writeFile(tmp, source);
+        if (rename(tmp.c_str(), path2.c_str()))
+            throw SysError("renaming '%1%' to '%2%'", tmp, path2);
+        del.cancel();
+    }

     void getFile(const std::string & path, Sink & sink) override
     {
|
||||||
if (entry.name.size() != 40 ||
|
if (entry.name.size() != 40 ||
|
||||||
!hasSuffix(entry.name, ".narinfo"))
|
!hasSuffix(entry.name, ".narinfo"))
|
||||||
continue;
|
continue;
|
||||||
paths.insert(parseStorePath(storeDir + "/" + entry.name.substr(0, entry.name.size() - 8)));
|
paths.insert(parseStorePath(
|
||||||
|
storeDir + "/" + entry.name.substr(0, entry.name.size() - 8)
|
||||||
|
+ "-" + MissingName));
|
||||||
}
|
}
|
||||||
|
|
||||||
return paths;
|
return paths;
|
||||||
|
@@ -68,28 +80,11 @@ void LocalBinaryCacheStore::init()
     BinaryCacheStore::init();
 }

-static void atomicWrite(const Path & path, const std::string & s)
-{
-    Path tmp = path + ".tmp." + std::to_string(getpid());
-    AutoDelete del(tmp, false);
-    writeFile(tmp, s);
-    if (rename(tmp.c_str(), path.c_str()))
-        throw SysError("renaming '%1%' to '%2%'", tmp, path);
-    del.cancel();
-}
-
 bool LocalBinaryCacheStore::fileExists(const std::string & path)
 {
     return pathExists(binaryCacheDir + "/" + path);
 }

-void LocalBinaryCacheStore::upsertFile(const std::string & path,
-    const std::string & data,
-    const std::string & mimeType)
-{
-    atomicWrite(binaryCacheDir + "/" + path, data);
-}
-
 static RegisterStoreImplementation regStore([](
     const std::string & uri, const Store::Params & params)
     -> std::shared_ptr<Store>
@@ -20,9 +20,9 @@ struct LocalStoreAccessor : public FSAccessor

     Path toRealPath(const Path & path)
     {
-        Path storePath = store->toStorePath(path);
-        if (!store->isValidPath(store->parseStorePath(storePath)))
-            throw InvalidPath("path '%1%' is not a valid store path", storePath);
+        auto storePath = store->toStorePath(path).first;
+        if (!store->isValidPath(storePath))
+            throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath));
         return store->getRealStoreDir() + std::string(path, store->storeDir.size());
     }

@@ -962,7 +962,7 @@ const PublicKeys & LocalStore::getPublicKeys()


 void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
-    RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr<FSAccessor> accessor)
+    RepairFlag repair, CheckSigsFlag checkSigs)
 {
     if (!info.narHash)
         throw Error("cannot add path '%s' because it lacks a hash", printStorePath(info.path));
@@ -976,7 +976,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

     PathLocks outputLock;

-    Path realPath = realStoreDir + "/" + std::string(info.path.to_string());
+    auto realPath = Store::toRealPath(info.path);

     /* Lock the output path. But don't lock if we're being called
        from a build hook (whose parent process already acquired a
@@ -1047,8 +1047,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
         /* The first check above is an optimisation to prevent
            unnecessary lock acquisition. */

-        Path realPath = realStoreDir + "/";
-        realPath += dstPath.to_string();
+        auto realPath = Store::toRealPath(dstPath);

         PathLocks outputLock({realPath});

@@ -1098,16 +1097,119 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
 {
     Path srcPath(absPath(_srcPath));

-    /* Read the whole path into memory. This is not a very scalable
-       method for very large paths, but `copyPath' is mainly used for
-       small files. */
-    StringSink sink;
-    if (method == FileIngestionMethod::Recursive)
-        dumpPath(srcPath, sink, filter);
-    else
-        sink.s = make_ref<std::string>(readFile(srcPath));
-
-    return addToStoreFromDump(*sink.s, name, method, hashAlgo, repair);
+    if (method != FileIngestionMethod::Recursive)
+        return addToStoreFromDump(readFile(srcPath), name, method, hashAlgo, repair);
+
+    /* For computing the NAR hash. */
+    auto sha256Sink = std::make_unique<HashSink>(htSHA256);
+
+    /* For computing the store path. In recursive SHA-256 mode, this
+       is the same as the NAR hash, so no need to do it again. */
+    std::unique_ptr<HashSink> hashSink =
+        hashAlgo == htSHA256
+        ? nullptr
+        : std::make_unique<HashSink>(hashAlgo);
+
+    /* Read the source path into memory, but only if it's up to
+       narBufferSize bytes. If it's larger, write it to a temporary
+       location in the Nix store. If the subsequently computed
+       destination store path is already valid, we just delete the
+       temporary path. Otherwise, we move it to the destination store
+       path. */
+    bool inMemory = true;
+    std::string nar;
+
+    auto source = sinkToSource([&](Sink & sink) {
+
+        LambdaSink sink2([&](const unsigned char * buf, size_t len) {
+            (*sha256Sink)(buf, len);
+            if (hashSink) (*hashSink)(buf, len);
+
+            if (inMemory) {
+                if (nar.size() + len > settings.narBufferSize) {
+                    inMemory = false;
+                    sink << 1;
+                    sink((const unsigned char *) nar.data(), nar.size());
+                    nar.clear();
+                } else {
+                    nar.append((const char *) buf, len);
+                }
+            }
+
+            if (!inMemory) sink(buf, len);
+        });
+
+        dumpPath(srcPath, sink2, filter);
+    });
+
+    std::unique_ptr<AutoDelete> delTempDir;
+    Path tempPath;
+
+    try {
+        /* Wait for the source coroutine to give us some dummy
+           data. This is so that we don't create the temporary
+           directory if the NAR fits in memory. */
+        readInt(*source);
+
+        auto tempDir = createTempDir(realStoreDir, "add");
+        delTempDir = std::make_unique<AutoDelete>(tempDir);
+        tempPath = tempDir + "/x";
+
+        restorePath(tempPath, *source);
+
+    } catch (EndOfFile &) {
+        if (!inMemory) throw;
+        /* The NAR fits in memory, so we didn't do restorePath(). */
+    }
+
+    auto sha256 = sha256Sink->finish();
+
+    Hash hash = hashSink ? hashSink->finish().first : sha256.first;
+
+    auto dstPath = makeFixedOutputPath(method, hash, name);
+
+    addTempRoot(dstPath);
+
+    if (repair || !isValidPath(dstPath)) {
+
+        /* The first check above is an optimisation to prevent
+           unnecessary lock acquisition. */
+
+        auto realPath = Store::toRealPath(dstPath);
+
+        PathLocks outputLock({realPath});
+
+        if (repair || !isValidPath(dstPath)) {
+
+            deletePath(realPath);
+
+            autoGC();
+
+            if (inMemory) {
+                /* Restore from the NAR in memory. */
+                StringSource source(nar);
+                restorePath(realPath, source);
+            } else {
+                /* Move the temporary path we restored above. */
+                if (rename(tempPath.c_str(), realPath.c_str()))
+                    throw Error("renaming '%s' to '%s'", tempPath, realPath);
+            }
+
+            canonicalisePathMetaData(realPath, -1); // FIXME: merge into restorePath
+
+            optimisePath(realPath);
+
+            ValidPathInfo info(dstPath);
+            info.narHash = sha256.first;
+            info.narSize = sha256.second;
+            info.ca = FixedOutputHash { .method = method, .hash = hash };
+            registerValidPath(info);
+        }
+
+        outputLock.setDeletion(true);
+    }
+
+    return dstPath;
 }

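The rewritten LocalStore::addToStore above buffers the NAR in memory only up to nar-buffer-size and spills to a temporary file beyond that, so small paths stay fast while huge ones no longer blow up memory. A self-contained sketch of the same buffer-then-spill idea; SpillBuffer, its threshold, and its file handling are illustrative, not Nix code:

    // Sketch: accumulate data in memory, spill to a temporary file past a threshold.
    #include <cstddef>
    #include <cstdio>
    #include <string>

    struct SpillBuffer
    {
        size_t threshold;
        std::string mem;            // in-memory buffer while the data is small
        std::FILE * file = nullptr; // temporary file once we have spilled

        explicit SpillBuffer(size_t threshold) : threshold(threshold) { }

        void append(const char * data, size_t len)
        {
            if (!file && mem.size() + len > threshold) {
                // Switch to disk: flush what we buffered so far, then keep appending.
                file = std::tmpfile();
                std::fwrite(mem.data(), 1, mem.size(), file);
                mem.clear();
            }
            if (file)
                std::fwrite(data, 1, len, file);
            else
                mem.append(data, len);
        }

        ~SpillBuffer() { if (file) std::fclose(file); }
    };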
@@ -1121,8 +1223,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,

     if (repair || !isValidPath(dstPath)) {

-        Path realPath = realStoreDir + "/";
-        realPath += dstPath.to_string();
+        auto realPath = Store::toRealPath(dstPath);

         PathLocks outputLock({realPath});

@@ -143,8 +143,7 @@ public:
         SubstitutablePathInfos & infos) override;

     void addToStore(const ValidPathInfo & info, Source & source,
-        RepairFlag repair, CheckSigsFlag checkSigs,
-        std::shared_ptr<FSAccessor> accessor) override;
+        RepairFlag repair, CheckSigsFlag checkSigs) override;

     StorePath addToStore(const string & name, const Path & srcPath,
         FileIngestionMethod method, HashType hashAlgo,
@@ -18,7 +18,7 @@ struct NarMember

     /* If this is a regular file, position of the contents of this
        file in the NAR. */
-    size_t start = 0, size = 0;
+    uint64_t start = 0, size = 0;

     std::string target;

@@ -34,17 +34,19 @@ struct NarAccessor : public FSAccessor

     NarMember root;

-    struct NarIndexer : ParseSink, StringSource
+    struct NarIndexer : ParseSink, Source
     {
         NarAccessor & acc;
+        Source & source;
+
         std::stack<NarMember *> parents;

-        std::string currentStart;
         bool isExec = false;

-        NarIndexer(NarAccessor & acc, const std::string & nar)
-            : StringSource(nar), acc(acc)
+        uint64_t pos = 0;
+
+        NarIndexer(NarAccessor & acc, Source & source)
+            : acc(acc), source(source)
         { }

         void createMember(const Path & path, NarMember member) {
@@ -79,31 +81,38 @@ struct NarAccessor : public FSAccessor

         void preallocateContents(unsigned long long size) override
         {
-            currentStart = string(s, pos, 16);
-            assert(size <= std::numeric_limits<size_t>::max());
-            parents.top()->size = (size_t)size;
+            assert(size <= std::numeric_limits<uint64_t>::max());
+            parents.top()->size = (uint64_t) size;
             parents.top()->start = pos;
         }

         void receiveContents(unsigned char * data, unsigned int len) override
-        {
-            // Sanity check
-            if (!currentStart.empty()) {
-                assert(len < 16 || currentStart == string((char *) data, 16));
-                currentStart.clear();
-            }
-        }
+        { }

         void createSymlink(const Path & path, const string & target) override
         {
             createMember(path,
                 NarMember{FSAccessor::Type::tSymlink, false, 0, 0, target});
         }

+        size_t read(unsigned char * data, size_t len) override
+        {
+            auto n = source.read(data, len);
+            pos += n;
+            return n;
+        }
     };

     NarAccessor(ref<const std::string> nar) : nar(nar)
     {
-        NarIndexer indexer(*this, *nar);
+        StringSource source(*nar);
+        NarIndexer indexer(*this, source);
+        parseDump(indexer, indexer);
+    }
+
+    NarAccessor(Source & source)
+    {
+        NarIndexer indexer(*this, source);
         parseDump(indexer, indexer);
     }

@@ -219,6 +228,11 @@ ref<FSAccessor> makeNarAccessor(ref<const std::string> nar)
     return make_ref<NarAccessor>(nar);
 }

+ref<FSAccessor> makeNarAccessor(Source & source)
+{
+    return make_ref<NarAccessor>(source);
+}
+
 ref<FSAccessor> makeLazyNarAccessor(const std::string & listing,
     GetNarBytes getNarBytes)
 {
@@ -6,10 +6,14 @@

 namespace nix {

+struct Source;
+
 /* Return an object that provides access to the contents of a NAR
    file. */
 ref<FSAccessor> makeNarAccessor(ref<const std::string> nar);

+ref<FSAccessor> makeNarAccessor(Source & source);
+
 /* Create a NAR accessor from a NAR listing (in the format produced by
    listNar()). The callback getNarBytes(offset, length) is used by the
    readFile() method of the accessor to get the contents of files
@@ -2,8 +2,6 @@

 namespace nix {

-MakeError(BadStorePath, Error);
-
 static void checkName(std::string_view path, std::string_view name)
 {
     if (name.empty())
@@ -16,26 +16,26 @@ RemoteFSAccessor::RemoteFSAccessor(ref<Store> store, const Path & cacheDir)
     createDirs(cacheDir);
 }

-Path RemoteFSAccessor::makeCacheFile(const Path & storePath, const std::string & ext)
+Path RemoteFSAccessor::makeCacheFile(std::string_view hashPart, const std::string & ext)
 {
     assert(cacheDir != "");
-    return fmt("%s/%s.%s", cacheDir, store->parseStorePath(storePath).hashPart(), ext);
+    return fmt("%s/%s.%s", cacheDir, hashPart, ext);
 }

-void RemoteFSAccessor::addToCache(const Path & storePath, const std::string & nar,
+void RemoteFSAccessor::addToCache(std::string_view hashPart, const std::string & nar,
     ref<FSAccessor> narAccessor)
 {
-    nars.emplace(storePath, narAccessor);
+    nars.emplace(hashPart, narAccessor);

     if (cacheDir != "") {
         try {
             std::ostringstream str;
             JSONPlaceholder jsonRoot(str);
             listNar(jsonRoot, narAccessor, "", true);
-            writeFile(makeCacheFile(storePath, "ls"), str.str());
+            writeFile(makeCacheFile(hashPart, "ls"), str.str());

             /* FIXME: do this asynchronously. */
-            writeFile(makeCacheFile(storePath, "nar"), nar);
+            writeFile(makeCacheFile(hashPart, "nar"), nar);

         } catch (...) {
             ignoreException();
|
||||||
{
|
{
|
||||||
auto path = canonPath(path_);
|
auto path = canonPath(path_);
|
||||||
|
|
||||||
auto storePath = store->toStorePath(path);
|
auto [storePath, restPath] = store->toStorePath(path);
|
||||||
std::string restPath = std::string(path, storePath.size());
|
|
||||||
|
|
||||||
if (!store->isValidPath(store->parseStorePath(storePath)))
|
if (!store->isValidPath(storePath))
|
||||||
throw InvalidPath("path '%1%' is not a valid store path", storePath);
|
throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath));
|
||||||
|
|
||||||
auto i = nars.find(storePath);
|
auto i = nars.find(std::string(storePath.hashPart()));
|
||||||
if (i != nars.end()) return {i->second, restPath};
|
if (i != nars.end()) return {i->second, restPath};
|
||||||
|
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
std::string listing;
|
std::string listing;
|
||||||
Path cacheFile;
|
Path cacheFile;
|
||||||
|
|
||||||
if (cacheDir != "" && pathExists(cacheFile = makeCacheFile(storePath, "nar"))) {
|
if (cacheDir != "" && pathExists(cacheFile = makeCacheFile(storePath.hashPart(), "nar"))) {
|
||||||
|
|
||||||
try {
|
try {
|
||||||
listing = nix::readFile(makeCacheFile(storePath, "ls"));
|
listing = nix::readFile(makeCacheFile(storePath.hashPart(), "ls"));
|
||||||
|
|
||||||
auto narAccessor = makeLazyNarAccessor(listing,
|
auto narAccessor = makeLazyNarAccessor(listing,
|
||||||
[cacheFile](uint64_t offset, uint64_t length) {
|
[cacheFile](uint64_t offset, uint64_t length) {
|
||||||
|
@ -81,7 +80,7 @@ std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_)
|
||||||
return buf;
|
return buf;
|
||||||
});
|
});
|
||||||
|
|
||||||
nars.emplace(storePath, narAccessor);
|
nars.emplace(storePath.hashPart(), narAccessor);
|
||||||
return {narAccessor, restPath};
|
return {narAccessor, restPath};
|
||||||
|
|
||||||
} catch (SysError &) { }
|
} catch (SysError &) { }
|
||||||
|
@ -90,15 +89,15 @@ std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_)
|
||||||
*sink.s = nix::readFile(cacheFile);
|
*sink.s = nix::readFile(cacheFile);
|
||||||
|
|
||||||
auto narAccessor = makeNarAccessor(sink.s);
|
auto narAccessor = makeNarAccessor(sink.s);
|
||||||
nars.emplace(storePath, narAccessor);
|
nars.emplace(storePath.hashPart(), narAccessor);
|
||||||
return {narAccessor, restPath};
|
return {narAccessor, restPath};
|
||||||
|
|
||||||
} catch (SysError &) { }
|
} catch (SysError &) { }
|
||||||
}
|
}
|
||||||
|
|
||||||
store->narFromPath(store->parseStorePath(storePath), sink);
|
store->narFromPath(storePath, sink);
|
||||||
auto narAccessor = makeNarAccessor(sink.s);
|
auto narAccessor = makeNarAccessor(sink.s);
|
||||||
addToCache(storePath, *sink.s, narAccessor);
|
addToCache(storePath.hashPart(), *sink.s, narAccessor);
|
||||||
return {narAccessor, restPath};
|
return {narAccessor, restPath};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -10,7 +10,7 @@ class RemoteFSAccessor : public FSAccessor
 {
     ref<Store> store;

-    std::map<Path, ref<FSAccessor>> nars;
+    std::map<std::string, ref<FSAccessor>> nars;

     Path cacheDir;

|
||||||
|
|
||||||
friend class BinaryCacheStore;
|
friend class BinaryCacheStore;
|
||||||
|
|
||||||
Path makeCacheFile(const Path & storePath, const std::string & ext);
|
Path makeCacheFile(std::string_view hashPart, const std::string & ext);
|
||||||
|
|
||||||
void addToCache(const Path & storePath, const std::string & nar,
|
void addToCache(std::string_view hashPart, const std::string & nar,
|
||||||
ref<FSAccessor> narAccessor);
|
ref<FSAccessor> narAccessor);
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
|
|
@@ -466,7 +466,7 @@ std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string &
 
 
 void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
-RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr<FSAccessor> accessor)
+RepairFlag repair, CheckSigsFlag checkSigs)
 {
 auto conn(getConnection());

@@ -60,8 +60,7 @@ public:
 SubstitutablePathInfos & infos) override;
 
 void addToStore(const ValidPathInfo & info, Source & nar,
-RepairFlag repair, CheckSigsFlag checkSigs,
-std::shared_ptr<FSAccessor> accessor) override;
+RepairFlag repair, CheckSigsFlag checkSigs) override;
 
 StorePath addToStore(const string & name, const Path & srcPath,
 FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
@@ -7,7 +7,6 @@
 #include "globals.hh"
 #include "compression.hh"
 #include "filetransfer.hh"
-#include "istringstream_nocopy.hh"
 
 #include <aws/core/Aws.h>
 #include <aws/core/VersionConfig.h>

@@ -262,12 +261,11 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
 std::shared_ptr<TransferManager> transferManager;
 std::once_flag transferManagerCreated;
 
-void uploadFile(const std::string & path, const std::string & data,
+void uploadFile(const std::string & path,
+std::shared_ptr<std::basic_iostream<char>> istream,
 const std::string & mimeType,
 const std::string & contentEncoding)
 {
-auto stream = std::make_shared<istringstream_nocopy>(data);
-
 auto maxThreads = std::thread::hardware_concurrency();
 
 static std::shared_ptr<Aws::Utils::Threading::PooledThreadExecutor>

@@ -307,7 +305,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
 
 std::shared_ptr<TransferHandle> transferHandle =
 transferManager->UploadFile(
-stream, bucketName, path, mimeType,
+istream, bucketName, path, mimeType,
 Aws::Map<Aws::String, Aws::String>(),
 nullptr /*, contentEncoding */);

@@ -333,9 +331,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
 if (contentEncoding != "")
 request.SetContentEncoding(contentEncoding);
 
-auto stream = std::make_shared<istringstream_nocopy>(data);
-
-request.SetBody(stream);
+request.SetBody(istream);
 
 auto result = checkAws(fmt("AWS error uploading '%s'", path),
 s3Helper.client->PutObject(request));

@@ -347,25 +343,34 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
 std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1)
 .count();
 
-printInfo(format("uploaded 's3://%1%/%2%' (%3% bytes) in %4% ms") %
-bucketName % path % data.size() % duration);
+auto size = istream->tellg();
+
+printInfo("uploaded 's3://%s/%s' (%d bytes) in %d ms",
+bucketName, path, size, duration);
 
 stats.putTimeMs += duration;
-stats.putBytes += data.size();
+stats.putBytes += size;
 stats.put++;
 }
 
-void upsertFile(const std::string & path, const std::string & data,
+void upsertFile(const std::string & path,
+std::shared_ptr<std::basic_iostream<char>> istream,
 const std::string & mimeType) override
 {
+auto compress = [&](std::string compression)
+{
+auto compressed = nix::compress(compression, StreamToSourceAdapter(istream).drain());
+return std::make_shared<std::stringstream>(std::move(*compressed));
+};
+
 if (narinfoCompression != "" && hasSuffix(path, ".narinfo"))
-uploadFile(path, *compress(narinfoCompression, data), mimeType, narinfoCompression);
+uploadFile(path, compress(narinfoCompression), mimeType, narinfoCompression);
 else if (lsCompression != "" && hasSuffix(path, ".ls"))
-uploadFile(path, *compress(lsCompression, data), mimeType, lsCompression);
+uploadFile(path, compress(lsCompression), mimeType, lsCompression);
 else if (logCompression != "" && hasPrefix(path, "log/"))
-uploadFile(path, *compress(logCompression, data), mimeType, logCompression);
+uploadFile(path, compress(logCompression), mimeType, logCompression);
 else
-uploadFile(path, data, mimeType, "");
+uploadFile(path, istream, mimeType, "");
 }
 
 void getFile(const std::string & path, Sink & sink) override

@@ -410,7 +415,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
 for (auto object : contents) {
 auto & key = object.GetKey();
 if (key.size() != 40 || !hasSuffix(key, ".narinfo")) continue;
-paths.insert(parseStorePath(storeDir + "/" + key.substr(0, key.size() - 8) + "-unknown"));
+paths.insert(parseStorePath(storeDir + "/" + key.substr(0, key.size() - 8) + "-" + MissingName));
 }
 
 marker = res.GetNextMarker();
@@ -7,6 +7,7 @@
 #include "json.hh"
 #include "derivations.hh"
 #include "url.hh"
+#include "archive.hh"
 
 #include <future>

@@ -20,15 +21,15 @@ bool Store::isInStore(const Path & path) const
 }
 
 
-Path Store::toStorePath(const Path & path) const
+std::pair<StorePath, Path> Store::toStorePath(const Path & path) const
 {
 if (!isInStore(path))
 throw Error("path '%1%' is not in the Nix store", path);
 Path::size_type slash = path.find('/', storeDir.size() + 1);
 if (slash == Path::npos)
-return path;
+return {parseStorePath(path), ""};
 else
-return Path(path, 0, slash);
+return {parseStorePath(std::string_view(path).substr(0, slash)), path.substr(slash)};
 }
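
A minimal usage sketch of the pair-returning toStorePath() above (not part of the change itself); 'store' is assumed to be a ref<Store> and 'userPath' any path below a store path, both hypothetical names:

    // hypothetical example; userPath might look like "/nix/store/<hash>-foo/bin/bar"
    auto [storePath, restPath] = store->toStorePath(userPath);
    if (store->isValidPath(storePath))
        std::cout << store->printStorePath(storePath) << restPath << std::endl;
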
@@ -41,14 +42,14 @@ Path Store::followLinksToStore(std::string_view _path) const
 path = absPath(target, dirOf(path));
 }
 if (!isInStore(path))
-throw NotInStore("path '%1%' is not in the Nix store", path);
+throw BadStorePath("path '%1%' is not in the Nix store", path);
 return path;
 }
 
 
 StorePath Store::followLinksToStorePath(std::string_view path) const
 {
-return parseStorePath(toStorePath(followLinksToStore(path)));
+return toStorePath(followLinksToStore(path)).first;
 }
@@ -221,6 +222,40 @@ StorePath Store::computeStorePathForText(const string & name, const string & s,
 }
 
 
+ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
+FileIngestionMethod method, HashType hashAlgo,
+std::optional<Hash> expectedCAHash)
+{
+/* FIXME: inefficient: we're reading/hashing 'tmpFile' three
+times. */
+
+auto [narHash, narSize] = hashPath(htSHA256, srcPath);
+
+auto hash = method == FileIngestionMethod::Recursive
+? hashAlgo == htSHA256
+? narHash
+: hashPath(hashAlgo, srcPath).first
+: hashFile(hashAlgo, srcPath);
+
+if (expectedCAHash && expectedCAHash != hash)
+throw Error("hash mismatch for '%s'", srcPath);
+
+ValidPathInfo info(makeFixedOutputPath(method, hash, name));
+info.narHash = narHash;
+info.narSize = narSize;
+info.ca = FixedOutputHash { .method = method, .hash = hash };
+
+if (!isValidPath(info.path)) {
+auto source = sinkToSource([&](Sink & sink) {
+dumpPath(srcPath, sink);
+});
+addToStore(info, *source);
+}
+
+return info;
+}
+
+
 Store::Store(const Params & params)
 : Config(params)
 , state({(size_t) pathInfoCacheSize})
@@ -316,6 +351,14 @@ ref<const ValidPathInfo> Store::queryPathInfo(const StorePath & storePath)
 }
 
 
+static bool goodStorePath(const StorePath & expected, const StorePath & actual)
+{
+return
+expected.hashPart() == actual.hashPart()
+&& (expected.name() == Store::MissingName || expected.name() == actual.name());
+}
+
+
 void Store::queryPathInfo(const StorePath & storePath,
 Callback<ref<const ValidPathInfo>> callback) noexcept
 {

@@ -343,7 +386,7 @@ void Store::queryPathInfo(const StorePath & storePath,
 state_->pathInfoCache.upsert(hashPart,
 res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second });
 if (res.first == NarInfoDiskCache::oInvalid ||
-res.second->path != storePath)
+!goodStorePath(storePath, res.second->path))
 throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
 }
 return callback(ref<const ValidPathInfo>(res.second));

@@ -355,7 +398,7 @@ void Store::queryPathInfo(const StorePath & storePath,
 auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
 
 queryPathInfoUncached(storePath,
-{[this, storePath{printStorePath(storePath)}, hashPart, callbackPtr](std::future<std::shared_ptr<const ValidPathInfo>> fut) {
+{[this, storePathS{printStorePath(storePath)}, hashPart, callbackPtr](std::future<std::shared_ptr<const ValidPathInfo>> fut) {
 
 try {
 auto info = fut.get();

@@ -368,9 +411,11 @@ void Store::queryPathInfo(const StorePath & storePath,
 state_->pathInfoCache.upsert(hashPart, PathInfoCacheValue { .value = info });
 }
 
-if (!info || info->path != parseStorePath(storePath)) {
+auto storePath = parseStorePath(storePathS);
+
+if (!info || !goodStorePath(storePath, info->path)) {
 stats.narInfoMissing++;
-throw InvalidPath("path '%s' is not valid", storePath);
+throw InvalidPath("path '%s' is not valid", storePathS);
 }
 
 (*callbackPtr)(ref<const ValidPathInfo>(info));

@@ -515,7 +560,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
 if (!narInfo->url.empty())
 jsonPath.attr("url", narInfo->url);
 if (narInfo->fileHash)
-jsonPath.attr("downloadHash", narInfo->fileHash.to_string(Base32, true));
+jsonPath.attr("downloadHash", narInfo->fileHash.to_string(hashBase, true));
 if (narInfo->fileSize)
 jsonPath.attr("downloadSize", narInfo->fileSize);
 if (showClosureSize)
@@ -31,7 +31,7 @@ MakeError(InvalidPath, Error);
 MakeError(Unsupported, Error);
 MakeError(SubstituteGone, Error);
 MakeError(SubstituterDisabled, Error);
-MakeError(NotInStore, Error);
+MakeError(BadStorePath, Error);
 
 
 class FSAccessor;

@@ -317,9 +317,9 @@ public:
 the Nix store. */
 bool isStorePath(std::string_view path) const;
 
-/* Chop off the parts after the top-level store name, e.g.,
-/nix/store/abcd-foo/bar => /nix/store/abcd-foo. */
-Path toStorePath(const Path & path) const;
+/* Split a path like /nix/store/<hash>-<name>/<bla> into
+/nix/store/<hash>-<name> and /<bla>. */
+std::pair<StorePath, Path> toStorePath(const Path & path) const;
 
 /* Follow symlinks until we end up with a path in the Nix store. */
 Path followLinksToStore(std::string_view path) const;
@@ -384,13 +384,16 @@ public:
 SubstituteFlag maybeSubstitute = NoSubstitute);
 
 /* Query the set of all valid paths. Note that for some store
-backends, the name part of store paths may be omitted
-(i.e. you'll get /nix/store/<hash> rather than
+backends, the name part of store paths may be replaced by 'x'
+(i.e. you'll get /nix/store/<hash>-x rather than
 /nix/store/<hash>-<name>). Use queryPathInfo() to obtain the
-full store path. */
+full store path. FIXME: should return a set of
+std::variant<StorePath, HashPart> to get rid of this hack. */
 virtual StorePathSet queryAllValidPaths()
 { unsupported("queryAllValidPaths"); }
 
+constexpr static const char * MissingName = "x";
+
 /* Query information about a valid path. It is permitted to omit
 the name part of the store path. */
 ref<const ValidPathInfo> queryPathInfo(const StorePath & path);
@@ -439,8 +442,7 @@ public:
 
 /* Import a path into the store. */
 virtual void addToStore(const ValidPathInfo & info, Source & narSource,
-RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs,
-std::shared_ptr<FSAccessor> accessor = 0) = 0;
+RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) = 0;
 
 /* Copy the contents of a path to the store and register the
 validity the resulting path. The resulting path is returned.
@@ -450,6 +452,13 @@ public:
 FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
 PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0;
 
+/* Copy the contents of a path to the store and register the
+validity the resulting path, using a constant amount of
+memory. */
+ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
+FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
+std::optional<Hash> expectedCAHash = {});
+
 // FIXME: remove?
 virtual StorePath addToStoreFromDump(const string & dump, const string & name,
 FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
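
A minimal sketch of calling the addToStoreSlow() declared above, mirroring how nix-prefetch-url and 'nix-store --add-fixed' use it later in this change; 'store' (a ref<Store>), 'srcPath' and the name "example" are hypothetical:

    auto info = store->addToStoreSlow("example", srcPath, FileIngestionMethod::Flat, htSHA256);
    std::cout << store->printStorePath(info.path) << std::endl;  // info.ca carries the content address
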
@@ -616,8 +625,7 @@ public:
 the Nix store. Optionally, the contents of the NARs are
 preloaded into the specified FS accessor to speed up subsequent
 access. */
-StorePaths importPaths(Source & source, std::shared_ptr<FSAccessor> accessor,
-CheckSigsFlag checkSigs = CheckSigs);
+StorePaths importPaths(Source & source, CheckSigsFlag checkSigs = CheckSigs);
 
 struct Stats
 {
@@ -11,7 +11,7 @@ namespace nix {
 #define ANSI_GREEN "\e[32;1m"
 #define ANSI_YELLOW "\e[33;1m"
 #define ANSI_BLUE "\e[34;1m"
-#define ANSI_MAGENTA "\e[35m;1m"
-#define ANSI_CYAN "\e[36m;1m"
+#define ANSI_MAGENTA "\e[35;1m"
+#define ANSI_CYAN "\e[36;1m"
 
 }
@@ -63,6 +63,14 @@ struct ParseSink
 virtual void createSymlink(const Path & path, const string & target) { };
 };
 
+struct TeeParseSink : ParseSink
+{
+StringSink saved;
+TeeSource source;
+
+TeeParseSink(Source & source) : source(source, saved) { }
+};
+
 void parseDump(ParseSink & sink, Source & source);
 
 void restorePath(const Path & path, Source & source);
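
A minimal sketch of what the new TeeParseSink is for, assuming a Source named 'from' that carries a NAR stream (hypothetical name): parsing through the tee also captures the raw NAR bytes in 'saved':

    TeeParseSink tee(from);
    parseDump(tee, tee.source);      // parse the NAR; the bytes read are copied into tee.saved
    std::string nar = *tee.saved.s;  // the captured NAR, e.g. for hashing or re-serialising
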
@@ -25,16 +25,4 @@ MakeError(UnknownCompressionMethod, Error);
 
 MakeError(CompressionError, Error);
 
-template<>
-struct TeeSink<ref<CompressionSink>> : CompressionSink
-{
-MAKE_TEE_SINK(ref<CompressionSink>);
-void finish() override {
-orig->finish();
-}
-void write(const unsigned char * data, size_t len) override {
-return orig->write(data, len);
-}
-};
-
 }
@@ -355,26 +355,21 @@ std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool show
 
 for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter)
 {
-try {
-out << std::endl << prefix;
-out << ANSI_BLUE << "trace: " << ANSI_NORMAL << iter->hint.str();
-
-nl = true;
-if (*iter->pos) {
-auto pos = iter->pos.value();
-out << std::endl << prefix;
-
-printAtPos(prefix, pos, out);
-auto loc = getCodeLines(pos);
-if (loc.has_value())
-{
-out << std::endl << prefix;
-printCodeLines(out, prefix, pos, *loc);
-out << std::endl << prefix;
-}
-}
-} catch(const std::bad_optional_access& e) {
-out << iter->hint.str() << std::endl;
-}
+out << std::endl << prefix;
+out << ANSI_BLUE << "trace: " << ANSI_NORMAL << iter->hint.str();
+
+if (iter->pos.has_value() && (*iter->pos)) {
+auto pos = iter->pos.value();
+out << std::endl << prefix;
+printAtPos(prefix, pos, out);
+
+auto loc = getCodeLines(pos);
+if (loc.has_value())
+{
+out << std::endl << prefix;
+printCodeLines(out, prefix, pos, *loc);
+out << std::endl << prefix;
+}
+}
 }
 }
@@ -1,8 +1,8 @@
 #pragma once
 
-
 #include "ref.hh"
 #include "types.hh"
+#include "fmt.hh"
 
 #include <cstring>
 #include <list>

@@ -10,7 +10,9 @@
 #include <map>
 #include <optional>
 
-#include "fmt.hh"
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
 
 /* Before 4.7, gcc's std::exception uses empty throw() specifiers for
 * its (virtual) destructor and what() in c++11 mode, in violation of spec

@@ -197,8 +199,9 @@ public:
 
 template<typename... Args>
 SysError(const Args & ... args)
-: Error(""), errNo(errno)
+: Error("")
 {
+errNo = errno;
 auto hf = hintfmt(args...);
 err.hint = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
 }
@@ -8,7 +8,6 @@
 #include "hash.hh"
 #include "archive.hh"
 #include "util.hh"
-#include "istringstream_nocopy.hh"
 
 #include <sys/types.h>
 #include <sys/stat.h>
@@ -1,92 +0,0 @@
-/* This file provides a variant of std::istringstream that doesn't
-copy its string argument. This is useful for large strings. The
-caller must ensure that the string object is not destroyed while
-it's referenced by this object. */
-
-#pragma once
-
-#include <string>
-#include <iostream>
-
-template <class CharT, class Traits = std::char_traits<CharT>, class Allocator = std::allocator<CharT>>
-class basic_istringbuf_nocopy : public std::basic_streambuf<CharT, Traits>
-{
-public:
-typedef std::basic_string<CharT, Traits, Allocator> string_type;
-
-typedef typename std::basic_streambuf<CharT, Traits>::off_type off_type;
-
-typedef typename std::basic_streambuf<CharT, Traits>::pos_type pos_type;
-
-typedef typename std::basic_streambuf<CharT, Traits>::int_type int_type;
-
-typedef typename std::basic_streambuf<CharT, Traits>::traits_type traits_type;
-
-private:
-const string_type & s;
-
-off_type off;
-
-public:
-basic_istringbuf_nocopy(const string_type & s) : s{s}, off{0}
-{
-}
-
-private:
-pos_type seekoff(off_type off, std::ios_base::seekdir dir, std::ios_base::openmode which)
-{
-if (which & std::ios_base::in) {
-this->off = dir == std::ios_base::beg
-? off
-: (dir == std::ios_base::end
-? s.size() + off
-: this->off + off);
-}
-return pos_type(this->off);
-}
-
-pos_type seekpos(pos_type pos, std::ios_base::openmode which)
-{
-return seekoff(pos, std::ios_base::beg, which);
-}
-
-std::streamsize showmanyc()
-{
-return s.size() - off;
-}
-
-int_type underflow()
-{
-if (typename string_type::size_type(off) == s.size())
-return traits_type::eof();
-return traits_type::to_int_type(s[off]);
-}
-
-int_type uflow()
-{
-if (typename string_type::size_type(off) == s.size())
-return traits_type::eof();
-return traits_type::to_int_type(s[off++]);
-}
-
-int_type pbackfail(int_type ch)
-{
-if (off == 0 || (ch != traits_type::eof() && ch != s[off - 1]))
-return traits_type::eof();
-
-return traits_type::to_int_type(s[--off]);
-}
-
-};
-
-template <class CharT, class Traits = std::char_traits<CharT>, class Allocator = std::allocator<CharT>>
-class basic_istringstream_nocopy : public std::basic_iostream<CharT, Traits>
-{
-typedef basic_istringbuf_nocopy<CharT, Traits, Allocator> buf_type;
-buf_type buf;
-public:
-basic_istringstream_nocopy(const typename buf_type::string_type & s) :
-std::basic_iostream<CharT, Traits>(&buf), buf(s) {};
-};
-
-typedef basic_istringstream_nocopy<char> istringstream_nocopy;
@@ -82,7 +82,6 @@ public:
 log(ei.level, oss.str());
 }
 
-
 void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
 const std::string & s, const Fields & fields, ActivityId parent)
 override
@@ -166,43 +166,34 @@ struct StringSource : Source
 };
 
 
-/* Adapter class of a Source that saves all data read to `s'. */
+/* A sink that writes all incoming data to two other sinks. */
+struct TeeSink : Sink
+{
+Sink & sink1, & sink2;
+TeeSink(Sink & sink1, Sink & sink2) : sink1(sink1), sink2(sink2) { }
+virtual void operator () (const unsigned char * data, size_t len)
+{
+sink1(data, len);
+sink2(data, len);
+}
+};
+
+
+/* Adapter class of a Source that saves all data read to a sink. */
 struct TeeSource : Source
 {
 Source & orig;
-ref<std::string> data;
-TeeSource(Source & orig)
-: orig(orig), data(make_ref<std::string>()) { }
+Sink & sink;
+TeeSource(Source & orig, Sink & sink)
+: orig(orig), sink(sink) { }
 size_t read(unsigned char * data, size_t len)
 {
 size_t n = orig.read(data, len);
-this->data->append((const char *) data, n);
+sink(data, len);
 return n;
 }
 };
 
-#define MAKE_TEE_SINK(T) \
-T orig; \
-ref<std::string> data; \
-TeeSink(T && orig) \
-: orig(std::move(orig)), data(make_ref<std::string>()) { } \
-void operator () (const unsigned char * data, size_t len) { \
-this->data->append((const char *) data, len); \
-(*this->orig)(data, len); \
-} \
-void operator () (const std::string & s) \
-{ \
-*data += s; \
-(*this->orig)(s); \
-}
-
-template<typename T>
-struct TeeSink : Sink
-{
-MAKE_TEE_SINK(T);
-};
-
 
 /* A reader that consumes the original Source until 'size'. */
 struct SizedSource : Source
 {
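
A minimal sketch of the reworked TeeSource, assuming an existing Source named 'in' (hypothetical): whatever is read through the tee is also forwarded to the given sink, here a StringSink:

    StringSink copy;
    TeeSource tee(in, copy);
    unsigned char buf[1024];
    size_t n = tee.read(buf, sizeof(buf));  // data lands in buf and is forwarded to 'copy'
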
@@ -358,4 +349,27 @@ Source & operator >> (Source & in, bool & b)
 }
 
 
+/* An adapter that converts a std::basic_istream into a source. */
+struct StreamToSourceAdapter : Source
+{
+std::shared_ptr<std::basic_istream<char>> istream;
+
+StreamToSourceAdapter(std::shared_ptr<std::basic_istream<char>> istream)
+: istream(istream)
+{ }
+
+size_t read(unsigned char * data, size_t len) override
+{
+if (!istream->read((char *) data, len)) {
+if (istream->eof()) {
+if (istream->gcount() == 0)
+throw EndOfFile("end of file");
+} else
+throw Error("I/O error in StreamToSourceAdapter");
+}
+return istream->gcount();
+}
+};
+
+
 }
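
A minimal sketch of StreamToSourceAdapter, assuming a std::stringstream holding some payload (hypothetical): it lets iostream-based data feed any consumer of Source, the same way upsertFile() above drains the stream it is given:

    auto ss = std::make_shared<std::stringstream>("hello");
    StreamToSourceAdapter source(ss);
    std::string data = source.drain();  // reads the stream to its end through the Source interface
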
@@ -251,18 +251,19 @@ namespace nix {
 TEST(addTrace, showTracesWithShowTrace) {
 SymbolTable testTable;
 auto problem_file = testTable.create(test_file);
 
 auto oneliner_file = testTable.create(one_liner);
+auto invalidfilename = testTable.create("invalid filename");
 
 auto e = AssertionError(ErrorInfo {
 .name = "wat",
-.description = "a well-known problem occurred",
+.description = "show-traces",
 .hint = hintfmt("it has been %1% days since our last error", "zero"),
 .errPos = Pos(foString, problem_file, 2, 13),
 });
 
 e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
 e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
+e.addTrace(Pos(foFile, invalidfilename, 100, 1), "missing %s", "nix file");
 
 testing::internal::CaptureStderr();

@@ -271,24 +272,25 @@ namespace nix {
 logError(e.info());
 
 auto str = testing::internal::GetCapturedStderr();
-ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n\x1B[34;1m---- show-trace ----\x1B[0m\n\x1B[34;1mtrace: \x1B[0mwhile trying to compute \x1B[33;1m42\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(1:19)\x1B[34;1m from stdin\x1B[0m\n\n 1| this is the other problem line of code\n | \x1B[31;1m^\x1B[0m\n\n\x1B[34;1mtrace: \x1B[0mwhile doing something without a \x1B[33;1mpos\x1B[0m\n");
+ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nopening file '\x1B[33;1minvalid filename\x1B[0m': \x1B[33;1mNo such file or directory\x1B[0m\n\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nshow-traces\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n\x1B[34;1m---- show-trace ----\x1B[0m\n\x1B[34;1mtrace: \x1B[0mwhile trying to compute \x1B[33;1m42\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(1:19)\x1B[34;1m from stdin\x1B[0m\n\n 1| this is the other problem line of code\n | \x1B[31;1m^\x1B[0m\n\n\x1B[34;1mtrace: \x1B[0mwhile doing something without a \x1B[33;1mpos\x1B[0m\n\x1B[34;1mtrace: \x1B[0mmissing \x1B[33;1mnix file\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(100:1)\x1B[34;1m in file: \x1B[0minvalid filename\n");
 }
 
 TEST(addTrace, hideTracesWithoutShowTrace) {
 SymbolTable testTable;
 auto problem_file = testTable.create(test_file);
 
 auto oneliner_file = testTable.create(one_liner);
+auto invalidfilename = testTable.create("invalid filename");
 
 auto e = AssertionError(ErrorInfo {
 .name = "wat",
-.description = "a well-known problem occurred",
+.description = "hide traces",
 .hint = hintfmt("it has been %1% days since our last error", "zero"),
 .errPos = Pos(foString, problem_file, 2, 13),
 });
 
 e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
 e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
+e.addTrace(Pos(foFile, invalidfilename, 100, 1), "missing %s", "nix file");
 
 testing::internal::CaptureStderr();

@@ -297,9 +299,10 @@ namespace nix {
 logError(e.info());
 
 auto str = testing::internal::GetCapturedStderr();
-ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n");
+ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nhide traces\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n");
 }
 
 
 /* ----------------------------------------------------------------------------
 * hintfmt
 * --------------------------------------------------------------------------*/
@@ -153,14 +153,15 @@ static int _main(int argc, char * * argv)
 
 /* If an expected hash is given, the file may already exist in
 the store. */
-Hash hash, expectedHash(ht);
+std::optional<Hash> expectedHash;
+Hash hash;
 std::optional<StorePath> storePath;
 if (args.size() == 2) {
 expectedHash = Hash(args[1], ht);
 const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-storePath = store->makeFixedOutputPath(recursive, expectedHash, name);
+storePath = store->makeFixedOutputPath(recursive, *expectedHash, name);
 if (store->isValidPath(*storePath))
-hash = expectedHash;
+hash = *expectedHash;
 else
 storePath.reset();
 }

@@ -200,22 +201,12 @@ static int _main(int argc, char * * argv)
 tmpFile = unpacked;
 }
 
-/* FIXME: inefficient; addToStore() will also hash
-this. */
-hash = unpack ? hashPath(ht, tmpFile).first : hashFile(ht, tmpFile);
-
-if (expectedHash != Hash(ht) && expectedHash != hash)
-throw Error("hash mismatch for '%1%'", uri);
-
-const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-
-/* Copy the file to the Nix store. FIXME: if RemoteStore
-implemented addToStoreFromDump() and downloadFile()
-supported a sink, we could stream the download directly
-into the Nix store. */
-storePath = store->addToStore(name, tmpFile, recursive, ht);
-
-assert(*storePath == store->makeFixedOutputPath(recursive, hash, name));
+const auto method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
+
+auto info = store->addToStoreSlow(name, tmpFile, method, ht, expectedHash);
+storePath = info.path;
+assert(info.ca);
+hash = getContentAddressHash(*info.ca);
 }
 
 stopProgressBar();
@@ -174,10 +174,10 @@ static void opAdd(Strings opFlags, Strings opArgs)
 store. */
 static void opAddFixed(Strings opFlags, Strings opArgs)
 {
-auto recursive = FileIngestionMethod::Flat;
+auto method = FileIngestionMethod::Flat;
 
 for (auto & i : opFlags)
-if (i == "--recursive") recursive = FileIngestionMethod::Recursive;
+if (i == "--recursive") method = FileIngestionMethod::Recursive;
 else throw UsageError("unknown flag '%1%'", i);
 
 if (opArgs.empty())

@@ -187,7 +187,7 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
 opArgs.pop_front();
 
 for (auto & i : opArgs)
-cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), i, recursive, hashAlgo)));
+std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(baseNameOf(i), i, method, hashAlgo).path));
 }

@@ -671,7 +671,7 @@ static void opImport(Strings opFlags, Strings opArgs)
 if (!opArgs.empty()) throw UsageError("no arguments expected");
 
 FdSource source(STDIN_FILENO);
-auto paths = store->importPaths(source, nullptr, NoCheckSigs);
+auto paths = store->importPaths(source, NoCheckSigs);
 
 for (auto & i : paths)
 cout << fmt("%s\n", store->printStorePath(i)) << std::flush;

@@ -878,7 +878,7 @@ static void opServe(Strings opFlags, Strings opArgs)
 
 case cmdImportPaths: {
 if (!writeAllowed) throw Error("importing paths is not allowed");
-store->importPaths(in, nullptr, NoCheckSigs); // FIXME: should we skip sig checking?
+store->importPaths(in, NoCheckSigs); // FIXME: should we skip sig checking?
 out << 1; // indicate success
 break;
 }
@@ -94,8 +94,8 @@ struct InstallableStorePath : Installable
 ref<Store> store;
 StorePath storePath;
 
-InstallableStorePath(ref<Store> store, const Path & storePath)
-: store(store), storePath(store->parseStorePath(storePath)) { }
+InstallableStorePath(ref<Store> store, StorePath && storePath)
+: store(store), storePath(std::move(storePath)) { }
 
 std::string what() override { return store->printStorePath(storePath); }

@@ -228,11 +228,11 @@ static std::vector<std::shared_ptr<Installable>> parseInstallables(
 result.push_back(std::make_shared<InstallableExpr>(cmd, s));
 
 else if (s.find("/") != std::string::npos) {
-auto path = store->toStorePath(store->followLinksToStore(s));
-
-if (store->isStorePath(path))
-result.push_back(std::make_shared<InstallableStorePath>(store, path));
+try {
+result.push_back(std::make_shared<InstallableStorePath>(
+store,
+store->toStorePath(store->followLinksToStore(s)).first));
+} catch (BadStorePath &) { }
 }
 
 else if (s == "" || std::regex_match(s, attrPathRegex))
@@ -77,13 +77,16 @@ struct CmdVerify : StorePathsCommand
 try {
 checkInterrupt();
 
-Activity act2(*logger, lvlInfo, actUnknown, fmt("checking '%s'", storePath));
-
 MaintainCount<std::atomic<size_t>> mcActive(active);
 update();
 
 auto info = store->queryPathInfo(store->parseStorePath(storePath));
 
+// Note: info->path can be different from storePath
+// for binary cache stores when using --all (since we
+// can't enumerate names efficiently).
+Activity act2(*logger, lvlInfo, actUnknown, fmt("checking '%s'", store->printStorePath(info->path)));
+
 if (!noContents) {
 
 std::unique_ptr<AbstractHashSink> hashSink;
@@ -182,3 +182,56 @@ clearCacheCache
 nix-store -r $outPath --substituters "file://$cacheDir2 file://$cacheDir" --trusted-public-keys "$publicKey"
 
 fi # HAVE_LIBSODIUM
+
+
+unset _NIX_FORCE_HTTP
+
+
+# Test 'nix verify --all' on a binary cache.
+nix verify -vvvvv --all --store file://$cacheDir --no-trust
+
+
+# Test local NAR caching.
+narCache=$TEST_ROOT/nar-cache
+rm -rf $narCache
+mkdir $narCache
+
+[[ $(nix cat-store --store "file://$cacheDir?local-nar-cache=$narCache" $outPath/foobar) = FOOBAR ]]
+
+rm -rfv "$cacheDir/nar"
+
+[[ $(nix cat-store --store "file://$cacheDir?local-nar-cache=$narCache" $outPath/foobar) = FOOBAR ]]
+
+(! nix cat-store --store file://$cacheDir $outPath/foobar)
+
+
+# Test NAR listing generation.
+clearCache
+
+outPath=$(nix-build --no-out-link -E '
+with import ./config.nix;
+mkDerivation {
+name = "nar-listing";
+buildCommand = "mkdir $out; echo foo > $out/bar; ln -s xyzzy $out/link";
+}
+')
+
+nix copy --to file://$cacheDir?write-nar-listing=1 $outPath
+
+[[ $(cat $cacheDir/$(basename $outPath).ls) = '{"version":1,"root":{"type":"directory","entries":{"bar":{"type":"regular","size":4,"narOffset":232},"link":{"type":"symlink","target":"xyzzy"}}}}' ]]
+
+
+# Test debug info index generation.
+clearCache
+
+outPath=$(nix-build --no-out-link -E '
+with import ./config.nix;
+mkDerivation {
+name = "debug-info";
+buildCommand = "mkdir -p $out/lib/debug/.build-id/02; echo foo > $out/lib/debug/.build-id/02/623eda209c26a59b1a8638ff7752f6b945c26b.debug";
+}
+')
+
+nix copy --to "file://$cacheDir?index-debug-info=1&compression=none" $outPath
+
+[[ $(cat $cacheDir/debuginfo/02623eda209c26a59b1a8638ff7752f6b945c26b.debug) = '{"archive":"../nar/100vxs724qr46phz8m24iswmg9p3785hsyagz0kchf6q6gf06sw6.nar","member":"lib/debug/.build-id/02/623eda209c26a59b1a8638ff7752f6b945c26b.debug"}' ]]