forked from lix-project/lix
Drop remaining uses of external "tar"
Also, fetchGit now runs in O(1) memory since we pipe the output of 'git archive' directly into unpackTarfile() (rather than first reading it all into memory).
This commit is contained in:
parent f2bd847092
commit 8918bae098
7 changed files with 43 additions and 24 deletions
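
The central pattern, sketched below for orientation: instead of shelling out to an external "tar", the output of 'git archive' is written into a Sink and exposed as a Source that feeds unpackTarfile(), so the archive is unpacked as it is produced rather than buffered in memory first. This is only an illustrative sketch assembled from helpers that appear in the diff (sinkToSource, RunOptions, runProgram2, unpackTarfile); the wrapper name unpackGitRev and the exact header set are assumptions, not part of the commit.

    // Illustrative sketch (not part of the commit): stream a git revision
    // into the in-process tar unpacker. Assumes the Nix util headers.
    #include "serialise.hh"
    #include "tarfile.hh"
    #include "util.hh"

    using namespace nix;

    // Hypothetical helper name.
    static void unpackGitRev(const Path & cacheDir, const std::string & rev, const Path & destDir)
    {
        // Run "git archive <rev>" with its stdout connected to a Sink;
        // sinkToSource() exposes those bytes as a Source that
        // unpackTarfile() consumes incrementally, keeping memory use O(1).
        auto source = sinkToSource([&](Sink & sink) {
            RunOptions gitOptions("git", { "-C", cacheDir, "archive", rev });
            gitOptions.standardOut = &sink;
            runProgram2(gitOptions);
        });

        unpackTarfile(*source, destDir);
    }
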
@@ -4,6 +4,7 @@
 #include "store-api.hh"
 #include "pathlocks.hh"
 #include "hash.hh"
+#include "tarfile.hh"

 #include <sys/time.h>

@@ -164,14 +165,16 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
         if (e.errNo != ENOENT) throw;
     }

-    // FIXME: should pipe this, or find some better way to extract a
-    // revision.
-    auto tar = runProgram("git", true, { "-C", cacheDir, "archive", gitInfo.rev });
+    auto source = sinkToSource([&](Sink & sink) {
+        RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev });
+        gitOptions.standardOut = &sink;
+        runProgram2(gitOptions);
+    });

     Path tmpDir = createTempDir();
     AutoDelete delTmpDir(tmpDir, true);

-    runProgram("tar", true, { "x", "-C", tmpDir }, tar);
+    unpackTarfile(*source, tmpDir);

     gitInfo.storePath = store->addToStore(name, tmpDir);

@@ -1,5 +1,4 @@
 #include "builtins.hh"
-#include "compression.hh"
 #include "tarfile.hh"

 namespace nix {

@@ -18,16 +17,7 @@ void builtinUnpackChannel(const BasicDerivation & drv)

     createDirs(out);

-    auto source = sinkToSource([&](Sink & sink) {
-        auto decompressor =
-            hasSuffix(src, ".bz2") ? makeDecompressionSink("bzip2", sink) :
-            hasSuffix(src, ".xz") ? makeDecompressionSink("xz", sink) :
-            makeDecompressionSink("none", sink);
-        readFile(src, *decompressor);
-        decompressor->finish();
-    });
-
-    unpackTarfile(*source, out);
+    unpackTarfile(src, out);

     auto entries = readDirectory(out);
     if (entries.size() != 1)

@@ -8,6 +8,7 @@
 #include "compression.hh"
 #include "pathlocks.hh"
 #include "finally.hh"
+#include "tarfile.hh"

 #ifdef ENABLE_S3
 #include <aws/core/client/ClientConfiguration.h>

@@ -903,12 +904,15 @@ CachedDownloadResult Downloader::downloadCached(
                 unpackedStorePath = "";
         }
         if (unpackedStorePath.empty()) {
-            printInfo(format("unpacking '%1%'...") % url);
+            printInfo("unpacking '%s'...", url);
             Path tmpDir = createTempDir();
             AutoDelete autoDelete(tmpDir, true);
-            // FIXME: this requires GNU tar for decompression.
-            runProgram("tar", true, {"xf", store->toRealPath(storePath), "-C", tmpDir, "--strip-components", "1"});
-            unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, NoRepair);
+            unpackTarfile(store->toRealPath(storePath), tmpDir, baseNameOf(url));
+            auto members = readDirectory(tmpDir);
+            if (members.size() != 1)
+                throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
+            auto topDir = tmpDir + "/" + members.begin()->name;
+            unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
         }
         replaceSymlink(unpackedStorePath, unpackedLink);
         storePath = unpackedStorePath;

@@ -77,7 +77,6 @@ struct BufferedSource : Source

     size_t read(unsigned char * data, size_t len) override;

-
     bool hasData();

 protected:

@@ -1,4 +1,5 @@
 #include "rust-ffi.hh"
+#include "compression.hh"

 extern "C" {
     rust::CBox2<rust::Result<std::tuple<>>> unpack_tarfile(rust::Source source, rust::StringSlice dest_dir);

@@ -6,9 +7,28 @@ extern "C" {

 namespace nix {

-void unpackTarfile(Source & source, Path destDir)
+void unpackTarfile(Source & source, const Path & destDir)
 {
     unpack_tarfile(source, destDir).use()->unwrap();
 }

+void unpackTarfile(const Path & tarFile, const Path & destDir,
+    std::optional<std::string> baseName)
+{
+    if (!baseName) baseName = baseNameOf(tarFile);
+
+    auto source = sinkToSource([&](Sink & sink) {
+        // FIXME: look at first few bytes to determine compression type.
+        auto decompressor =
+            // FIXME: add .gz support
+            hasSuffix(*baseName, ".bz2") ? makeDecompressionSink("bzip2", sink) :
+            hasSuffix(*baseName, ".xz") ? makeDecompressionSink("xz", sink) :
+            makeDecompressionSink("none", sink);
+        readFile(tarFile, *decompressor);
+        decompressor->finish();
+    });
+
+    unpackTarfile(*source, destDir);
+}
+
 }

@@ -2,6 +2,9 @@

 namespace nix {

-void unpackTarfile(Source & source, Path destDir);
+void unpackTarfile(Source & source, const Path & destDir);
+
+void unpackTarfile(const Path & tarFile, const Path & destDir,
+    std::optional<std::string> baseName = {});

 }

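A note on the new overload above (illustration only, not part of the diff): the optional baseName parameter lets a caller that unpacks a locally cached copy pass the original file name, so the suffix-based choice of decompressor (".xz", ".bz2") keeps working even if the local path does not end in that suffix; the downloadCached() hunk above and the nix-prefetch-url hunk below pass baseNameOf(...) for exactly this reason. A hypothetical call, with made-up paths:

    #include "tarfile.hh"

    using namespace nix;

    int main()
    {
        // The cached copy has an opaque name, so pass the original basename
        // to let unpackTarfile() select the xz decompressor.
        unpackTarfile("/tmp/cache/3f9a1b", "/tmp/unpacked", "source.tar.xz");
    }
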
@@ -9,6 +9,7 @@
 #include "legacy.hh"
 #include "finally.hh"
 #include "progress-bar.hh"
+#include "tarfile.hh"

 #include <iostream>

@@ -192,8 +193,7 @@ static int _main(int argc, char * * argv)
            if (hasSuffix(baseNameOf(uri), ".zip"))
                runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked});
            else
-                // FIXME: this requires GNU tar for decompression.
-                runProgram("tar", true, {"xf", tmpFile, "-C", unpacked});
+                unpackTarfile(tmpFile, unpacked, baseNameOf(uri));

            /* If the archive unpacks to a single file/directory, then use
               that as the top-level. */