Merge pull request #5696 from obsidiansystems/fix-5299

Fix #5299 and remove unnecessary unbounded buffer
Eelco Dolstra 2021-12-01 21:32:26 +01:00 committed by GitHub
commit fb662e0acf
2 changed files with 27 additions and 19 deletions


@@ -431,25 +431,30 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
                 hashAlgo = parseHashType(hashAlgoRaw);
             }
 
-            StringSink saved;
-            TeeSource savedNARSource(from, saved);
-            RetrieveRegularNARSink savedRegular { saved };
-
-            if (method == FileIngestionMethod::Recursive) {
-                /* Get the entire NAR dump from the client and save it to
-                   a string so that we can pass it to
-                   addToStoreFromDump(). */
-                ParseSink sink; /* null sink; just parse the NAR */
-                parseDump(sink, savedNARSource);
-            } else
-                parseDump(savedRegular, from);
-
+            auto dumpSource = sinkToSource([&](Sink & saved) {
+                if (method == FileIngestionMethod::Recursive) {
+                    /* We parse the NAR dump through into `saved` unmodified,
+                       so why all this extra work? We still parse the NAR so
+                       that we aren't sending arbitrary data to `saved`
+                       unwittingly`, and we know when the NAR ends so we don't
+                       consume the rest of `from` and can't parse another
+                       command. (We don't trust `addToStoreFromDump` to not
+                       eagerly consume the entire stream it's given, past the
+                       length of the Nar. */
+                    TeeSource savedNARSource(from, saved);
+                    ParseSink sink; /* null sink; just parse the NAR */
+                    parseDump(sink, savedNARSource);
+                } else {
+                    /* Incrementally parse the NAR file, stripping the
+                       metadata, and streaming the sole file we expect into
+                       `saved`. */
+                    RetrieveRegularNARSink savedRegular { saved };
+                    parseDump(savedRegular, from);
+                    if (!savedRegular.regular) throw Error("regular file expected");
+                }
+            });
             logger->startWork();
-            if (!savedRegular.regular) throw Error("regular file expected");
-
-            // FIXME: try to stream directly from `from`.
-            StringSource dumpSource { *saved.s };
-            auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
+            auto path = store->addToStoreFromDump(*dumpSource, baseName, method, hashAlgo);
             logger->stopWork();
 
             to << store->printStorePath(path);
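
Not part of the commit, but useful context: `sinkToSource` (from Nix's `src/libutil/serialise.hh`) turns a push-style producer (code that writes to a `Sink`) into a pull-style `Source`, so the daemon can hand the NAR to `addToStoreFromDump` chunk by chunk instead of accumulating it in a `StringSink` first. The sketch below is a simplified, self-contained illustration of that adapter using a helper thread and a single buffered chunk; Nix's actual implementation is coroutine-based, and the `Sink`/`Source` interfaces here are stripped-down stand-ins, not the real ones.

    /* Minimal sketch of a sinkToSource-style adapter (NOT Nix's implementation):
       a producer pushes data into a Sink from a helper thread, a consumer pulls
       it back out of a Source, and at most one chunk is buffered at a time. */
    #include <algorithm>
    #include <condition_variable>
    #include <cstring>
    #include <functional>
    #include <iostream>
    #include <mutex>
    #include <string>
    #include <string_view>
    #include <thread>

    struct Sink { virtual void operator()(std::string_view data) = 0; virtual ~Sink() = default; };
    struct Source { virtual size_t read(char * buf, size_t len) = 0; virtual ~Source() = default; };

    class SinkToSource : public Source {
        std::thread producer;
        std::mutex m;
        std::condition_variable cv;
        std::string pending;   // the single buffered chunk
        bool done = false;

        /* The Sink handed to the producer: blocks until the previous chunk has
           been consumed, then publishes the next one. */
        struct QueueSink : Sink {
            SinkToSource & self;
            explicit QueueSink(SinkToSource & self) : self(self) {}
            void operator()(std::string_view data) override {
                std::unique_lock lk(self.m);
                self.cv.wait(lk, [&] { return self.pending.empty(); });
                self.pending.assign(data.data(), data.size());
                self.cv.notify_all();
            }
        };

    public:
        explicit SinkToSource(std::function<void(Sink &)> produce)
        {
            producer = std::thread([this, produce] {
                QueueSink sink(*this);
                produce(sink);
                std::lock_guard lk(m);
                done = true;
                cv.notify_all();
            });
        }

        /* Toy version: assumes the consumer reads to EOF. Nix's coroutine-based
           adapter needs no thread at all. */
        ~SinkToSource() { producer.join(); }

        size_t read(char * buf, size_t len) override {
            std::unique_lock lk(m);
            cv.wait(lk, [&] { return !pending.empty() || done; });
            if (pending.empty()) return 0;   // producer finished: EOF
            size_t n = std::min(len, pending.size());
            std::memcpy(buf, pending.data(), n);
            pending.erase(0, n);
            if (pending.empty()) cv.notify_all();
            return n;
        }
    };

    int main()
    {
        /* The producer plays the role of parseDump() in the hunk above: it emits
           the stream piecewise instead of materializing it as one string. */
        SinkToSource src([](Sink & sink) {
            for (int i = 0; i < 3; ++i) {
                std::string chunk = "chunk " + std::to_string(i) + "\n";
                sink(chunk);
            }
        });
        char buf[16];
        while (size_t n = src.read(buf, sizeof buf))
            std::cout.write(buf, n);
    }

The property that matters for this fix is that memory use stays bounded by one chunk no matter how large the NAR is, which is what removes the unbounded buffer mentioned in the commit message.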


@@ -8,6 +8,7 @@
 #include "references.hh"
 #include "callback.hh"
 #include "topo-sort.hh"
+#include "finally.hh"
 
 #include <iostream>
 #include <algorithm>
@@ -1333,13 +1334,15 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
         auto want = std::min(chunkSize, settings.narBufferSize - oldSize);
         dump.resize(oldSize + want);
         auto got = 0;
+        Finally cleanup([&]() {
+            dump.resize(oldSize + got);
+        });
         try {
             got = source.read(dump.data() + oldSize, want);
         } catch (EndOfFile &) {
             inMemory = true;
             break;
         }
-        dump.resize(oldSize + got);
     }
 
     std::unique_ptr<AutoDelete> delTempDir;
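
For context (not part of the diff): `Finally`, defined in Nix's `finally.hh`, is a small RAII scope guard that runs a callback when it goes out of scope. Moving the `dump.resize(oldSize + got)` into such a guard means the buffer is trimmed back to the bytes actually read even when `source.read()` throws, instead of leaving `want` bytes of never-written padding at the end. The sketch below shows the idea with a stand-in `Finally` and a hypothetical `fakeRead` that always throws; it is an illustration, not the code from `finally.hh`.

    /* Sketch of the scope-guard idea behind Finally (stand-in, not finally.hh). */
    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <stdexcept>
    #include <string>
    #include <utility>

    class Finally {
        std::function<void()> fun;
    public:
        explicit Finally(std::function<void()> fun) : fun(std::move(fun)) {}
        Finally(const Finally &) = delete;
        ~Finally() { fun(); }   // runs on normal exit and during stack unwinding
    };

    struct EndOfFile : std::runtime_error { using std::runtime_error::runtime_error; };

    int main()
    {
        std::string dump = "already-read";            // 12 bytes previously read
        size_t oldSize = dump.size(), want = 64, got = 0;
        dump.resize(oldSize + want);                  // make room for the next chunk

        /* Hypothetical read that hits the end of the stream immediately,
           standing in for source.read() in the hunk above. */
        auto fakeRead = [](char *, size_t) -> size_t { throw EndOfFile("end of file"); };

        {
            Finally cleanup([&] { dump.resize(oldSize + got); });
            try {
                got = fakeRead(dump.data() + oldSize, want);
            } catch (EndOfFile &) {
                /* the real loop sets inMemory = true and breaks here */
            }
        }   // cleanup runs here: dump is trimmed to the bytes actually read

        std::cout << dump.size() << "\n";             // prints 12, not 76
    }

Compared with the old code, which only resized after a successful read, the guard also covers the EndOfFile path, which is what lets the loop above drop the explicit resize at the bottom.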