forked from lix-project/lix
Merge remote-tracking branch 'origin/master' into markdown
commit 4bf5faf416
@@ -117,7 +117,16 @@ for my $fn (glob "$tmpDir/*") {
         my $dstKey = "$releaseDir/" . $name;
         unless (defined $releasesBucket->head_key($dstKey)) {
             print STDERR "uploading $fn to s3://$releasesBucketName/$dstKey...\n";
-            $releasesBucket->add_key_filename($dstKey, $fn)
+
+            my $configuration = ();
+            $configuration->{content_type} = "application/octet-stream";
+
+            if ($fn =~ /.sha256|.asc|install/) {
+                # Text files
+                $configuration->{content_type} = "text/plain";
+            }
+
+            $releasesBucket->add_key_filename($dstKey, $fn, $configuration)
                 or die $releasesBucket->err . ": " . $releasesBucket->errstr;
         }
     }
@@ -1015,7 +1015,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
                 Hash h = newHashAllowEmpty(*outputHash, ht);

                 auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
-                if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
+                drv.env["out"] = state.store->printStorePath(outPath);
                 drv.outputs.insert_or_assign("out", DerivationOutput {
                     .output = DerivationOutputCAFixed {
                         .hash = FixedOutputHash {
@@ -1029,7 +1029,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
             else if (contentAddressed) {
                 HashType ht = parseHashType(outputHashAlgo);
                 for (auto & i : outputs) {
-                    if (!jsonObject) drv.env[i] = hashPlaceholder(i);
+                    drv.env[i] = hashPlaceholder(i);
                     drv.outputs.insert_or_assign(i, DerivationOutput {
                         .output = DerivationOutputCAFloating {
                             .method = ingestionMethod,
@@ -1047,7 +1047,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
                    that changes in the set of output names do get reflected in
                    the hash. */
                 for (auto & i : outputs) {
-                    if (!jsonObject) drv.env[i] = "";
+                    drv.env[i] = "";
                     drv.outputs.insert_or_assign(i,
                         DerivationOutput {
                             .output = DerivationOutputInputAddressed {
@@ -1062,7 +1062,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *

                 for (auto & i : outputs) {
                     auto outPath = state.store->makeOutputPath(i, h, drvName);
-                    if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
+                    drv.env[i] = state.store->printStorePath(outPath);
                     drv.outputs.insert_or_assign(i,
                         DerivationOutput {
                             .output = DerivationOutputInputAddressed {
@@ -1073,7 +1073,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
     }

     /* Write the resulting term into the Nix store directory. */
-    auto drvPath = writeDerivation(state.store, drv, state.repair);
+    auto drvPath = writeDerivation(*state.store, drv, state.repair);
     auto drvPathS = state.store->printStorePath(drvPath);

     printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
@@ -2756,8 +2756,12 @@ struct RestrictedStore : public LocalFSStore
    void queryReferrers(const StorePath & path, StorePathSet & referrers) override
    { }

-    OutputPathMap queryDerivationOutputMap(const StorePath & path) override
-    { throw Error("queryDerivationOutputMap"); }
+    std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override
+    {
+        if (!goal.isAllowed(path))
+            throw InvalidPath("cannot query output map for unknown path '%s' in recursive Nix", printStorePath(path));
+        return next->queryPartialDerivationOutputMap(path);
+    }

    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
    { throw Error("queryPathFromHashPart"); }
@@ -4983,7 +4987,7 @@ void Worker::waitForInput()
        std::vector<unsigned char> buffer(4096);
        for (auto & k : fds2) {
            if (pollStatus.at(fdToPollStatus.at(k)).revents) {
-                ssize_t rd = read(k, buffer.data(), buffer.size());
+                ssize_t rd = ::read(k, buffer.data(), buffer.size());
                // FIXME: is there a cleaner way to handle pt close
                // than EIO? Is this even standard?
                if (rd == 0 || (rd == -1 && errno == EIO)) {
@@ -325,9 +325,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
    case wopQueryDerivationOutputMap: {
        auto path = store->parseStorePath(readString(from));
        logger->startWork();
-        OutputPathMap outputs = store->queryDerivationOutputMap(path);
+        auto outputs = store->queryPartialDerivationOutputMap(path);
        logger->stopWork();
-        writeOutputPathMap(*store, to, outputs);
+        worker_proto::write(*store, to, outputs);
        break;
    }
@@ -61,7 +61,7 @@ bool BasicDerivation::isBuiltin() const
 }


-StorePath writeDerivation(ref<Store> store,
+StorePath writeDerivation(Store & store,
     const Derivation & drv, RepairFlag repair)
 {
     auto references = drv.inputSrcs;
@@ -71,10 +71,10 @@ StorePath writeDerivation(ref<Store> store,
       (that can be missing (of course) and should not necessarily be
       held during a garbage collection). */
    auto suffix = std::string(drv.name) + drvExtension;
-    auto contents = drv.unparse(*store, false);
+    auto contents = drv.unparse(store, false);
    return settings.readOnlyMode
-        ? store->computeStorePathForText(suffix, contents, references)
-        : store->addTextToStore(suffix, contents, references, repair);
+        ? store.computeStorePathForText(suffix, contents, references)
+        : store.addTextToStore(suffix, contents, references, repair);
 }

@@ -146,7 +146,7 @@ class Store;
 enum RepairFlag : bool { NoRepair = false, Repair = true };

 /* Write a derivation to the Nix store, and return its path. */
-StorePath writeDerivation(ref<Store> store,
+StorePath writeDerivation(Store & store,
     const Derivation & drv, RepairFlag repair = NoRepair);

 /* Read a derivation from a file. */
@@ -209,6 +209,24 @@ struct LegacySSHStore : public Store
        const StorePathSet & references, RepairFlag repair) override
    { unsupported("addTextToStore"); }

+private:
+
+    void putBuildSettings(Connection & conn)
+    {
+        conn.to
+            << settings.maxSilentTime
+            << settings.buildTimeout;
+        if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2)
+            conn.to
+                << settings.maxLogSize;
+        if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
+            conn.to
+                << settings.buildRepeat
+                << settings.enforceDeterminism;
+    }
+
+public:
+
    BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
        BuildMode buildMode) override
    {
@@ -218,16 +236,8 @@ struct LegacySSHStore : public Store
            << cmdBuildDerivation
            << printStorePath(drvPath);
        writeDerivation(conn->to, *this, drv);
-        conn->to
-            << settings.maxSilentTime
-            << settings.buildTimeout;
-        if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 2)
-            conn->to
-                << settings.maxLogSize;
-        if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
-            conn->to
-                << settings.buildRepeat
-                << settings.enforceDeterminism;
+
+        putBuildSettings(*conn);

        conn->to.flush();

@@ -241,6 +251,29 @@ struct LegacySSHStore : public Store
        return status;
    }

+    void buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode) override
+    {
+        auto conn(connections->get());
+
+        conn->to << cmdBuildPaths;
+        Strings ss;
+        for (auto & p : drvPaths)
+            ss.push_back(p.to_string(*this));
+        conn->to << ss;
+
+        putBuildSettings(*conn);
+
+        conn->to.flush();
+
+        BuildResult result;
+        result.status = (BuildResult::Status) readInt(conn->from);
+
+        if (!result.success()) {
+            conn->from >> result.errorMsg;
+            throw Error(result.status, result.errorMsg);
+        }
+    }
+
    void ensurePath(const StorePath & path) override
    { unsupported("ensurePath"); }

@@ -785,17 +785,21 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
 }


-OutputPathMap LocalStore::queryDerivationOutputMap(const StorePath & path)
+std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path)
 {
-    return retrySQLite<OutputPathMap>([&]() {
+    std::map<std::string, std::optional<StorePath>> outputs;
+    BasicDerivation drv = readDerivation(path);
+    for (auto & [outName, _] : drv.outputs) {
+        outputs.insert_or_assign(outName, std::nullopt);
+    }
+    return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
        auto state(_state.lock());

        auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
            (queryValidPathId(*state, path)));

-        OutputPathMap outputs;
        while (useQueryDerivationOutputs.next())
-            outputs.emplace(
+            outputs.insert_or_assign(
                useQueryDerivationOutputs.getStr(0),
                parseStorePath(useQueryDerivationOutputs.getStr(1))
            );
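
The LocalStore hunk above first seeds every output declared by the derivation with std::nullopt and only then overwrites the entries the database actually knows a path for. A minimal standalone sketch of that same pattern, using plain std::map/std::optional and made-up stand-in strings rather than the Nix StorePath and SQLite types:

    #include <iostream>
    #include <map>
    #include <optional>
    #include <string>
    #include <vector>

    int main()
    {
        // Outputs declared by the derivation (stand-in for drv.outputs).
        std::vector<std::string> declaredOutputs = {"out", "dev", "doc"};

        // Paths the database happens to know (stand-in for the SQLite rows).
        std::map<std::string, std::string> knownPaths = {
            {"out", "/nix/store/aaaa-example"},
            {"dev", "/nix/store/bbbb-example-dev"},
        };

        // Seed every declared output with std::nullopt...
        std::map<std::string, std::optional<std::string>> outputs;
        for (auto & name : declaredOutputs)
            outputs.insert_or_assign(name, std::nullopt);

        // ...then overwrite the entries that have a known path.
        for (auto & [name, path] : knownPaths)
            outputs.insert_or_assign(name, path);

        for (auto & [name, optPath] : outputs)
            std::cout << name << " -> " << (optPath ? *optPath : "(unknown)") << "\n";
    }
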
@@ -133,7 +133,7 @@ public:

    StorePathSet queryValidDerivers(const StorePath & path) override;

-    OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
+    std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override;

    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;

@@ -31,7 +31,6 @@ template<> StorePathSet readStorePaths(const Store & store, Source & from)
    return paths;
 }

-
 void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths)
 {
    out << paths.size();
@@ -39,6 +38,7 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths)
        out << store.printStorePath(i);
 }

+
 StorePathCAMap readStorePathCAMap(const Store & store, Source & from)
 {
    StorePathCAMap paths;
@@ -57,30 +57,36 @@ void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap &
    }
 }

-std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
+
+namespace worker_proto {
+
+StorePath read(const Store & store, Source & from, Phantom<StorePath> _)
 {
-    std::map<string, StorePath> pathMap;
-    auto rawInput = readStrings<Strings>(from);
-    if (rawInput.size() % 2)
-        throw Error("got an odd number of elements from the daemon when trying to read a output path map");
-    auto curInput = rawInput.begin();
-    while (curInput != rawInput.end()) {
-        auto thisKey = *curInput++;
-        auto thisValue = *curInput++;
-        pathMap.emplace(thisKey, store.parseStorePath(thisValue));
-    }
-    return pathMap;
+    return store.parseStorePath(readString(from));
 }

-void writeOutputPathMap(const Store & store, Sink & out, const std::map<string, StorePath> & pathMap)
+void write(const Store & store, Sink & out, const StorePath & storePath)
 {
-    out << 2*pathMap.size();
-    for (auto & i : pathMap) {
-        out << i.first;
-        out << store.printStorePath(i.second);
-    }
+    out << store.printStorePath(storePath);
 }

+
+template<>
+std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _)
+{
+    auto s = readString(from);
+    return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
+}
+
+template<>
+void write(const Store & store, Sink & out, const std::optional<StorePath> & storePathOpt)
+{
+    out << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
+}
+
+}
+

 /* TODO: Separate these store impls into different files, give them better names */
 RemoteStore::RemoteStore(const Params & params)
    : Store(params)
@@ -468,12 +474,12 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
 }


-OutputPathMap RemoteStore::queryDerivationOutputMap(const StorePath & path)
+std::map<std::string, std::optional<StorePath>> RemoteStore::queryPartialDerivationOutputMap(const StorePath & path)
 {
    auto conn(getConnection());
    conn->to << wopQueryDerivationOutputMap << printStorePath(path);
    conn.processStderr();
-    return readOutputPathMap(*this, conn->from);
+    return worker_proto::read(*this, conn->from, Phantom<std::map<std::string, std::optional<StorePath>>> {});
 }

@@ -51,7 +51,7 @@ public:

    StorePathSet queryDerivationOutputs(const StorePath & path) override;

-    OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
+    std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override;
    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;

    StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;
@@ -359,6 +359,17 @@ bool Store::PathInfoCacheValue::isKnownNow()
    return std::chrono::steady_clock::now() < time_point + ttl;
 }

+OutputPathMap Store::queryDerivationOutputMap(const StorePath & path) {
+    auto resp = queryPartialDerivationOutputMap(path);
+    OutputPathMap result;
+    for (auto & [outName, optOutPath] : resp) {
+        if (!optOutPath)
+            throw Error("output '%s' has no store path mapped to it", outName);
+        result.insert_or_assign(outName, *optOutPath);
+    }
+    return result;
+}
+
 StorePathSet Store::queryDerivationOutputs(const StorePath & path)
 {
    auto outputMap = this->queryDerivationOutputMap(path);
@@ -343,9 +343,15 @@ public:
    /* Query the outputs of the derivation denoted by `path'. */
    virtual StorePathSet queryDerivationOutputs(const StorePath & path);

-    /* Query the mapping outputName=>outputPath for the given derivation */
-    virtual OutputPathMap queryDerivationOutputMap(const StorePath & path)
-    { unsupported("queryDerivationOutputMap"); }
+    /* Query the mapping outputName => outputPath for the given derivation. All
+       outputs are mentioned so ones missing the mapping are mapped to
+       `std::nullopt`. */
+    virtual std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path)
+    { unsupported("queryPartialDerivationOutputMap"); }
+
+    /* Query the mapping outputName=>outputPath for the given derivation.
+       Assume every output has a mapping and throw an exception otherwise. */
+    OutputPathMap queryDerivationOutputMap(const StorePath & path);

    /* Query the full store path given the hash part of a valid store
       path, or empty if the path doesn't exist. */
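
The new header comments above distinguish a partial query, where outputs without a known path map to std::nullopt, from the non-virtual wrapper that demands a complete mapping and throws otherwise. As a rough illustration only, a sketch of a caller; it assumes it is compiled inside the Nix source tree so that store-api.hh and the types used here are available, and the function name printKnownOutputs is made up for this example:

    #include "store-api.hh"

    #include <iostream>

    using namespace nix;

    // Print whatever is currently known about a derivation's outputs without
    // forcing every output to already have a store path.
    void printKnownOutputs(Store & store, const StorePath & drvPath)
    {
        for (auto & [outputName, optPath] : store.queryPartialDerivationOutputMap(drvPath)) {
            if (optPath)
                std::cout << outputName << " -> " << store.printStorePath(*optPath) << "\n";
            else
                std::cout << outputName << " -> (not yet known)\n";
        }

        // By contrast, the non-virtual wrapper insists on a complete mapping
        // and throws if any output is still std::nullopt:
        // OutputPathMap all = store.queryDerivationOutputMap(drvPath);
    }
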
@@ -70,10 +70,84 @@ template<class T> T readStorePaths(const Store & store, Source & from);

 void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);

+/* To guide overloading */
+template<typename T>
+struct Phantom {};
+
+
+namespace worker_proto {
+/* FIXME maybe move more stuff inside here */
+
+StorePath read(const Store & store, Source & from, Phantom<StorePath> _);
+void write(const Store & store, Sink & out, const StorePath & storePath);
+
+template<typename T>
+std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _);
+template<typename T>
+void write(const Store & store, Sink & out, const std::map<string, T> & resMap);
+template<typename T>
+std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _);
+template<typename T>
+void write(const Store & store, Sink & out, const std::optional<T> & optVal);
+
+/* Specialization which uses an empty string for the empty case, taking
+   advantage of the fact StorePaths always serialize to a non-empty string.
+   This is done primarily for backwards compatibility, so that StorePath <=
+   std::optional<StorePath>, where <= is the compatibility partial order.
+*/
+template<>
+void write(const Store & store, Sink & out, const std::optional<StorePath> & optVal);
+
+template<typename T>
+std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _)
+{
+    std::map<string, T> resMap;
+    auto size = (size_t)readInt(from);
+    while (size--) {
+        auto thisKey = readString(from);
+        resMap.insert_or_assign(std::move(thisKey), nix::worker_proto::read(store, from, Phantom<T> {}));
+    }
+    return resMap;
+}
+
+template<typename T>
+void write(const Store & store, Sink & out, const std::map<string, T> & resMap)
+{
+    out << resMap.size();
+    for (auto & i : resMap) {
+        out << i.first;
+        nix::worker_proto::write(store, out, i.second);
+    }
+}
+
+template<typename T>
+std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _)
+{
+    auto tag = readNum<uint8_t>(from);
+    switch (tag) {
+    case 0:
+        return std::nullopt;
+    case 1:
+        return nix::worker_proto::read(store, from, Phantom<T> {});
+    default:
+        throw Error("got an invalid tag bit for std::optional: %#04x", tag);
+    }
+}
+
+template<typename T>
+void write(const Store & store, Sink & out, const std::optional<T> & optVal)
+{
+    out << (optVal ? 1 : 0);
+    if (optVal)
+        nix::worker_proto::write(store, out, *optVal);
+}
+
+
+}
+
+
 StorePathCAMap readStorePathCAMap(const Store & store, Source & from);

 void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths);

-void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);
-
 }
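
The worker_proto declarations above describe the framing used on the daemon protocol: a map is sent as its size followed by key/value pairs, and an optional is sent as a 0/1 tag followed by the payload, with std::optional<StorePath> specialized to an empty/non-empty string for backwards compatibility. The toy round trip below mirrors only that tag-then-payload idea; it uses std::iostream and std::string instead of the Nix Sink/Source and StorePath types and deliberately does not reproduce the exact Nix wire encoding:

    #include <cstdint>
    #include <iostream>
    #include <optional>
    #include <sstream>
    #include <string>

    // Write an optional string as a single tag byte (0 = absent, 1 = present)
    // followed by a length-prefixed payload.
    void writeOpt(std::ostream & out, const std::optional<std::string> & v)
    {
        out.put(v ? 1 : 0);
        if (v) {
            uint32_t len = v->size();
            out.write(reinterpret_cast<char *>(&len), sizeof len);
            out.write(v->data(), len);
        }
    }

    // Read back the same encoding.
    std::optional<std::string> readOpt(std::istream & in)
    {
        char tag = in.get();
        if (tag == 0) return std::nullopt;
        uint32_t len = 0;
        in.read(reinterpret_cast<char *>(&len), sizeof len);
        std::string s(len, '\0');
        in.read(s.data(), len);
        return s;
    }

    int main()
    {
        std::stringstream wire;
        writeOpt(wire, std::nullopt);
        writeOpt(wire, std::string("/nix/store/aaaa-example"));

        std::cout << readOpt(wire).value_or("(none)") << "\n"; // (none)
        std::cout << readOpt(wire).value_or("(none)") << "\n"; // /nix/store/aaaa-example
    }
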
@@ -138,7 +138,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
            .path = shellOutPath
        } });
    drv.env["out"] = store->printStorePath(shellOutPath);
-    auto shellDrvPath2 = writeDerivation(store, drv);
+    auto shellDrvPath2 = writeDerivation(*store, drv);

    /* Build the derivation. */
    store->buildPaths({{shellDrvPath2}});
@@ -246,7 +246,7 @@ struct CmdDevelop : Common, MixEnvironment
        addFlag({
            .longName = "command",
            .shortName = 'c',
-            .description = "command and arguments to be executed insted of an interactive shell",
+            .description = "command and arguments to be executed instead of an interactive shell",
            .labels = {"command", "args"},
            .handler = {[&](std::vector<std::string> ss) {
                if (ss.empty()) throw UsageError("--command requires at least one argument");
tests/build-hook-ca.nix (new file, 45 lines)
@@ -0,0 +1,45 @@
+{ busybox }:
+
+with import ./config.nix;
+
+let
+
+  mkDerivation = args:
+    derivation ({
+      inherit system;
+      builder = busybox;
+      args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
+      outputHashMode = "recursive";
+      outputHashAlgo = "sha256";
+    } // removeAttrs args ["builder" "meta"])
+    // { meta = args.meta or {}; };
+
+  input1 = mkDerivation {
+    shell = busybox;
+    name = "build-remote-input-1";
+    buildCommand = "echo FOO > $out";
+    requiredSystemFeatures = ["foo"];
+    outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
+  };
+
+  input2 = mkDerivation {
+    shell = busybox;
+    name = "build-remote-input-2";
+    buildCommand = "echo BAR > $out";
+    requiredSystemFeatures = ["bar"];
+    outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q=";
+  };
+
+in
+
+  mkDerivation {
+    shell = busybox;
+    name = "build-remote";
+    buildCommand =
+      ''
+        read x < ${input1}
+        read y < ${input2}
+        echo "$x $y" > $out
+      '';
+    outputHash = "sha256-3YGhlOfbGUm9hiPn2teXXTT8M1NEpDFvfXkxMaJRld0=";
+  }
tests/build-remote-content-addressed-fixed.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
+source common.sh
+
+file=build-hook-ca.nix
+
+source build-remote.sh
tests/build-remote-input-addressed.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
+source common.sh
+
+file=build-hook.nix
+
+source build-remote.sh
@@ -1,5 +1,3 @@
-source common.sh
-
 if ! canUseSandbox; then exit; fi
 if ! [[ $busybox =~ busybox ]]; then exit; fi

@@ -19,7 +17,7 @@ builders=(
 # Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
 # child process. This allows us to test LegacySSHStore::buildDerivation().
 # ssh-ng://... likewise allows us to test RemoteStore::buildDerivation().
-nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
+nix build -L -v -f $file -o $TEST_ROOT/result --max-jobs 0 \
   --arg busybox $busybox \
   --store $TEST_ROOT/machine0 \
   --builders "$(join_by '; ' "${builders[@]}")"
@@ -14,7 +14,7 @@ nix_tests = \
  placeholders.sh nix-shell.sh \
  linux-sandbox.sh \
  build-dry.sh \
-  build-remote.sh \
+  build-remote-input-addressed.sh \
  nar-access.sh \
  structured-attrs.sh \
  fetchGit.sh \
@@ -34,6 +34,7 @@ nix_tests = \
  recursive.sh \
  flakes.sh
  # parallel.sh
+  # build-remote-content-addressed-fixed.sh \

 install-tests += $(foreach x, $(nix_tests), tests/$(x))
