LocalStore: Get rid of recursive_mutex

Eelco Dolstra 2020-11-03 12:52:57 +01:00
parent 797a52e31d
commit e8c379555f
5 changed files with 22 additions and 15 deletions

View file

@@ -3245,7 +3245,7 @@ void DerivationGoal::registerOutputs()
 if (!oldInfo.ultimate) {
     oldInfo.ultimate = true;
     worker.store.signPathInfo(oldInfo);
-    worker.store.registerValidPaths({ std::move(oldInfo) });
+    worker.store.registerValidPaths({{oldInfo.path, oldInfo}});
 }
 continue;
@@ -3275,7 +3275,7 @@ void DerivationGoal::registerOutputs()
    isn't statically known so that we can safely unlock the path before
    the next iteration */
 if (newInfo.ca)
-    worker.store.registerValidPaths({newInfo});
+    worker.store.registerValidPaths({{newInfo.path, newInfo}});
 infos.emplace(outputName, std::move(newInfo));
 }
@@ -3350,7 +3350,7 @@ void DerivationGoal::registerOutputs()
 {
     ValidPathInfos infos2;
     for (auto & [outputName, newInfo] : infos) {
-        infos2.push_back(newInfo);
+        infos2.insert_or_assign(newInfo.path, newInfo);
     }
     worker.store.registerValidPaths(infos2);
 }
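Since ValidPathInfos is now a map keyed by StorePath (see the typedef change below), these call sites build the argument from a braced initializer list of key/value pairs: the outer braces construct the map, the inner braces one pair. A minimal sketch of the pattern, using a stand-in PathInfo type instead of the real ValidPathInfo:

#include <map>
#include <string>

// Stand-in types: the real code uses StorePath and ValidPathInfo.
struct PathInfo { std::string path; unsigned long narSize = 0; };
using PathInfos = std::map<std::string, PathInfo>;

// Mirrors the shape of LocalStore::registerValidPaths(const ValidPathInfos &).
void registerValidPaths(const PathInfos & infos) { (void) infos; }

int main()
{
    PathInfo info{"/nix/store/abc-example", 1234};

    // The outer braces build the map, the inner braces one key/value pair,
    // just like registerValidPaths({{oldInfo.path, oldInfo}}) above.
    registerValidPaths({{info.path, info}});
}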

View file

@@ -7,6 +7,7 @@
 #include "nar-info.hh"
 #include "references.hh"
 #include "callback.hh"
+#include "topo-sort.hh"
 #include <iostream>
 #include <algorithm>
@@ -962,9 +963,7 @@ void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, Subst
 void LocalStore::registerValidPath(const ValidPathInfo & info)
 {
-    ValidPathInfos infos;
-    infos.push_back(info);
-    registerValidPaths(infos);
+    registerValidPaths({{info.path, info}});
 }
@@ -982,7 +981,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
 SQLiteTxn txn(state->db);
 StorePathSet paths;
-for (auto & i : infos) {
+for (auto & [_, i] : infos) {
     assert(i.narHash.type == htSHA256);
     if (isValidPath_(*state, i.path))
         updatePathInfo(*state, i);
@@ -991,7 +990,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
     paths.insert(i.path);
 }
-for (auto & i : infos) {
+for (auto & [_, i] : infos) {
     auto referrer = queryValidPathId(*state, i.path);
     for (auto & j : i.references)
         state->stmtAddReference.use()(referrer)(queryValidPathId(*state, j)).exec();
@@ -1000,7 +999,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
 /* Check that the derivation outputs are correct. We can't do
    this in addValidPath() above, because the references might
    not be valid yet. */
-for (auto & i : infos)
+for (auto & [_, i] : infos)
     if (i.path.isDerivation()) {
         // FIXME: inefficient; we already loaded the derivation in addValidPath().
         checkDerivationOutputs(i.path,
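Because infos is now a map, these loops iterate over key/value pairs: the structured binding [_, i] binds the StorePath key to the ordinary identifier _ (which is simply never used) and the ValidPathInfo to i. A small self-contained sketch of that iteration pattern, with strings standing in for store paths:

#include <iostream>
#include <map>
#include <string>

int main()
{
    // Stand-in for ValidPathInfos: the real map goes StorePath -> ValidPathInfo.
    std::map<std::string, unsigned> infos{
        {"/nix/store/aaa-foo", 100},
        {"/nix/store/bbb-bar", 200},
    };

    // Iterating a map yields (key, value) pairs; binding the key to the ordinary
    // identifier "_" and never using it mirrors `for (auto & [_, i] : infos)` above.
    for (auto & [_, i] : infos)
        std::cout << i << "\n";   // 100, then 200
}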
@@ -1014,7 +1013,17 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
    error if a cycle is detected and roll back the
    transaction. Cycles can only occur when a derivation
    has multiple outputs. */
-topoSortPaths(paths);
+topoSort(paths,
+    {[&](const StorePath & path) {
+        auto i = infos.find(path);
+        return i == infos.end() ? StorePathSet() : i->second.references;
+    }},
+    {[&](const StorePath & path, const StorePath & parent) {
+        return BuildError(
+            "cycle detected in the references of '%s' from '%s'",
+            printStorePath(path),
+            printStorePath(parent));
+    }});
 txn.commit();
 });
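The LocalStore-specific topoSortPaths(paths) is replaced by the generic topoSort helper from the newly included topo-sort.hh, which takes the set of items, a callback that returns each item's references, and a callback that builds the error thrown when a cycle is found. A rough, self-contained sketch of such a depth-first helper; the real signature and implementation in topo-sort.hh may differ in details:

#include <functional>
#include <iostream>
#include <set>
#include <stdexcept>
#include <string>
#include <vector>

// Visit each item depth-first along the edges returned by getChildren and,
// on a back edge, throw the error produced by makeCycleError.
template<typename T>
std::vector<T> topoSort(std::set<T> items,
    std::function<std::set<T>(const T &)> getChildren,
    std::function<std::runtime_error(const T &, const T &)> makeCycleError)
{
    std::vector<T> sorted;
    std::set<T> visited, parents;   // parents = items on the current DFS stack

    std::function<void(const T &, const T *)> dfs = [&](const T & item, const T * parent) {
        if (parents.count(item))
            throw makeCycleError(item, *parent);    // back edge => cycle
        if (!visited.insert(item).second) return;   // already finished
        parents.insert(item);
        for (auto & child : getChildren(item))
            if (items.count(child))                 // only order the given items
                dfs(child, &item);
        parents.erase(item);
        sorted.push_back(item);                     // post-order: references first
    };

    for (auto & item : items)
        dfs(item, nullptr);

    return sorted;
}

int main()
{
    std::set<std::string> paths{"a", "b", "c"};

    // "a" references "b", "b" references "c", much like store path references.
    auto sorted = topoSort<std::string>(paths,
        [](const std::string & p) {
            if (p == "a") return std::set<std::string>{"b"};
            if (p == "b") return std::set<std::string>{"c"};
            return std::set<std::string>{};
        },
        [](const std::string & p, const std::string & parent) {
            return std::runtime_error(
                "cycle detected in the references of '" + p + "' from '" + parent + "'");
        });

    for (auto & p : sorted)
        std::cout << p << " ";   // prints: c b a
    std::cout << "\n";
}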

View file

@@ -90,9 +90,7 @@ private:
     std::unique_ptr<PublicKeys> publicKeys;
 };
-// FIXME: get rid of recursive_mutex, it hides recursive SQLite
-// queries.
-Sync<State, std::recursive_mutex> _state;
+Sync<State> _state;
 public:
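This is the FIXME being resolved: Nix's Sync<T, M> wrapper only hands out the guarded State through a lock, and with std::recursive_mutex a code path that re-entered LocalStore while already holding the lock, for instance an SQLite query issued while another query on the same state was still in progress, would silently succeed. With the default std::mutex such re-entry deadlocks immediately, which is what makes the hidden recursion visible. A minimal sketch of a Sync-like wrapper to illustrate the idea; it is not the real sync.hh:

#include <mutex>

// The data is only reachable through a RAII lock, and the mutex type defaults
// to std::mutex, so re-entrant locking is no longer hidden.
template<class T, class M = std::mutex>
class Sync
{
    M mutex;
    T data;

public:
    class Lock
    {
        Sync * s;
    public:
        explicit Lock(Sync * s) : s(s) { s->mutex.lock(); }
        ~Lock() { s->mutex.unlock(); }
        T * operator->() { return &s->data; }
        T & operator*() { return s->data; }
    };

    Lock lock() { return Lock(this); }
};

struct State { unsigned long registrations = 0; };

int main()
{
    Sync<State> _state;             // previously Sync<State, std::recursive_mutex>

    {
        auto state(_state.lock());  // hold the lock for this scope
        state->registrations++;
        // Calling _state.lock() again here would now deadlock instead of
        // silently nesting, which is what exposes recursive use of the state.
    }
}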

View file

@@ -107,6 +107,6 @@ struct ValidPathInfo
     virtual ~ValidPathInfo() { }
 };
-typedef list<ValidPathInfo> ValidPathInfos;
+typedef std::map<StorePath, ValidPathInfo> ValidPathInfos;
 }
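This typedef change is what drives the rest of the commit: ValidPathInfos goes from a list to a std::map keyed by StorePath, so duplicate registrations collapse onto a single entry and registerValidPaths can look up a path's info directly (as the infos.find(path) call in the topoSort callback above does). Callers accordingly switch from push_back to insert_or_assign. A small sketch of the difference, again with stand-in types:

#include <cassert>
#include <list>
#include <map>
#include <string>

// Stand-ins for StorePath and ValidPathInfo.
struct PathInfo { std::string path; bool ultimate = false; };

int main()
{
    // Old shape: typedef list<ValidPathInfo> ValidPathInfos;
    std::list<PathInfo> oldInfos;
    oldInfos.push_back({"/nix/store/abc-example", false});
    oldInfos.push_back({"/nix/store/abc-example", true});    // duplicate paths pile up
    assert(oldInfos.size() == 2);

    // New shape: typedef std::map<StorePath, ValidPathInfo> ValidPathInfos;
    std::map<std::string, PathInfo> newInfos;
    newInfos.insert_or_assign("/nix/store/abc-example",
        PathInfo{"/nix/store/abc-example", false});
    newInfos.insert_or_assign("/nix/store/abc-example",
        PathInfo{"/nix/store/abc-example", true});           // overwrites the first entry
    assert(newInfos.size() == 1);
    assert(newInfos.find("/nix/store/abc-example") != newInfos.end());   // direct lookup
}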

View file

@@ -516,7 +516,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
         info->narHash = hash.first;
         info->narSize = hash.second;
     }
-    infos.push_back(std::move(*info));
+    infos.insert_or_assign(info->path, *info);
 }
 }