* Merged the no-bdb branch (-r10900:HEAD
  https://svn.nixos.org/repos/nix/nix/branches/no-bdb).

commit b0e92f6d47
parent 4ed01ed791
configure.ac (21 changes)

@@ -16,7 +16,7 @@ if test "$STABLE" != "1"; then
 fi
 fi
 
-AC_DEFINE_UNQUOTED(NIX_VERSION, ["$VERSION"], [version])
+AC_DEFINE_UNQUOTED(NIX_VERSION, ["$VERSION"], [Nix version.])
 
 AC_PREFIX_DEFAULT(/nix)
 
@@ -54,7 +54,7 @@ case $sys_name in
 esac
 
 AC_ARG_WITH(system, AC_HELP_STRING([--with-system=SYSTEM],
-[platform identifier (e.g., `i686-linux')]),
+[Platform identifier (e.g., `i686-linux').]),
 system=$withval, system="${machine_name}-${sys_name}")
 AC_MSG_RESULT($system)
 AC_SUBST(system)
@@ -94,7 +94,7 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <iostream>
 using namespace std;
 static char buf[1024];]],
 [[cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));]])],
-[AC_MSG_RESULT(yes) AC_DEFINE(HAVE_PUBSETBUF, 1, [whether pubsetbuf is available])],
+[AC_MSG_RESULT(yes) AC_DEFINE(HAVE_PUBSETBUF, 1, [Whether pubsetbuf is available.])],
 AC_MSG_RESULT(no))
 AC_LANG_POP(C++)
 
@@ -177,8 +177,13 @@ AC_ARG_WITH(store-dir, AC_HELP_STRING([--with-store-dir=PATH],
 storedir=$withval, storedir='${prefix}/store')
 AC_SUBST(storedir)
 
+AC_ARG_ENABLE(old-db-compat, AC_HELP_STRING([--disable-old-db-compat],
+[disable support for converting from old Berkeley DB-based Nix stores]),
+old_db_compat=$enableval, old_db_compat=yes)
+AM_CONDITIONAL(OLD_DB_COMPAT, test "$old_db_compat" = "yes")
+
 AC_ARG_WITH(bdb, AC_HELP_STRING([--with-bdb=PATH],
-[prefix of Berkeley DB]),
+[prefix of Berkeley DB (for Nix <= 0.11 compatibility)]),
 bdb=$withval, bdb=)
 AM_CONDITIONAL(HAVE_BDB, test -n "$bdb")
 if test -z "$bdb"; then
@@ -188,6 +193,12 @@ else
 bdb_lib="-L$bdb/lib -ldb_cxx"
 bdb_include="-I$bdb/include"
 fi
+if test "$old_db_compat" = "no"; then
+bdb_lib=
+bdb_include=
+else
+AC_DEFINE(OLD_DB_COMPAT, 1, [Whether to support converting from old Berkeley DB-based Nix stores.])
+fi
 AC_SUBST(bdb_lib)
 AC_SUBST(bdb_include)
 
@@ -216,7 +227,7 @@ if test -n "$openssl"; then
 LDFLAGS="-L$openssl/lib -lcrypto $LDFLAGS"
 CFLAGS="-I$openssl/include $CFLAGS"
 CXXFLAGS="-I$openssl/include $CXXFLAGS"
-AC_DEFINE(HAVE_OPENSSL, 1, [whether to use OpenSSL])
+AC_DEFINE(HAVE_OPENSSL, 1, [Whether to use OpenSSL.])
 fi
 
 AC_ARG_WITH(bzip2, AC_HELP_STRING([--with-bzip2=PATH],
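Not part of the commit: a hypothetical invocation showing how the new configure switches above would be used (the Berkeley DB prefix is an assumed example path; --prefix=/nix matches the AC_PREFIX_DEFAULT above).

    # Build without the Berkeley DB compatibility layer
    ./configure --prefix=/nix --disable-old-db-compat
    # Or keep the converter for old (<= 0.11) stores and point it at a BDB prefix
    ./configure --prefix=/nix --with-bdb=/usr/local/BerkeleyDB.4.5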
externals/Makefile.am (8 changes, vendored)

@@ -2,6 +2,8 @@
 
 DB = db-4.5.20
 
+if OLD_DB_COMPAT
+
 $(DB).tar.gz:
 @echo "Nix requires Berkeley DB to build."
 @echo "Please download version 4.5.20 from"
@@ -32,6 +34,12 @@ build-db: have-db
 touch build-db
 endif
 
+else
+
+build-db:
+
+endif
+
 
 # CWI ATerm
 
src/libstore/Makefile.am

@@ -1,13 +1,14 @@
 pkglib_LTLIBRARIES = libstore.la
 
 libstore_la_SOURCES = \
 store-api.cc local-store.cc remote-store.cc derivations.cc build.cc misc.cc \
-globals.cc db.cc references.cc pathlocks.cc gc.cc
+globals.cc db.cc references.cc pathlocks.cc gc.cc upgrade-schema.cc \
+optimise-store.cc
 
 pkginclude_HEADERS = \
 store-api.hh local-store.hh remote-store.hh derivations.hh misc.hh \
 globals.hh db.hh references.hh pathlocks.hh \
 worker-protocol.hh
 
 libstore_la_LIBADD = ../libutil/libutil.la ../boost/format/libformat.la
 
src/libstore/build.cc

@@ -3,7 +3,6 @@
 #include "misc.hh"
 #include "globals.hh"
 #include "local-store.hh"
-#include "db.hh"
 #include "util.hh"
 
 #include <map>
@@ -207,7 +206,9 @@ private:
 
 public:
 
-Worker();
+LocalStore & store;
+
+Worker(LocalStore & store);
 ~Worker();
 
 /* Make a goal (with caching). */
@@ -897,14 +898,14 @@ void DerivationGoal::haveDerivation()
 return;
 }
 
-assert(store->isValidPath(drvPath));
+assert(worker.store.isValidPath(drvPath));
 
 /* Get the derivation. */
 drv = derivationFromPath(drvPath);
 
 for (DerivationOutputs::iterator i = drv.outputs.begin();
 i != drv.outputs.end(); ++i)
-store->addTempRoot(i->second.path);
+worker.store.addTempRoot(i->second.path);
 
 /* Check what outputs paths are not already valid. */
 PathSet invalidOutputs = checkPathValidity(false);
@@ -938,7 +939,7 @@ void DerivationGoal::haveDerivation()
 i != invalidOutputs.end(); ++i)
 /* Don't bother creating a substitution goal if there are no
 substitutes. */
-if (store->hasSubstitutes(*i))
+if (worker.store.hasSubstitutes(*i))
 addWaitee(worker.makeSubstitutionGoal(*i));
 
 if (waitees.empty()) /* to prevent hang (no wake-up event) */
@@ -993,7 +994,7 @@ void DerivationGoal::outputsSubstituted()
 throw BuildError(format("`exportBuildReferencesGraph' contains a non-store path `%1%'")
 % storePath);
 storePath = toStorePath(storePath);
-if (!store->isValidPath(storePath))
+if (!worker.store.isValidPath(storePath))
 throw BuildError(format("`exportBuildReferencesGraph' contains an invalid path `%1%'")
 % storePath);
 
@@ -1250,19 +1251,6 @@ PathSet outputPaths(const DerivationOutputs & outputs)
 }
 
 
-string showPaths(const PathSet & paths)
-{
-string s;
-for (PathSet::const_iterator i = paths.begin();
-i != paths.end(); ++i)
-{
-if (s.size() != 0) s += ", ";
-s += "`" + *i + "'";
-}
-return s;
-}
-
-
 DerivationGoal::HookReply DerivationGoal::tryBuildHook()
 {
 if (!useBuildHook) return rpDecline;
@@ -1474,7 +1462,7 @@ DerivationGoal::PrepareBuildReply DerivationGoal::prepareBuild()
 i != drv.outputs.end(); ++i)
 {
 Path path = i->second.path;
-if (store->isValidPath(path))
+if (worker.store.isValidPath(path))
 throw BuildError(format("obstructed build: path `%1%' exists") % path);
 if (pathExists(path)) {
 debug(format("removing unregistered path `%1%'") % path);
@@ -1502,7 +1490,7 @@ DerivationGoal::PrepareBuildReply DerivationGoal::prepareBuild()
 /* Add the relevant output closures of the input derivation
 `*i' as input paths. Only add the closures of output paths
 that are specified as inputs. */
-assert(store->isValidPath(i->first));
+assert(worker.store.isValidPath(i->first));
 Derivation inDrv = derivationFromPath(i->first);
 for (StringSet::iterator j = i->second.begin();
 j != i->second.end(); ++j)
@@ -1624,7 +1612,7 @@ void DerivationGoal::startBuilder()
 throw BuildError(format("`exportReferencesGraph' contains a non-store path `%1%'")
 % storePath);
 storePath = toStorePath(storePath);
-if (!store->isValidPath(storePath))
+if (!worker.store.isValidPath(storePath))
 throw BuildError(format("`exportReferencesGraph' contains an invalid path `%1%'")
 % storePath);
 
@@ -1652,7 +1640,7 @@ void DerivationGoal::startBuilder()
 throw BuildError(format("`exportBuildReferencesGraph' contains a non-store path `%1%'")
 % storePath);
 storePath = toStorePath(storePath);
-if (!store->isValidPath(storePath))
+if (!worker.store.isValidPath(storePath))
 throw BuildError(format("`exportBuildReferencesGraph' contains an invalid path `%1%'")
 % storePath);
 
@@ -1994,27 +1982,17 @@ void DerivationGoal::computeClosure()
 }
 
 /* Register each output path as valid, and register the sets of
-paths referenced by each of them. This is wrapped in one
-database transaction to ensure that if we crash, either
-everything is registered or nothing is. This is for
-recoverability: unregistered paths in the store can be deleted
-arbitrarily, while registered paths can only be deleted by
-running the garbage collector.
-
-The reason that we do the transaction here and not on the fly
-while we are scanning (above) is so that we don't hold database
-locks for too long. */
-Transaction txn;
-createStoreTransaction(txn);
+paths referenced by each of them. !!! this should be
+atomic so that either all paths are registered as valid, or
+none are. */
 for (DerivationOutputs::iterator i = drv.outputs.begin();
 i != drv.outputs.end(); ++i)
 {
-registerValidPath(txn, i->second.path,
+worker.store.registerValidPath(i->second.path,
 contentHashes[i->second.path],
 allReferences[i->second.path],
 drvPath);
 }
-txn.commit();
 
 /* It is now safe to delete the lock files, since all future
 lockers will see that the output paths are valid; they will not
@@ -2113,7 +2091,7 @@ PathSet DerivationGoal::checkPathValidity(bool returnValid)
 PathSet result;
 for (DerivationOutputs::iterator i = drv.outputs.begin();
 i != drv.outputs.end(); ++i)
-if (store->isValidPath(i->second.path)) {
+if (worker.store.isValidPath(i->second.path)) {
 if (returnValid) result.insert(i->second.path);
 } else {
 if (!returnValid) result.insert(i->second.path);
@@ -2219,10 +2197,10 @@ void SubstitutionGoal::init()
 {
 trace("init");
 
-store->addTempRoot(storePath);
+worker.store.addTempRoot(storePath);
 
 /* If the path already exists we're done. */
-if (store->isValidPath(storePath)) {
+if (worker.store.isValidPath(storePath)) {
 amDone(ecSuccess);
 return;
 }
@@ -2293,7 +2271,7 @@ void SubstitutionGoal::referencesValid()
 for (PathSet::iterator i = references.begin();
 i != references.end(); ++i)
 if (*i != storePath) /* ignore self-references */
-assert(store->isValidPath(*i));
+assert(worker.store.isValidPath(*i));
 
 state = &SubstitutionGoal::tryToRun;
 worker.waitForBuildSlot(shared_from_this());
@@ -2327,7 +2305,7 @@ void SubstitutionGoal::tryToRun()
 (format("waiting for lock on `%1%'") % storePath).str());
 
 /* Check again whether the path is invalid. */
-if (store->isValidPath(storePath)) {
+if (worker.store.isValidPath(storePath)) {
 debug(format("store path `%1%' has become valid") % storePath);
 outputLock->setDeletion(true);
 amDone(ecSuccess);
@@ -2434,11 +2412,8 @@ void SubstitutionGoal::finished()
 
 Hash contentHash = hashPath(htSHA256, storePath);
 
-Transaction txn;
-createStoreTransaction(txn);
-registerValidPath(txn, storePath, contentHash,
+worker.store.registerValidPath(storePath, contentHash,
 references, deriver);
-txn.commit();
 
 outputLock->setDeletion(true);
 
@@ -2472,7 +2447,8 @@ void SubstitutionGoal::handleEOF(int fd)
 static bool working = false;
 
 
-Worker::Worker()
+Worker::Worker(LocalStore & store)
+: store(store)
 {
 /* Debugging: prevent recursive workers. */
 if (working) abort();
@@ -2870,7 +2846,7 @@ void LocalStore::buildDerivations(const PathSet & drvPaths)
 startNest(nest, lvlDebug,
 format("building %1%") % showPaths(drvPaths));
 
-Worker worker;
+Worker worker(*this);
 
 Goals goals;
 for (PathSet::const_iterator i = drvPaths.begin();
@@ -2895,9 +2871,9 @@ void LocalStore::buildDerivations(const PathSet & drvPaths)
 void LocalStore::ensurePath(const Path & path)
 {
 /* If the path is already valid, we're done. */
-if (store->isValidPath(path)) return;
+if (isValidPath(path)) return;
 
-Worker worker;
+Worker worker(*this);
 GoalPtr goal = worker.makeSubstitutionGoal(path);
 Goals goals = singleton<Goals>(goal);
 
src/libstore/db.cc

@@ -1,3 +1,7 @@
+#include "config.h"
+
+#ifdef OLD_DB_COMPAT
+
 #include "db.hh"
 #include "util.hh"
 #include "pathlocks.hh"
@@ -466,3 +470,5 @@ void Database::clearTable(const Transaction & txn, TableId table)
 
 
 }
+
+#endif
src/libstore/gc.cc

@@ -2,8 +2,6 @@
 #include "misc.hh"
 #include "pathlocks.hh"
 #include "local-store.hh"
-#include "db.hh"
-#include "util.hh"
 
 #include <boost/shared_ptr.hpp>
 

(One file's diff was suppressed because it is too large.)
src/libstore/local-store.hh

@@ -4,18 +4,16 @@
 #include <string>
 
 #include "store-api.hh"
+#include "util.hh"
 
 
 namespace nix {
 
 
-class Transaction;
-
-
 /* Nix store and database schema version. Version 1 (or 0) was Nix <=
 0.7. Version 2 was Nix 0.8 and 0.9. Version 3 is Nix 0.10.
-Version 4 is Nix 0.11. */
-const int nixSchemaVersion = 4;
+Version 4 is Nix 0.11. Version 5 is Nix 0.12*/
+const int nixSchemaVersion = 5;
 
 
 extern string drvsLogDir;
@@ -43,15 +41,9 @@ private:
 
 public:
 
-/* Open the database environment. If `reserveSpace' is true, make
-sure that a big empty file exists in /nix/var/nix/db/reserved.
-If `reserveSpace' is false, delete this file if it exists. The
-idea is that on normal operation, the file exists; but when we
-run the garbage collector, it is deleted. This is to ensure
-that the garbage collector has a small amount of disk space
-available, which is required to open the Berkeley DB
-environment. */
-LocalStore(bool reserveSpace);
+/* Initialise the local store, upgrading the schema if
+necessary. */
+LocalStore();
 
 ~LocalStore();
 
@@ -100,33 +92,63 @@ public:
 void collectGarbage(GCAction action, const PathSet & pathsToDelete,
 bool ignoreLiveness, PathSet & result, unsigned long long & bytesFreed);
 
+/* Delete a path from the Nix store. */
+void deleteFromStore(const Path & path, unsigned long long & bytesFreed);
+
 /* Optimise the disk space usage of the Nix store by hard-linking
 files with the same contents. */
 void optimiseStore(bool dryRun, OptimiseStats & stats);
 
+/* Check the integrity of the Nix store. */
+void verifyStore(bool checkContents);
+
+/* Register the validity of a path, i.e., that `path' exists, that
+the paths referenced by it exists, and in the case of an output
+path of a derivation, that it has been produced by a succesful
+execution of the derivation (or something equivalent). Also
+register the hash of the file system contents of the path. The
+hash must be a SHA-256 hash. */
+void registerValidPath(const Path & path,
+const Hash & hash, const PathSet & references, const Path & deriver);
+
+void registerValidPaths(const ValidPathInfos & infos);
+
+private:
+
+Path schemaPath;
+
+/* Lock file used for upgrading. */
+AutoCloseFD globalLock;
+
+/* !!! The cache can grow very big. Maybe it should be pruned
+every once in a while. */
+std::map<Path, ValidPathInfo> pathInfoCache;
+
+/* Store paths for which the referrers file must be purged. */
+PathSet delayedUpdates;
+
+int getSchema();
+
+void registerValidPath(const ValidPathInfo & info, bool ignoreValidity = false);
+
+ValidPathInfo queryPathInfo(const Path & path);
+
+void rewriteReferrers(const Path & path, bool purge, PathSet referrers);
+
+void flushDelayedUpdates();
+
+bool queryReferrersInternal(const Path & path, PathSet & referrers);
+
+void invalidatePath(const Path & path);
+
+void upgradeStore12();
+
 };
 
 
-/* Get a transaction object. */
-void createStoreTransaction(Transaction & txn);
-
 /* Copy a path recursively. */
 void copyPath(const Path & src, const Path & dst);
 
-/* Register the validity of a path, i.e., that `path' exists, that the
-paths referenced by it exists, and in the case of an output path of
-a derivation, that it has been produced by a succesful execution of
-the derivation (or something equivalent). Also register the hash
-of the file system contents of the path. The hash must be a
-SHA-256 hash. */
-void registerValidPath(const Transaction & txn,
-const Path & path, const Hash & hash, const PathSet & references,
-const Path & deriver);
-
-typedef list<ValidPathInfo> ValidPathInfos;
-
-void registerValidPaths(const Transaction & txn,
-const ValidPathInfos & infos);
-
 /* "Fix", or canonicalise, the meta-data of the files in a store path
 after it has been built. In particular:
 - the last modification date on each file is set to 0 (i.e.,
@@ -137,25 +159,10 @@ void registerValidPaths(const Transaction & txn,
 in a setuid Nix installation. */
 void canonicalisePathMetaData(const Path & path);
 
-/* Checks whether a path is valid. */
-bool isValidPathTxn(const Transaction & txn, const Path & path);
-
-/* Sets the set of outgoing FS references for a store path. Use with
-care! */
-void setReferences(const Transaction & txn, const Path & path,
-const PathSet & references);
-
-/* Sets the deriver of a store path. Use with care! */
-void setDeriver(const Transaction & txn, const Path & path,
-const Path & deriver);
-
-/* Delete a value from the nixStore directory. */
-void deleteFromStore(const Path & path, unsigned long long & bytesFreed);
+void canonicalisePathMetaData(const Path & path, bool recurse);
 
 MakeError(PathInUse, Error);
 
-void verifyStore(bool checkContents);
-
 /* Whether we are in build users mode. */
 bool haveBuildUsers();
 
src/libstore/misc.cc

@@ -1,6 +1,5 @@
 #include "misc.hh"
 #include "store-api.hh"
-#include "db.hh"
 
 #include <aterm2.h>
 
src/libstore/optimise-store.cc (new file, 129 lines)

@@ -0,0 +1,129 @@
+#include "util.hh"
+#include "local-store.hh"
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+
+namespace nix {
+
+
+typedef std::map<Hash, std::pair<Path, ino_t> > HashToPath;
+
+
+static void makeWritable(const Path & path)
+{
+struct stat st;
+if (lstat(path.c_str(), &st))
+throw SysError(format("getting attributes of path `%1%'") % path);
+if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1)
+throw SysError(format("changing writability of `%1%'") % path);
+}
+
+
+static void hashAndLink(bool dryRun, HashToPath & hashToPath,
+OptimiseStats & stats, const Path & path)
+{
+struct stat st;
+if (lstat(path.c_str(), &st))
+throw SysError(format("getting attributes of path `%1%'") % path);
+
+/* Sometimes SNAFUs can cause files in the Nix store to be
+modified, in particular when running programs as root under
+NixOS (example: $fontconfig/var/cache being modified). Skip
+those files. */
+if (S_ISREG(st.st_mode) && (st.st_mode & S_IWUSR)) {
+printMsg(lvlError, format("skipping suspicious writable file `%1%'") % path);
+return;
+}
+
+/* We can hard link regular files and symlinks. */
+if (S_ISREG(st.st_mode) || S_ISLNK(st.st_mode)) {
+
+/* Hash the file. Note that hashPath() returns the hash over
+the NAR serialisation, which includes the execute bit on
+the file. Thus, executable and non-executable files with
+the same contents *won't* be linked (which is good because
+otherwise the permissions would be screwed up).
+
+Also note that if `path' is a symlink, then we're hashing
+the contents of the symlink (i.e. the result of
+readlink()), not the contents of the target (which may not
+even exist). */
+Hash hash = hashPath(htSHA256, path);
+stats.totalFiles++;
+printMsg(lvlDebug, format("`%1%' has hash `%2%'") % path % printHash(hash));
+
+std::pair<Path, ino_t> prevPath = hashToPath[hash];
+
+if (prevPath.first == "") {
+hashToPath[hash] = std::pair<Path, ino_t>(path, st.st_ino);
+return;
+}
+
+/* Yes! We've seen a file with the same contents. Replace
+the current file with a hard link to that file. */
+stats.sameContents++;
+if (prevPath.second == st.st_ino) {
+printMsg(lvlDebug, format("`%1%' is already linked to `%2%'") % path % prevPath.first);
+return;
+}
+
+if (!dryRun) {
+
+printMsg(lvlTalkative, format("linking `%1%' to `%2%'") % path % prevPath.first);
+
+Path tempLink = (format("%1%.tmp-%2%-%3%")
+% path % getpid() % rand()).str();
+
+/* Make the containing directory writable, but only if
+it's not the store itself (we don't want or need to
+mess with its permissions). */
+bool mustToggle = !isStorePath(path);
+if (mustToggle) makeWritable(dirOf(path));
+
+if (link(prevPath.first.c_str(), tempLink.c_str()) == -1)
+throw SysError(format("cannot link `%1%' to `%2%'")
+% tempLink % prevPath.first);
+
+/* Atomically replace the old file with the new hard link. */
+if (rename(tempLink.c_str(), path.c_str()) == -1)
+throw SysError(format("cannot rename `%1%' to `%2%'")
+% tempLink % path);
+
+/* Make the directory read-only again and reset its
+timestamp back to 0. */
+if (mustToggle) canonicalisePathMetaData(dirOf(path), false);
+
+} else
+printMsg(lvlTalkative, format("would link `%1%' to `%2%'") % path % prevPath.first);
+
+stats.filesLinked++;
+stats.bytesFreed += st.st_size;
+}
+
+if (S_ISDIR(st.st_mode)) {
+Strings names = readDirectory(path);
+for (Strings::iterator i = names.begin(); i != names.end(); ++i)
+hashAndLink(dryRun, hashToPath, stats, path + "/" + *i);
+}
+}
+
+
+void LocalStore::optimiseStore(bool dryRun, OptimiseStats & stats)
+{
+HashToPath hashToPath;
+
+PathSet paths = queryValidPaths();
+
+for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i) {
+addTempRoot(*i);
+if (!isValidPath(*i)) continue; /* path was GC'ed, probably */
+startNest(nest, lvlChatty, format("hashing files in `%1%'") % *i);
+hashAndLink(dryRun, hashToPath, stats, *i);
+}
+}
+
+
+}
src/libstore/store-api.cc

@@ -205,6 +205,19 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven)
 }
 
 
+string showPaths(const PathSet & paths)
+{
+string s;
+for (PathSet::const_iterator i = paths.begin();
+i != paths.end(); ++i)
+{
+if (s.size() != 0) s += ", ";
+s += "`" + *i + "'";
+}
+return s;
+}
+
+
 }
 
 
@@ -219,10 +232,10 @@ namespace nix {
 boost::shared_ptr<StoreAPI> store;
 
 
-boost::shared_ptr<StoreAPI> openStore(bool reserveSpace)
+boost::shared_ptr<StoreAPI> openStore()
 {
 if (getEnv("NIX_REMOTE") == "")
-return boost::shared_ptr<StoreAPI>(new LocalStore(reserveSpace));
+return boost::shared_ptr<StoreAPI>(new LocalStore());
 else
 return boost::shared_ptr<StoreAPI>(new RemoteStore());
 }
src/libstore/store-api.hh

@@ -249,7 +249,12 @@ extern boost::shared_ptr<StoreAPI> store;
 
 /* Factory method: open the Nix database, either through the local or
 remote implementation. */
-boost::shared_ptr<StoreAPI> openStore(bool reserveSpace = true);
+boost::shared_ptr<StoreAPI> openStore();
 
 
+/* Display a set of paths in human-readable form (i.e., between quotes
+and separated by commas). */
+string showPaths(const PathSet & paths);
+
+
 string makeValidityRegistration(const PathSet & paths,
@@ -261,8 +266,12 @@ struct ValidPathInfo
 Path deriver;
 Hash hash;
 PathSet references;
+time_t registrationTime;
+ValidPathInfo() : registrationTime(0) { }
 };
 
+typedef list<ValidPathInfo> ValidPathInfos;
+
 ValidPathInfo decodeValidPathInfo(std::istream & str,
 bool hashGiven = false);
 
src/libstore/upgrade-schema.cc (new file, 108 lines)

@@ -0,0 +1,108 @@
+#include "db.hh"
+#include "hash.hh"
+#include "util.hh"
+#include "local-store.hh"
+#include "globals.hh"
+#include "pathlocks.hh"
+#include "config.h"
+
+#include <iostream>
+
+
+namespace nix {
+
+
+Hash parseHashField(const Path & path, const string & s);
+
+
+/* Upgrade from schema 4 (Nix 0.11) to schema 5 (Nix >= 0.12). The
+old schema uses Berkeley DB, the new one stores store path
+meta-information in files. */
+void LocalStore::upgradeStore12()
+{
+#if OLD_DB_COMPAT
+
+#ifdef __CYGWIN__
+/* Cygwin can't upgrade a read lock to a write lock... */
+lockFile(globalLock, ltNone, true);
+#endif
+
+if (!lockFile(globalLock, ltWrite, false)) {
+printMsg(lvlError, "waiting for exclusive access to the Nix store...");
+lockFile(globalLock, ltWrite, true);
+}
+
+printMsg(lvlError, "upgrading Nix store to new schema (this may take a while)...");
+
+if (getSchema() >= nixSchemaVersion) return; /* somebody else beat us to it */
+
+/* Open the old Nix database and tables. */
+Database nixDB;
+nixDB.open(nixDBPath);
+
+/* dbValidPaths :: Path -> ()
+
+The existence of a key $p$ indicates that path $p$ is valid
+(that is, produced by a succesful build). */
+TableId dbValidPaths = nixDB.openTable("validpaths");
+
+/* dbReferences :: Path -> [Path]
+
+This table lists the outgoing file system references for each
+output path that has been built by a Nix derivation. These are
+found by scanning the path for the hash components of input
+paths. */
+TableId dbReferences = nixDB.openTable("references");
+
+/* dbReferrers :: Path -> Path
+
+This table is just the reverse mapping of dbReferences. This
+table can have duplicate keys, each corresponding value
+denoting a single referrer. */
+// Not needed for conversion: it's just the inverse of
+// references.
+// TableId dbReferrers = nixDB.openTable("referrers");
+
+/* dbDerivers :: Path -> [Path]
+
+This table lists the derivation used to build a path. There
+can only be multiple such paths for fixed-output derivations
+(i.e., derivations specifying an expected hash). */
+TableId dbDerivers = nixDB.openTable("derivers");
+
+Paths paths;
+nixDB.enumTable(noTxn, dbValidPaths, paths);
+
+for (Paths::iterator i = paths.begin(); i != paths.end(); ++i) {
+ValidPathInfo info;
+info.path = *i;
+
+Paths references;
+nixDB.queryStrings(noTxn, dbReferences, *i, references);
+info.references.insert(references.begin(), references.end());
+
+string s;
+nixDB.queryString(noTxn, dbValidPaths, *i, s);
+info.hash = parseHashField(*i, s);
+
+nixDB.queryString(noTxn, dbDerivers, *i, info.deriver);
+
+registerValidPath(info, true);
+std::cerr << ".";
+}
+
+std::cerr << std::endl;
+
+writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
+
+lockFile(globalLock, ltRead, true);
+
+#else
+throw Error(
+"Your Nix store has a database in Berkeley DB format. To convert\n"
+"to the new format, please compile Nix with Berkeley DB support.");
+#endif
+}
+
+
+}
src/libutil/util.cc

@@ -361,9 +361,10 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix,
 
 Paths createDirs(const Path & path)
 {
-if (path == "/") return Paths();
-Paths created = createDirs(dirOf(path));
+Paths created;
+if (path == "/") return created;
 if (!pathExists(path)) {
+created = createDirs(dirOf(path));
 if (mkdir(path.c_str(), 0777) == -1)
 throw SysError(format("creating directory `%1%'") % path);
 created.push_back(path);
src/libutil/util.hh

@@ -12,6 +12,10 @@
 namespace nix {
 
 
+#define foreach(it_type, it, collection) \
+for (it_type it = collection.begin(); it != collection.end(); ++it)
+
+
 /* Return an environment variable. */
 string getEnv(const string & key, const string & def = "");
 
src/nix-env/nix-env.cc

@@ -13,7 +13,6 @@
 #include "common-opts.hh"
 #include "xml-writer.hh"
 #include "store-api.hh"
-#include "db.hh"
 #include "util.hh"
 
 #include <cerrno>
src/nix-store/dotgraph.cc

@@ -1,7 +1,6 @@
 #include "dotgraph.hh"
 #include "util.hh"
 #include "store-api.hh"
-#include "db.hh"
 
 #include <iostream>
 
src/nix-store/nix-store.cc

@@ -7,7 +7,6 @@
 #include "shared.hh"
 #include "dotgraph.hh"
 #include "local-store.hh"
-#include "db.hh"
 #include "util.hh"
 #include "help.txt.hh"
 
@@ -31,6 +30,14 @@ static int rootNr = 0;
 static bool indirectRoot = false;
 
 
+LocalStore & ensureLocalStore()
+{
+LocalStore * store2(dynamic_cast<LocalStore *>(store.get()));
+if (!store2) throw Error("you don't have sufficient rights to use --verify");
+return *store2;
+}
+
+
 static Path useDeriver(Path path)
 {
 if (!isDerivation(path)) {
@@ -430,10 +437,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
 }
 }
 
-Transaction txn;
-createStoreTransaction(txn);
-registerValidPaths(txn, infos);
-txn.commit();
+ensureLocalStore().registerValidPaths(infos);
 }
 
 
@@ -641,11 +645,10 @@ static void opVerify(Strings opFlags, Strings opArgs)
 if (*i == "--check-contents") checkContents = true;
 else throw UsageError(format("unknown flag `%1%'") % *i);
 
-verifyStore(checkContents);
+ensureLocalStore().verifyStore(checkContents);
 }
 
 
-
 static void showOptimiseStats(OptimiseStats & stats)
 {
 printMsg(lvlError,
@@ -671,12 +674,9 @@ static void opOptimise(Strings opFlags, Strings opArgs)
 if (*i == "--dry-run") dryRun = true;
 else throw UsageError(format("unknown flag `%1%'") % *i);
 
-LocalStore * store2(dynamic_cast<LocalStore *>(store.get()));
-if (!store2) throw Error("you don't have sufficient rights to use --optimise");
-
 OptimiseStats stats;
 try {
-store2->optimiseStore(dryRun, stats);
+ensureLocalStore().optimiseStore(dryRun, stats);
 } catch (...) {
 showOptimiseStats(stats);
 throw;
@@ -755,7 +755,7 @@ void run(Strings args)
 if (!op) throw UsageError("no operation specified");
 
 if (op != opDump && op != opRestore) /* !!! hack */
-store = openStore(op != opGC);
+store = openStore();
 
 op(opFlags, opArgs);
 }
src/nix-worker/nix-worker.cc

@@ -474,7 +474,7 @@ static void processConnection()
 #endif
 
 /* Open the store. */
-store = boost::shared_ptr<StoreAPI>(new LocalStore(true));
+store = boost::shared_ptr<StoreAPI>(new LocalStore());
 
 stopWork();
 
tests/Makefile.am

@@ -19,7 +19,7 @@ TESTS = init.sh hash.sh lang.sh add.sh simple.sh dependencies.sh \
 fallback.sh nix-push.sh gc.sh gc-concurrent.sh verify.sh nix-pull.sh \
 referrers.sh user-envs.sh logging.sh nix-build.sh misc.sh fixed.sh \
 gc-runtime.sh install-package.sh check-refs.sh filter-source.sh \
-remote-store.sh
+remote-store.sh export.sh
 
 XFAIL_TESTS =
 
tests/dependencies.sh

@@ -11,7 +11,7 @@ $nixstore -q --graph "$drvPath" > $TEST_ROOT/graph
 if test -n "$dot"; then
 # Does it parse?
 $dot < $TEST_ROOT/graph
 fi
 
 outPath=$($nixstore -rvv "$drvPath")
 
tests/export.sh (new file, 31 lines)

@@ -0,0 +1,31 @@
+source common.sh
+
+clearStore
+
+outPath=$($nixstore -r $($nixinstantiate dependencies.nix))
+
+$nixstore --export $outPath > $TEST_ROOT/exp
+
+$nixstore --export $($nixstore -qR $outPath) > $TEST_ROOT/exp_all
+
+
+clearStore
+
+if $nixstore --import < $TEST_ROOT/exp; then
+echo "importing a non-closure should fail"
+exit 1
+fi
+
+
+clearStore
+
+$nixstore --import < $TEST_ROOT/exp_all
+
+$nixstore --export $($nixstore -qR $outPath) > $TEST_ROOT/exp_all2
+
+
+clearStore
+
+# Regression test: the derivers in exp_all2 are empty, which shouldn't
+# cause a failure.
+$nixstore --import < $TEST_ROOT/exp_all2
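Not part of the commit: a rough interactive sketch of the export/import round trip the new test exercises ($outPath stands for any valid store path, as in the script above; the dump file name is illustrative).

    # Export the closure of a path and re-import it into a (different) store
    nix-store --export $(nix-store -qR $outPath) > closure.dump
    nix-store --import < closure.dump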
tests/init.sh

@@ -95,4 +95,5 @@ chmod +x $NIX_BIN_DIR/nix/download-using-manifests.pl
 $nixstore --init
 
 # Did anything happen?
-test -e "$NIX_DB_DIR"/validpaths
+test -e "$NIX_DB_DIR"/info
+test -e "$NIX_DB_DIR"/referrer
tests/referrers.sh

@@ -9,12 +9,50 @@ reference=$NIX_STORE_DIR/abcdef
 touch $reference
 (echo $reference && echo && echo 0) | $nixstore --register-validity
 
-echo "registering..."
-time for ((n = 0; n < $max; n++)); do
+echo "making registration..."
+
+for ((n = 0; n < $max; n++)); do
 storePath=$NIX_STORE_DIR/$n
 touch $storePath
-(echo $storePath && echo && echo 1 && echo $reference)
-done | $nixstore --register-validity
+ref2=$NIX_STORE_DIR/$((n+1))
+if test $((n+1)) = $max; then
+ref2=$reference
+fi
+(echo $storePath && echo && echo 2 && echo $reference && echo $ref2)
+done > $TEST_ROOT/reg_info
+
+echo "registering..."
+
+time $nixstore --register-validity < $TEST_ROOT/reg_info
+
+oldTime=$(cat test-tmp/db/info/1 | grep Registered-At)
+
+echo "sleeping..."
+
+sleep 2
+
+echo "reregistering..."
+
+time $nixstore --register-validity --reregister < $TEST_ROOT/reg_info
+
+newTime=$(cat test-tmp/db/info/1 | grep Registered-At)
+
+if test "$newTime" != "$oldTime"; then
+echo "reregistration changed original registration time"
+exit 1
+fi
+
+if test "$(cat test-tmp/db/referrer/1 | wc -w)" != 1; then
+echo "reregistration duplicated referrers"
+exit 1
+fi
+
 echo "collecting garbage..."
-time $nixstore --gc 2> /dev/null
+ln -sfn $reference "$NIX_STATE_DIR"/gcroots/ref
+time $nixstore --gc
+
+if test "$(cat test-tmp/db/referrer/abcdef | wc -w)" != 0; then
+echo "referrers not cleaned up"
+exit 1
+fi
+