Handle case collisions on case-insensitive systems
When running NixOps under Mac OS X, we need to be able to import store paths built on Linux into the local Nix store. However, HFS+ is usually case-insensitive, so if there are directories with file names that differ only in case, then importing will fail.

The solution is to add a suffix ("~nix~case~hack~<integer>") to colliding files. For instance, if we have a directory containing xt_CONNMARK.h and xt_connmark.h, then the latter will be renamed to "xt_connmark.h~nix~case~hack~1". If a store path is dumped as a NAR, the suffixes are removed. Thus, importing and exporting via a case-insensitive Nix store is round-tripping. So when NixOps calls nix-copy-closure to copy the path to a Linux machine, you get the original file names back.

Closes #119.
parent bb65460feb
commit 276a40b31f
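To make the scheme described in the commit message concrete, here is a minimal standalone sketch (my own illustration, not code from this commit; the helper names hackName/unhackName are invented, while the suffix string and example file name come from the message above). Restoring adds a numbered suffix to a colliding name; dumping strips it again.

    #include <iostream>
    #include <string>

    static const std::string caseHackSuffix = "~nix~case~hack~";

    // Import direction: a later entry that collides only in case gets a numbered suffix.
    std::string hackName(const std::string & name, int collisionCount)
    {
        return name + caseHackSuffix + std::to_string(collisionCount);
    }

    // Dump direction: the suffix (and everything after it) is removed again.
    std::string unhackName(std::string name)
    {
        std::string::size_type pos = name.find(caseHackSuffix);
        if (pos != std::string::npos) name.erase(pos);
        return name;
    }

    int main()
    {
        std::string stored = hackName("xt_connmark.h", 1);
        std::cout << stored << "\n";              // xt_connmark.h~nix~case~hack~1
        std::cout << unhackName(stored) << "\n";  // xt_connmark.h
    }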
@@ -2,6 +2,7 @@

#include "globals.hh"
#include "util.hh"
#include "archive.hh"

#include <map>
#include <algorithm>

@@ -150,6 +151,7 @@ void Settings::update()

    get(useSshSubstituter, "use-ssh-substituter");
    get(logServers, "log-servers");
    get(enableImportNative, "allow-unsafe-native-code-during-evaluation");
    get(useCaseHack, "use-case-hack");

    string subs = getEnv("NIX_SUBSTITUTERS", "default");
    if (subs == "default") {

@@ -3,6 +3,8 @@

#include <cerrno>
#include <algorithm>
#include <vector>
#include <map>
#include <cstring>

#define _XOPEN_SOURCE 600
#include <sys/types.h>

@@ -18,39 +20,21 @@

namespace nix {


bool useCaseHack =
#if __APPLE__
    true;
#else
    false;
#endif

static string archiveVersion1 = "nix-archive-1";

static string caseHackSuffix = "~nix~case~hack~";


PathFilter defaultPathFilter;


static void dump(const string & path, Sink & sink, PathFilter & filter);


static void dumpEntries(const Path & path, Sink & sink, PathFilter & filter)
{
    Strings names = readDirectory(path);
    vector<string> names2(names.begin(), names.end());
    sort(names2.begin(), names2.end());

    for (vector<string>::iterator i = names2.begin();
         i != names2.end(); ++i)
    {
        Path entry = path + "/" + *i;
        if (filter(entry)) {
            writeString("entry", sink);
            writeString("(", sink);
            writeString("name", sink);
            writeString(*i, sink);
            writeString("node", sink);
            dump(entry, sink, filter);
            writeString(")", sink);
        }
    }
}


static void dumpContents(const Path & path, size_t size,
    Sink & sink)
{
    writeString("contents", sink);

@@ -58,7 +42,7 @@ static void dumpContents(const Path & path, size_t size,

    AutoCloseFD fd = open(path.c_str(), O_RDONLY);
    if (fd == -1) throw SysError(format("opening file `%1%'") % path);

    unsigned char buf[65536];
    size_t left = size;

@@ -89,12 +73,41 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter)

            writeString("", sink);
        }
        dumpContents(path, (size_t) st.st_size, sink);
    }

    else if (S_ISDIR(st.st_mode)) {
        writeString("type", sink);
        writeString("directory", sink);
        dumpEntries(path, sink, filter);

        /* If we're on a case-insensitive system like Mac OS X, undo
           the case hack applied by restorePath(). */
        Strings names = readDirectory(path);
        std::map<string, string> unhacked;
        for (auto & i : names)
            if (useCaseHack) {
                string name(i);
                size_t pos = i.find(caseHackSuffix);
                if (pos != string::npos) {
                    printMsg(lvlDebug, format("removing case hack suffix from `%1%'") % (path + "/" + i));
                    name.erase(pos);
                }
                if (unhacked.find(name) != unhacked.end())
                    throw Error(format("file name collision in between `%1%' and `%2%'")
                        % (path + "/" + unhacked[name]) % (path + "/" + i));
                unhacked[name] = i;
            } else
                unhacked[i] = i;

        for (auto & i : unhacked)
            if (filter(path + "/" + i.first)) {
                writeString("entry", sink);
                writeString("(", sink);
                writeString("name", sink);
                writeString(i.first, sink);
                writeString("node", sink);
                dump(path + "/" + i.second, sink, filter);
                writeString(")", sink);
            }
    }

    else if (S_ISLNK(st.st_mode)) {

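For clarity, here is a self-contained trace of the suffix-stripping loop added above, assuming useCaseHack is enabled (the function name unhackNames is mine, not the diff's): it maps each on-disk name back to its original NAR entry name and rejects directories where two entries would collapse to the same restored name.

    #include <iostream>
    #include <map>
    #include <stdexcept>
    #include <string>
    #include <vector>

    static const std::string caseHackSuffix = "~nix~case~hack~";

    // Standalone version of the "unhacked" pass: strip the suffix from each
    // directory entry and refuse genuine collisions after stripping.
    std::map<std::string, std::string> unhackNames(const std::vector<std::string> & names)
    {
        std::map<std::string, std::string> unhacked;
        for (const auto & i : names) {
            std::string name(i);
            auto pos = i.find(caseHackSuffix);
            if (pos != std::string::npos) name.erase(pos);
            if (unhacked.count(name))
                throw std::runtime_error("file name collision between '" + unhacked[name] + "' and '" + i + "'");
            unhacked[name] = i;
        }
        return unhacked;
    }

    int main()
    {
        for (const auto & e : unhackNames({"xt_CONNMARK.h", "xt_connmark.h~nix~case~hack~1"}))
            std::cout << e.first << " <- " << e.second << "\n";
        // xt_CONNMARK.h <- xt_CONNMARK.h
        // xt_connmark.h <- xt_connmark.h~nix~case~hack~1
    }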
@@ -123,6 +136,7 @@ static SerialisationError badArchive(string s)

}


#if 0
static void skipGeneric(Source & source)
{
    if (readString(source) == "(") {

@@ -130,43 +144,13 @@ static void skipGeneric(Source & source)

            skipGeneric(source);
    }
}


static void parse(ParseSink & sink, Source & source, const Path & path);


static void parseEntry(ParseSink & sink, Source & source, const Path & path)
{
    string s, name;

    s = readString(source);
    if (s != "(") throw badArchive("expected open tag");

    while (1) {
        checkInterrupt();

        s = readString(source);

        if (s == ")") {
            break;
        } else if (s == "name") {
            name = readString(source);
        } else if (s == "node") {
            if (s == "") throw badArchive("entry name missing");
            parse(sink, source, path + "/" + name);
        } else {
            throw badArchive("unknown field " + s);
            skipGeneric(source);
        }
    }
}
#endif


static void parseContents(ParseSink & sink, Source & source, const Path & path)
{
    unsigned long long size = readLongLong(source);

    sink.preallocateContents(size);

    unsigned long long left = size;

@@ -185,6 +169,15 @@ static void parseContents(ParseSink & sink, Source & source, const Path & path)

}


struct CaseInsensitiveCompare
{
    bool operator() (const string & a, const string & b) const
    {
        return strcasecmp(a.c_str(), b.c_str()) < 0;
    }
};


static void parse(ParseSink & sink, Source & source, const Path & path)
{
    string s;

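As a quick illustration of the comparator introduced above (a standalone sketch, not part of the diff): with this comparator, keys that differ only in case hit the same std::map entry, which is exactly what the collision detection in parse() relies on.

    #include <iostream>
    #include <map>
    #include <string>
    #include <strings.h>   // strcasecmp (POSIX)

    struct CaseInsensitiveCompare
    {
        bool operator() (const std::string & a, const std::string & b) const
        {
            return strcasecmp(a.c_str(), b.c_str()) < 0;
        }
    };

    int main()
    {
        std::map<std::string, int, CaseInsensitiveCompare> names;
        names["xt_CONNMARK.h"] = 0;
        // Finds the existing entry: the two names compare equal under strcasecmp.
        std::cout << names.count("xt_connmark.h") << "\n";  // prints 1
    }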
@@ -194,6 +187,8 @@ static void parse(ParseSink & sink, Source & source, const Path & path)

    enum { tpUnknown, tpRegular, tpDirectory, tpSymlink } type = tpUnknown;

    std::map<Path, int, CaseInsensitiveCompare> names;

    while (1) {
        checkInterrupt();

@@ -221,9 +216,9 @@ static void parse(ParseSink & sink, Source & source, const Path & path)

            else if (t == "symlink") {
                type = tpSymlink;
            }

            else throw badArchive("unknown file type " + t);

        }

        else if (s == "contents" && type == tpRegular) {

@@ -236,7 +231,35 @@ static void parse(ParseSink & sink, Source & source, const Path & path)

        }

        else if (s == "entry" && type == tpDirectory) {
            parseEntry(sink, source, path);
            string name;

            s = readString(source);
            if (s != "(") throw badArchive("expected open tag");

            while (1) {
                checkInterrupt();

                s = readString(source);

                if (s == ")") {
                    break;
                } else if (s == "name") {
                    name = readString(source);
                    if (useCaseHack) {
                        auto i = names.find(name);
                        if (i != names.end()) {
                            printMsg(lvlDebug, format("case collision between `%1%' and `%2%'") % i->first % name);
                            name += caseHackSuffix;
                            name += int2String(++i->second);
                        } else
                            names[name] = 0;
                    }
                } else if (s == "node") {
                    if (s.empty()) throw badArchive("entry name missing");
                    parse(sink, source, path + "/" + name);
                } else
                    throw badArchive("unknown field " + s);
            }
        }

        else if (s == "target" && type == tpSymlink) {

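To see how the counter in the names map plays out, here is a small standalone re-implementation of just that branch (the helper addCaseHack and the use of std::to_string in place of int2String are my own; only the suffix string and the map-with-comparator idea come from the diff): the first occurrence of a name is recorded with counter 0, and each later occurrence that collides only in case is renamed with the next number.

    #include <iostream>
    #include <map>
    #include <string>
    #include <strings.h>   // strcasecmp (POSIX)

    struct CaseInsensitiveCompare
    {
        bool operator() (const std::string & a, const std::string & b) const
        {
            return strcasecmp(a.c_str(), b.c_str()) < 0;
        }
    };

    static const std::string caseHackSuffix = "~nix~case~hack~";

    // Mimics the branch added to parse(): rename later case-colliding entries.
    std::string addCaseHack(std::map<std::string, int, CaseInsensitiveCompare> & names,
        std::string name)
    {
        auto i = names.find(name);
        if (i != names.end()) {
            name += caseHackSuffix;
            name += std::to_string(++i->second);
        } else
            names[name] = 0;
        return name;
    }

    int main()
    {
        std::map<std::string, int, CaseInsensitiveCompare> names;
        std::cout << addCaseHack(names, "xt_CONNMARK.h") << "\n"; // xt_CONNMARK.h
        std::cout << addCaseHack(names, "xt_connmark.h") << "\n"; // xt_connmark.h~nix~case~hack~1
        std::cout << addCaseHack(names, "XT_CONNMARK.H") << "\n"; // XT_CONNMARK.H~nix~case~hack~2
    }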
@@ -244,17 +267,15 @@ static void parse(ParseSink & sink, Source & source, const Path & path)

            sink.createSymlink(path, target);
        }

        else {
        else
            throw badArchive("unknown field " + s);
            skipGeneric(source);
        }
    }
}


void parseDump(ParseSink & sink, Source & source)
{
    string version;
    try {
        version = readString(source);
    } catch (SerialisationError & e) {

@@ -323,7 +344,7 @@ struct RestoreSink : ParseSink

    }
};


void restorePath(const Path & path, Source & source)
{
    RestoreSink sink;

@@ -331,5 +352,5 @@ void restorePath(const Path & path, Source & source)

    parseDump(sink, source);
}


}

@@ -28,7 +28,7 @@ namespace nix {

   where:

     attrs(as) = concat(map(attr, as)) + encN(0)
     attrs((a, b)) = encS(a) + encS(b)

     encS(s) = encN(len(s)) + s + (padding until next 64-bit boundary)

@@ -58,7 +58,7 @@ void dumpPath(const Path & path, Sink & sink,

struct ParseSink
{
    virtual void createDirectory(const Path & path) { };

    virtual void createRegularFile(const Path & path) { };
    virtual void isExecutable() { };
    virtual void preallocateContents(unsigned long long size) { };

@@ -66,10 +66,14 @@ struct ParseSink

    virtual void createSymlink(const Path & path, const string & target) { };
};


void parseDump(ParseSink & sink, Source & source);

void restorePath(const Path & path, Source & source);


// FIXME: global variables are bad m'kay.
extern bool useCaseHack;


}

tests/case-hack.sh (new file, 19 lines)
@@ -0,0 +1,19 @@

source common.sh

clearStore

rm -rf $TEST_ROOT/case

opts="--option use-case-hack true"

# Check whether restoring and dumping a NAR that contains case
# collisions is round-tripping, even on a case-insensitive system.
nix-store $opts --restore $TEST_ROOT/case < case.nar
nix-store $opts --dump $TEST_ROOT/case > $TEST_ROOT/case.nar
cmp case.nar $TEST_ROOT/case.nar
[ "$(nix-hash $opts --type sha256 $TEST_ROOT/case)" = "$(nix-hash --flat --type sha256 case.nar)" ]

# Check whether we detect true collisions (e.g. those remaining after
# removal of the suffix).
touch "$TEST_ROOT/case/xt_CONNMARK.h~nix~case~hack~3"
! nix-store $opts --dump $TEST_ROOT/case > /dev/null

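For reference, a hypothetical recipe (not part of the commit) for regenerating a fixture like tests/case.nar; the directory name and file contents here are mine, and it has to run on a case-sensitive filesystem such as Linux, where both names can coexist on disk:

    # Run on a case-sensitive filesystem; HFS+ would reject the second name.
    mkdir -p collide
    echo foo > collide/xt_CONNMARK.h
    echo bar > collide/xt_connmark.h
    nix-store --dump collide > case.nar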
tests/case.nar (new binary file, not shown)
@@ -10,7 +10,7 @@ nix_tests = \

  remote-store.sh export.sh export-graph.sh negative-caching.sh \
  binary-patching.sh timeout.sh secure-drv-outputs.sh nix-channel.sh \
  multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \
  binary-cache.sh nix-profile.sh repair.sh dump-db.sh
  binary-cache.sh nix-profile.sh repair.sh dump-db.sh case-hack.sh
  # parallel.sh

install-tests += $(foreach x, $(nix_tests), tests/$(x))