* Nix can now fetch prebuilts (and other files) from the network, iff
  a mapping from the hash to a URL has been registered through `nix regurl'.
* Bug fix in nix: don't pollute stdout when running tar; it made nix-switch barf.
* Bug fix in nix-push-prebuilts: don't create a subdirectory on the target when rsync'ing.
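As a rough sketch of how the pieces in this commit fit together (the bracketed hashes, host and paths are placeholders, not values from the commit):

    # producer side, roughly what nix-push-prebuilts does per installed package:
    nix export <exportdir> <pkghash>          # writes <id>-<pkghash>-<prebuilthash>.tar.bz2
    rsync -av -e ssh <exportdir>/ <server>:<prebuilts-dir>/

    # consumer side, what register() in nix-pull-prebuilts does per tarball it finds:
    nix regprebuilt <pkghash> <prebuilthash>
    nix regurl <prebuilthash> http://<server>/<prebuilts-dir>/<id>-<pkghash>-<prebuilthash>.tar.bz2

    # at install time, getFile() in nix.cc first looks <prebuilthash> up in dbRefs;
    # failing that it fetches the registered URL with wget and checks the hash.

All of these commands appear in the diff below; only the bracketed names are made up.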
parent 13176d74cc, commit f8d91f20e6
@@ -1,4 +1,4 @@
-bin_SCRIPTS = nix-generate-regscript nix-switch nix-collect-garbage \
+bin_SCRIPTS = nix-switch nix-collect-garbage \
	nix-pull-prebuilts nix-push-prebuilts

install-exec-local:
@@ -9,13 +9,25 @@ my $conffile = "$etcdir/prebuilts.conf";

sub register {
    my $fn = shift;
+    my $url = shift;
    return unless $fn =~ /([^\/]*)-([0-9a-z]{32})-([0-9a-z]{32})\.tar\.bz2/;
    my $id = $1;
    my $pkghash = $2;
    my $prebuilthash = $3;

    print "$pkghash => $prebuilthash ($id)\n";

    system "nix regprebuilt $pkghash $prebuilthash";
    if ($?) { die "`nix regprebuilt' failed"; }

+    if ($url =~ /^\//) {
+        system "nix regfile $url";
+        if ($?) { die "`nix regfile' failed"; }
+    } else {
+        system "nix regurl $prebuilthash $url";
+        if ($?) { die "`nix regurl' failed"; }
+    }
+
    print KNOWNS "$pkghash\n";
}
@@ -35,7 +47,7 @@ while (<CONFFILE>) {
        # It's a local path.

        foreach my $fn (glob "$url/*") {
-            register $fn;
+            register($fn, $fn);
        }

    } else {
@@ -54,7 +66,7 @@ while (<CONFFILE>) {
            my $fn = $1;
            next if $fn =~ /\.\./;
            next if $fn =~ /\//;
-            register $fn;
+            register($fn, "$url/$fn");
        }

        close INDEX;
@@ -17,7 +17,6 @@ close KNOWNS;
# For each installed package, check whether a prebuilt is known.

open PKGS, "nix listinst|";
open KNOWNS, ">>$knowns";

while (<PKGS>) {
    chomp;
@@ -28,13 +27,16 @@ while (<PKGS>) {
        print "exporting $pkghash...\n";
        system "nix export '$exportdir' $pkghash";
        if ($?) { die "`nix export' failed"; }
        print KNOWNS "$pkghash\n";
    }
}

close KNOWNS;
close PKGS;

# Push the prebuilts to the server. !!! FIXME

-system "rsync -av -e ssh '$exportdir' losser:/home/eelco/public_html/nix-prebuilts/";
+system "rsync -av -e ssh '$exportdir'/ losser:/home/eelco/public_html/nix-prebuilts/";

+# Rerun `nix-pull-prebuilts' to rescan the prebuilt source locations.

+print "running nix-pull-prebuilts...";
+system "nix-pull-prebuilts";
@@ -30,7 +30,7 @@ while (-e "$linkdir/$id-$nr") { $nr++; }
my $link = "$linkdir/$id-$nr";

# Create a symlink from $link to $pkgdir.
-symlink($pkgdir, $link) or die "cannot create $link";
+symlink($pkgdir, $link) or die "cannot create $link: $!";

# Also store the hash of $pkgdir. This is useful for garbage
# collection and the like.
src/fix.cc (21 lines changed)
@@ -23,15 +23,24 @@ static bool verbose = false;
typedef map<string, string> DescriptorMap;


/* Forward declarations. */

void registerFile(string filename)
{
    int res = system(("nix regfile " + filename).c_str());
    /* !!! escape */
    if (WEXITSTATUS(res) != 0)
        throw Error("cannot register " + filename + " with Nix");
}


+void registerURL(string hash, string url)
+{
+    int res = system(("nix regurl " + hash + " " + url).c_str());
+    /* !!! escape */
+    if (WEXITSTATUS(res) != 0)
+        throw Error("cannot register " + hash + " -> " + url + " with Nix");
+}


Error badTerm(const string & msg, ATerm e)
{
    char * s = ATwriteToString(e);
@@ -152,6 +161,7 @@ ATerm evaluate(ATerm e, EvalContext ctx)
    else if (ATmatch(e, "Local(<term>)", &e2)) {
        string filename = absPath(evaluateStr(e2, ctx), ctx.dir); /* !!! */
        string hash = hashFile(filename);
        registerFile(filename); /* !!! */
        return ATmake("File(<str>)", hash.c_str());
    }
@@ -161,12 +171,7 @@ ATerm evaluate(ATerm e, EvalContext ctx)
        string hash = evaluateStr(e2, ctx);
        checkHash(hash);
        string url = evaluateStr(e3, ctx);
-#if 0
-        if (verbose)
-            cerr << "fetching " << url << endl;
-        string filename = fetchURL(url);
-#endif
-        /* !!! register */
+        registerURL(hash, url);
        return ATmake("File(<str>)", hash.c_str());
    }
src/nix.cc (126 lines changed)
@@ -26,6 +26,7 @@ using namespace std;
static string dbRefs = "refs";
static string dbInstPkgs = "pkginst";
static string dbPrebuilts = "prebuilts";
+static string dbNetSources = "netsources";


static string nixSourcesDir;
@@ -116,6 +117,65 @@ void enumDB(const string & dbname, DBPairs & contents)
}


+/* Download object referenced by the given URL into the sources
+   directory. Return the file name it was downloaded to. */
+string fetchURL(string url)
+{
+    string filename = baseNameOf(url);
+    string fullname = nixSourcesDir + "/" + filename;
+    struct stat st;
+    if (stat(fullname.c_str(), &st)) {
+        cerr << "fetching " << url << endl;
+        /* !!! quoting */
+        string shellCmd =
+            "cd " + nixSourcesDir + " && wget --quiet -N \"" + url + "\"";
+        int res = system(shellCmd.c_str());
+        if (WEXITSTATUS(res) != 0)
+            throw Error("cannot fetch " + url);
+    }
+    return fullname;
+}
+
+
+/* Obtain an object with the given hash. If a file with that hash is
+   known to exist in the local file system (as indicated by the dbRefs
+   database), we use that. Otherwise, we attempt to fetch it from the
+   network (using dbNetSources). We verify that the file has the
+   right hash. */
+string getFile(string hash)
+{
+    bool checkedNet = false;
+
+    while (1) {
+
+        string fn, url;
+
+        if (queryDB(dbRefs, hash, fn)) {
+
+            /* Verify that the file hasn't changed. !!! race */
+            if (hashFile(fn) != hash)
+                throw Error("file " + fn + " is stale");
+
+            return fn;
+        }
+
+        if (checkedNet)
+            throw Error("consistency problem: file fetched from " + url +
+                " should have hash " + hash + ", but it doesn't");
+
+        if (!queryDB(dbNetSources, hash, url))
+            throw Error("a file with hash " + hash + " is requested, "
+                "but it is not known to exist locally or on the network");
+
+        checkedNet = true;
+
+        fn = fetchURL(url);
+
+        setDB(dbRefs, hash, fn);
+    }
+}


typedef map<string, string> Params;
@@ -124,14 +184,7 @@ void readPkgDescr(const string & hash,
{
    string pkgfile;

-    if (!queryDB(dbRefs, hash, pkgfile))
-        throw Error("unknown package " + hash);
-
-    // cerr << "reading information about " + hash + " from " + pkgfile + "\n";
-
-    /* Verify that the file hasn't changed. !!! race */
-    if (hashFile(pkgfile) != hash)
-        throw Error("file " + pkgfile + " is stale");
+    pkgfile = getFile(hash);

    ATerm term = ATreadFromNamedFile(pkgfile.c_str());
    if (!term) throw Error("cannot read aterm " + pkgfile);
@@ -199,11 +252,7 @@ void fetchDeps(string hash, Environment & env)

        string file;

-        if (!queryDB(dbRefs, it->second, file))
-            throw Error("unknown file " + it->second);
-
-        if (hashFile(file) != it->second)
-            throw Error("file " + file + " is stale");
+        file = getFile(it->second);

        env[it->first] = file;
    }
@@ -283,17 +332,18 @@ void installPkg(string hash)

    /* Try to use a prebuilt. */
    string prebuiltHash, prebuiltFile;
-    if (queryDB(dbPrebuilts, hash, prebuiltHash) &&
-        queryDB(dbRefs, prebuiltHash, prebuiltFile))
-    {
-        cerr << "substituting prebuilt " << prebuiltFile << endl;
+    if (queryDB(dbPrebuilts, hash, prebuiltHash)) {

-        if (hashFile(prebuiltFile) != prebuiltHash) {
-            cerr << "prebuilt " + prebuiltFile + " is stale\n";
+        try {
+            prebuiltFile = getFile(prebuiltHash);
+        } catch (Error e) {
+            cerr << "cannot obtain prebuilt (ignoring): " << e.what() << endl;
            goto build;
        }

-        int res = system(("tar xvfj " + prebuiltFile).c_str()); // !!! escaping
+        cerr << "substituting prebuilt " << prebuiltFile << endl;
+
+        int res = system(("tar xfj " + prebuiltFile + " 1>&2").c_str()); // !!! escaping
        if (WEXITSTATUS(res) != 0)
            /* This is a fatal error, because path may now
               have clobbered. */
@@ -302,6 +352,8 @@ void installPkg(string hash)
            _exit(0);
        }

        throw Error("no prebuilt available");

    build:

    /* Fill in the environment. We don't bother freeing the
@@ -453,7 +505,7 @@ void exportPkgs(string outDir,
}


-void regPrebuilt(string pkgHash, string prebuiltHash)
+void registerPrebuilt(string pkgHash, string prebuiltHash)
{
    checkHash(pkgHash);
    checkHash(prebuiltHash);
@@ -470,6 +522,14 @@ string registerFile(string filename)
}


+void registerURL(string hash, string url)
+{
+    checkHash(hash);
+    setDB(dbNetSources, hash, url);
+    /* !!! currently we allow only one network source per hash */
+}
+
+
/* This is primarily used for bootstrapping. */
void registerInstalledPkg(string hash, string path)
{
@@ -486,6 +546,7 @@ void initDB()
    openDB(dbRefs, false);
    openDB(dbInstPkgs, false);
    openDB(dbPrebuilts, false);
+    openDB(dbNetSources, false);
}
@@ -623,25 +684,6 @@ void printGraph(Strings::iterator first, Strings::iterator last)
}


-/* Download object referenced by the given URL into the sources
-   directory. Return the file name it was downloaded to. */
-string fetchURL(string url)
-{
-    string filename = baseNameOf(url);
-    string fullname = nixSourcesDir + "/" + filename;
-    struct stat st;
-    if (stat(fullname.c_str(), &st)) {
-        /* !!! quoting */
-        string shellCmd =
-            "cd " + nixSourcesDir + " && wget --quiet -N \"" + url + "\"";
-        int res = system(shellCmd.c_str());
-        if (WEXITSTATUS(res) != 0)
-            throw Error("cannot fetch " + url);
-    }
-    return fullname;
-}


void fetch(string id)
{
    string fn;
@@ -777,9 +819,11 @@ void run(Strings::iterator argCur, Strings::iterator argEnd)
        exportPkgs(*argCur, argCur + 1, argEnd);
    } else if (cmd == "regprebuilt") {
        if (argc != 2) throw argcError;
-        regPrebuilt(*argCur, argCur[1]);
+        registerPrebuilt(*argCur, argCur[1]);
    } else if (cmd == "regfile") {
        for_each(argCur, argEnd, registerFile);
+    } else if (cmd == "regurl") {
+        registerURL(argCur[0], argCur[1]);
    } else if (cmd == "reginst") {
        if (argc != 2) throw argcError;
        registerInstalledPkg(*argCur, argCur[1]);