/* nix.cc — the Nix package-management command-line tool (early
   prototype).  NOTE(review): the page-scrape header that preceded
   this file has been removed; commit-timestamp lines that the scrape
   interleaved into the code are removed throughout. */
#include <algorithm>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <list>
#include <map>
#include <set>
#include <vector>

#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

extern "C" {
#include <aterm1.h>
}

#include "util.hh"
#include "hash.hh"
#include "db.hh"
using namespace std;
/* Database names. */

/* dbRefs :: Hash -> FileName

   Maintains a mapping from hashes to filenames within the NixValues
   directory.  This mapping is for performance only; it can be
   reconstructed unambiguously from the nixValues directory.  The
   reason is that names in this directory are not printed hashes but
   also might carry some descriptive element (e.g.,
   "aterm-2.0-ae749a...").  Without this mapping, looking up a value
   would take O(n) time because we would need to read the entire
   directory. */
static string dbRefs = "refs";

/* dbNFs :: Hash -> Hash

   Each pair (h1, h2) in this mapping records the fact that h2 is a
   normal form obtained by evaluating the value h1.

   We would really like to have h2 be the hash of the object
   referenced by h2.  However, that gives a cyclic dependency: to
   compute the hash (and thus the file name) of the object, we need to
   compute the object, but to do that, we need the file name of the
   object.

   So for now we abandon the requirement that

     hashFile(dbRefs[h]) == h.

   I.e., this property does not hold for computed normal forms.
   Rather, we use h2 = hash(h1).  This allows dbNFs to be
   reconstructed.  Perhaps using a pseudo random number would be
   better to prevent the system from being subverted in some way. */
static string dbNFs = "nfs";

/* dbNetSources :: Hash -> URL

   Each pair (hash, url) in this mapping states that the object
   identified by hash can be obtained by fetching the object pointed
   to by url.

   TODO: this should be Hash -> [URL]

   TODO: factor this out into a separate tool? */
static string dbNetSources = "netsources";

/* dbInstPkgs :: Hash -> Path

   Maps the hash of a package descriptor to the path where the
   corresponding package is installed.

   NOTE(review): dbInstPkgs and dbPrebuilts are used throughout this
   file but their declarations were missing from the scraped text;
   the table names below are reconstructed — confirm against the
   original source. */
static string dbInstPkgs = "pkginst";

/* dbPrebuilts :: Hash -> Hash

   Maps the hash of a package descriptor to the hash of a tarball
   (an "export") that can be unpacked in lieu of building. */
static string dbPrebuilts = "prebuilts";

/* Path names. */

/* nixValues is the directory where all Nix values (both files and
   directories, and both normal and non-normal forms) live. */
static string nixValues;

/* nixLogDir is the directory where we log evaluations. */
static string nixLogDir;

/* nixDB is the file name of the Berkeley DB database where we
   maintain the dbXXX mappings. */
static string nixDB;

/* Abstract syntax of Nix values:

   e := Hash(h) -- external reference
      | Str(s) -- string constant
      | Bool(b) -- boolean constant
      | Name(e) -- "&" operator; pointer (file name) formation
      | App(e, e) -- application
      | Lam(x, e) -- lambda abstraction
      | Exec(platform, e, e*)
        -- primitive; execute e with args e* on platform
      ;
*/
/* Download the object referenced by the given URL into the sources
   directory.  Return the file name it was downloaded to.  If a file
   with the same base name already exists, wget's -N flag only
   re-downloads it when the remote copy is newer. */
string fetchURL(string url)
{
    string filename = baseNameOf(url);
    string fullname = nixSourcesDir + "/" + filename;
    struct stat st;
    if (stat(fullname.c_str(), &st)) {
        cerr << "fetching " << url << endl;
        /* !!! quoting: url is interpolated into a shell command and
           should be escaped properly. */
        string shellCmd =
            "cd " + nixSourcesDir + " && wget --quiet -N \"" + url + "\"";
        int res = system(shellCmd.c_str());
        /* system() returns -1 when the shell could not be spawned at
           all; check that (and abnormal termination) before looking
           at the exit status. */
        if (res == -1 || !WIFEXITED(res) || WEXITSTATUS(res) != 0)
            throw Error("cannot fetch " + url);
    }
    return fullname;
}
/* Obtain an object with the given hash. If a file with that hash is
known to exist in the local file system (as indicated by the dbRefs
database), we use that. Otherwise, we attempt to fetch it from the
network (using dbNetSources). We verify that the file has the
right hash. */
2003-06-15 13:41:32 +00:00
string getFile(Hash hash)
{
bool checkedNet = false;
while (1) {
string fn, url;
2003-05-26 13:45:00 +00:00
if (queryDB(nixDB, dbRefs, hash, fn)) {
/* Verify that the file hasn't changed. !!! race */
if (hashFile(fn) != hash)
throw Error("file " + fn + " is stale");
return fn;
}
if (checkedNet)
throw Error("consistency problem: file fetched from " + url +
2003-06-15 13:41:32 +00:00
" should have hash " + (string) hash + ", but it doesn't");
2003-05-26 13:45:00 +00:00
if (!queryDB(nixDB, dbNetSources, hash, url))
2003-06-15 13:41:32 +00:00
throw Error("a file with hash " + (string) hash + " is requested, "
"but it is not known to exist locally or on the network");
checkedNet = true;
fn = fetchURL(url);
2003-05-26 13:45:00 +00:00
setDB(nixDB, dbRefs, hash, fn);
}
}
typedef map<string, string> Params;
2003-06-15 13:41:32 +00:00
void readPkgDescr(Hash hash,
Params & pkgImports, Params & fileImports, Params & arguments)
{
string pkgfile;
2003-03-13 16:28:32 +00:00
pkgfile = getFile(hash);
2003-04-01 14:00:47 +00:00
ATerm term = ATreadFromNamedFile(pkgfile.c_str());
if (!term) throw Error("cannot read aterm " + pkgfile);
2003-03-13 16:28:32 +00:00
2003-04-01 14:00:47 +00:00
ATerm bindings;
if (!ATmatch(term, "Descr(<term>)", &bindings))
throw Error("invalid term in " + pkgfile);
2003-03-13 16:28:32 +00:00
2003-04-01 14:00:47 +00:00
char * cname;
ATerm value;
while (ATmatch(bindings, "[Bind(<str>, <term>), <list>]",
2003-06-15 13:41:32 +00:00
&cname, &value, &bindings))
2003-04-01 14:00:47 +00:00
{
string name(cname);
char * arg;
if (ATmatch(value, "Pkg(<str>)", &arg)) {
2003-06-15 13:41:32 +00:00
parseHash(arg);
2003-04-01 14:00:47 +00:00
pkgImports[name] = arg;
} else if (ATmatch(value, "File(<str>)", &arg)) {
2003-06-15 13:41:32 +00:00
parseHash(arg);
2003-04-01 14:00:47 +00:00
fileImports[name] = arg;
} else if (ATmatch(value, "Str(<str>)", &arg))
arguments[name] = arg;
else if (ATmatch(value, "Bool(True)"))
arguments[name] = "1";
else if (ATmatch(value, "Bool(False)"))
arguments[name] = "";
else {
ATprintf("%t\n", value);
throw Error("invalid binding in " + pkgfile);
}
}
}
2003-06-15 13:41:32 +00:00
string getPkg(Hash hash);
2003-03-24 11:50:20 +00:00
typedef map<string, string> Environment;
2003-06-15 13:41:32 +00:00
void fetchDeps(Hash hash, Environment & env)
{
/* Read the package description file. */
Params pkgImports, fileImports, arguments;
readPkgDescr(hash, pkgImports, fileImports, arguments);
/* Recursively fetch all the dependencies, filling in the
environment as we go along. */
for (Params::iterator it = pkgImports.begin();
it != pkgImports.end(); it++)
{
cerr << "fetching package dependency "
<< it->first << " <- " << it->second
<< endl;
2003-06-15 13:41:32 +00:00
env[it->first] = getPkg(parseHash(it->second));
}
for (Params::iterator it = fileImports.begin();
it != fileImports.end(); it++)
{
cerr << "fetching file dependency "
<< it->first << " = " << it->second
<< endl;
2003-03-13 16:28:32 +00:00
string file;
2003-03-13 16:28:32 +00:00
2003-06-15 13:41:32 +00:00
file = getFile(parseHash(it->second));
env[it->first] = file;
}
string buildSystem;
for (Params::iterator it = arguments.begin();
it != arguments.end(); it++)
{
env[it->first] = it->second;
if (it->first == "system")
buildSystem = it->second;
}
if (buildSystem != thisSystem)
throw Error("descriptor requires a `" + buildSystem +
"' but I am a `" + thisSystem + "'");
2003-03-24 11:50:20 +00:00
}
2003-03-24 11:50:20 +00:00
/* Return the value bound to key in env; throw Error if the key is
   absent. */
string getFromEnv(const Environment & env, const string & key)
{
    Environment::const_iterator found = env.find(key);
    if (found != env.end())
        return found->second;
    throw Error("key " + key + " not found in the environment");
}
2003-06-15 13:41:32 +00:00
/* Return the `id' argument of the descriptor identified by hash;
   throws (via getFromEnv) if the descriptor has no `id'. */
string queryPkgId(Hash hash)
{
    Params pkgs, files, args;
    readPkgDescr(hash, pkgs, files, args);
    return getFromEnv(args, "id");
}
2003-06-15 13:41:32 +00:00
/* Build and register the package described by the descriptor with
   the given hash.  Creates the target directory, forks a child that
   either unpacks a registered prebuilt tarball or executes the
   descriptor's `build' program with the dependency environment, logs
   the build via a `tee' pipe, strips write permission from the
   result, and finally records the installation path in dbInstPkgs. */
void installPkg(Hash hash)
{
    string pkgfile;
    string src;
    string path;
    string cmd;
    string builder;
    Environment env;

    /* Fetch dependencies. */
    fetchDeps(hash, env);

    builder = getFromEnv(env, "build");

    string id = getFromEnv(env, "id");

    /* Construct a path for the installed package. */
    path = nixHomeDir + "/pkg/" + id + "-" + (string) hash;

    /* Create the path. */
    if (mkdir(path.c_str(), 0777))
        throw Error("unable to create directory " + path);

    /* Create a log file. */
    string logFileName =
        nixLogDir + "/" + id + "-" + (string) hash + ".log";
    /* !!! auto-pclose on exit */
    FILE * logFile = popen(("tee " + logFileName + " >&2").c_str(), "w"); /* !!! escaping */
    if (!logFile)
        throw Error("unable to create log file " + logFileName);

    try {

        /* Fork a child to build the package. */
        pid_t pid;
        switch (pid = fork()) {

        case -1:
            throw Error("unable to fork");

        case 0:

            try { /* child */

                /* Go to the build directory. */
                if (chdir(path.c_str())) {
                    cerr << "unable to chdir to package directory\n";
                    _exit(1);
                }

                /* Try to use a prebuilt.  If one is registered but
                   cannot be obtained, fall through to a regular
                   build.  (This replaces a `goto' that jumped over
                   variable initialisations.) */
                string prebuiltHashS, prebuiltFile;
                bool usePrebuilt = false;
                if (queryDB(nixDB, dbPrebuilts, hash, prebuiltHashS)) {
                    usePrebuilt = true;
                    try {
                        prebuiltFile = getFile(parseHash(prebuiltHashS));
                    } catch (Error & e) { /* was caught by value */
                        cerr << "cannot obtain prebuilt (ignoring): " << e.what() << endl;
                        usePrebuilt = false;
                    }
                }

                if (usePrebuilt) {
                    cerr << "substituting prebuilt " << prebuiltFile << endl;

                    int res = system(("tar xfj " + prebuiltFile + " 1>&2").c_str()); // !!! escaping
                    if (res == -1 || !WIFEXITED(res) || WEXITSTATUS(res) != 0)
                        /* This is a fatal error, because path may now
                           have been clobbered. */
                        throw Error("cannot unpack " + prebuiltFile);

                    _exit(0);
                }

                /* Fill in the environment.  We don't bother freeing
                   the strings, since we'll exec or die soon
                   anyway. */
                vector<const char *> env2;
                for (Environment::iterator it = env.begin();
                     it != env.end(); it++)
                    env2.push_back((new string(it->first + "=" + it->second))->c_str());
                env2.push_back(0); /* envp must be null-terminated */

                /* Dup the log handle into stderr. */
                if (dup2(fileno(logFile), STDERR_FILENO) == -1)
                    throw Error("cannot pipe standard error into log file: " + string(strerror(errno)));

                /* Dup stderr to stdout (the original comment said
                   "stdin", but the code redirects stdout). */
                if (dup2(STDERR_FILENO, STDOUT_FILENO) == -1)
                    throw Error("cannot dup stderr into stdout");

                /* Execute the builder.  This should not return.  The
                   argument-list terminator must be a null pointer,
                   not a bare 0, in a varargs call. */
                execle(builder.c_str(), builder.c_str(), (char *) 0, &env2[0]);
                throw Error("unable to execute builder: " +
                    string(strerror(errno)));

            } catch (exception & e) {
                cerr << "build error: " << e.what() << endl;
                _exit(1);
            }

        }

        /* parent */

        /* Close the logging pipe.  Note that this should not cause
           the logger to exit until builder exits (because the latter
           has an open file handle to the former). */
        pclose(logFile);

        /* Wait for the child to finish. */
        int status;
        if (waitpid(pid, &status, 0) != pid)
            throw Error("unable to wait for child");

        if (!WIFEXITED(status) || WEXITSTATUS(status) != 0)
            throw Error("unable to build package");

        /* Remove write permission from the build directory. */
        int res = system(("chmod -R -w " + path).c_str()); // !!! escaping
        if (res == -1 || WEXITSTATUS(res) != 0)
            throw Error("cannot remove write permission from " + path);

    } catch (exception &) {
        /* Deliberately left disabled: keep the partial build around
           for debugging rather than deleting it. */
        // system(("rm -rf " + path).c_str());
        throw;
    }

    setDB(nixDB, dbInstPkgs, hash, path);
}
2003-03-13 16:28:32 +00:00
2003-06-15 13:41:32 +00:00
/* Return the installation path of the package whose descriptor has
   the given hash, installing it first if it is not yet registered in
   dbInstPkgs. */
string getPkg(Hash hash)
{
    string path;
    while (!queryDB(nixDB, dbInstPkgs, hash, path))
        installPkg(hash);
    return path;
}
2003-06-15 13:41:32 +00:00
/* Execute the `run' program of the descriptor identified by hash,
   passing the command-line arguments in [firstArg, lastArg).  The
   dependency environment is exported via putenv.  Does not return on
   success (the process image is replaced by execv). */
void runPkg(Hash hash,
    Strings::iterator firstArg,
    Strings::iterator lastArg)
{
    string src;
    string path;
    string cmd;
    string runner;
    Environment env;

    /* Fetch dependencies. */
    fetchDeps(hash, env);

    runner = getFromEnv(env, "run");

    /* Fill in the environment.  We don't bother freeing the
       strings, since we'll exec or die soon anyway. */
    for (Environment::iterator it = env.begin();
         it != env.end(); it++)
    {
        string * s = new string(it->first + "=" + it->second);
        putenv((char *) s->c_str());
    }

    /* Create the list of arguments.  (The original used a
       stack array sized by env.size(), which overflows when the
       number of arguments exceeds the size of the environment.) */
    vector<const char *> args2;
    args2.push_back(runner.c_str());
    for (Strings::const_iterator it = firstArg; it != lastArg; it++)
        args2.push_back(it->c_str());
    args2.push_back(0); /* argv must be null-terminated */

    /* Execute the runner.  This should not return. */
    execv(runner.c_str(), (char * *) &args2[0]);
    cerr << strerror(errno) << endl;
    throw Error("unable to execute runner");
}
2003-06-15 13:41:32 +00:00
/* Realise the descriptor identified by hash: a build descriptor is
   installed via getPkg; a run descriptor only has its dependencies
   fetched.  Anything else is rejected. */
void ensurePkg(Hash hash)
{
    Params pkgImports, fileImports, arguments;
    readPkgDescr(hash, pkgImports, fileImports, arguments);

    bool hasBuild = fileImports.find("build") != fileImports.end();
    bool hasRun = fileImports.find("run") != fileImports.end();

    if (hasBuild)
        getPkg(hash);
    else if (hasRun) {
        Environment env;
        fetchDeps(hash, env);
    } else
        throw Error("invalid descriptor");
}
2003-06-15 13:41:32 +00:00
/* Uninstall the package identified by hash: delete its directory and
   unregister it from dbInstPkgs.  Deletion errors are reported but
   ignored; the registration is removed regardless (hence the
   original "not a bug ???" remark — the entry must go away even if
   the rm partially fails). */
void delPkg(Hash hash)
{
    string path;
    if (queryDB(nixDB, dbInstPkgs, hash, path)) {
        int res = system(("chmod -R +w " + path + " && rm -rf " + path).c_str()); // !!! escaping
        delDB(nixDB, dbInstPkgs, hash); // not a bug ???
        if (res == -1 || WEXITSTATUS(res) != 0)
            cerr << "errors deleting " + path + ", ignoring" << endl;
    }
}
/* Export each installed package named by a hash in [firstHash,
   lastHash) as a bzip2'ed tarball in outDir, named
   ID-PKGHASH-TARHASH.tar.bz2.  The tarball is built under a
   temporary name and renamed into place afterwards. */
void exportPkgs(string outDir,
    Strings::iterator firstHash,
    Strings::iterator lastHash)
{
    outDir = absPath(outDir);

    for (Strings::iterator it = firstHash; it != lastHash; it++) {
        Hash hash = parseHash(*it);
        string pkgDir = getPkg(hash);
        string tmpFile = outDir + "/export_tmp";

        string cmd = "cd " + pkgDir + " && tar cfj " + tmpFile + " .";
        int res = system(cmd.c_str()); // !!! escaping
        if (res == -1 || !WIFEXITED(res) || WEXITSTATUS(res) != 0)
            throw Error("cannot tar " + pkgDir);

        string prebuiltHash = hashFile(tmpFile);
        string pkgId = queryPkgId(hash);
        string prebuiltFile = outDir + "/" +
            pkgId + "-" + (string) hash + "-" + prebuiltHash + ".tar.bz2";

        /* rename(2) can fail (e.g. across filesystems); previously
           the result was silently ignored. */
        if (rename(tmpFile.c_str(), prebuiltFile.c_str()) == -1)
            throw Error("cannot rename " + tmpFile + " to " + prebuiltFile);
    }
}
2003-06-15 13:41:32 +00:00
/* Record that the export identified by prebuiltHash can be unpacked
   instead of building the package identified by pkgHash. */
void registerPrebuilt(Hash pkgHash, Hash prebuiltHash)
{
    setDB(nixDB, dbPrebuilts, pkgHash, prebuiltHash);
}
2003-06-15 13:41:32 +00:00
/* Register an existing file in dbRefs under its own hash, and return
   that hash. */
Hash registerFile(string filename)
{
    filename = absPath(filename);
    Hash hash = hashFile(filename);
    setDB(nixDB, dbRefs, hash, filename);
    return hash;
}
2003-06-15 13:41:32 +00:00
/* Record that the object with the given hash can be fetched from
   url. */
void registerURL(Hash hash, string url)
{
    setDB(nixDB, dbNetSources, hash, url);
    /* !!! currently we allow only one network source per hash */
}
2003-03-13 16:28:32 +00:00
/* This is primarily used for bootstrapping. */
2003-06-15 13:41:32 +00:00
void registerInstalledPkg(Hash hash, string path)
2003-03-13 16:28:32 +00:00
{
if (path == "")
2003-05-26 13:45:00 +00:00
delDB(nixDB, dbInstPkgs, hash);
2003-03-13 16:28:32 +00:00
else
2003-05-26 13:45:00 +00:00
setDB(nixDB, dbInstPkgs, hash, path);
}
void initDB()
{
2003-05-26 13:45:00 +00:00
createDB(nixDB, dbRefs);
createDB(nixDB, dbInstPkgs);
createDB(nixDB, dbPrebuilts);
createDB(nixDB, dbNetSources);
2003-03-13 16:28:32 +00:00
}
void verifyDB()
{
/* Check that all file references are still valid. */
DBPairs fileRefs;
2003-05-26 13:45:00 +00:00
enumDB(nixDB, dbRefs, fileRefs);
for (DBPairs::iterator it = fileRefs.begin();
it != fileRefs.end(); it++)
{
try {
2003-06-15 13:41:32 +00:00
Hash hash = parseHash(it->first);
if (hashFile(it->second) != hash) {
cerr << "file " << it->second << " has changed\n";
2003-05-26 13:45:00 +00:00
delDB(nixDB, dbRefs, it->first);
}
2003-06-15 13:41:32 +00:00
} catch (Error e) { /* !!! better error check */
cerr << "error: " << e.what() << endl;
2003-05-26 13:45:00 +00:00
delDB(nixDB, dbRefs, it->first);
}
}
/* Check that all installed packages are still there. */
DBPairs instPkgs;
2003-05-26 13:45:00 +00:00
enumDB(nixDB, dbInstPkgs, instPkgs);
for (DBPairs::iterator it = instPkgs.begin();
it != instPkgs.end(); it++)
{
struct stat st;
if (stat(it->second.c_str(), &st) == -1) {
cerr << "package " << it->first << " has disappeared\n";
2003-05-26 13:45:00 +00:00
delDB(nixDB, dbInstPkgs, it->first);
}
}
/* TODO: check that all directories in pkgHome are installed
packages. */
}
void listInstalledPkgs()
{
DBPairs instPkgs;
2003-05-26 13:45:00 +00:00
enumDB(nixDB, dbInstPkgs, instPkgs);
for (DBPairs::iterator it = instPkgs.begin();
it != instPkgs.end(); it++)
cout << it->first << endl;
}
/* For each hash in [first, last), print the hash and the `id' of its
   descriptor; descriptors that cannot be read are reported as
   missing rather than aborting the listing. */
void printInfo(Strings::iterator first, Strings::iterator last)
{
    for (Strings::iterator it = first; it != last; it++) {
        try {
            cout << *it << " " << queryPkgId(parseHash(*it)) << endl;
        } catch (Error & e) { // !!! more specific
            cout << *it << " (descriptor missing)\n";
        }
    }
}
/* Compute into result the closure of the descriptor hashes in
   [first, last) under the package-import relation, by breadth-first
   traversal of each descriptor's pkgImports. */
void computeClosure(Strings::iterator first, Strings::iterator last,
    set<string> & result)
{
    list<string> workList(first, last);
    set<string> doneSet;

    while (!workList.empty()) {
        Hash hash = parseHash(workList.front());
        workList.pop_front();

        if (doneSet.find(hash) == doneSet.end()) {
            doneSet.insert(hash);

            Params pkgImports, fileImports, arguments;
            readPkgDescr(hash, pkgImports, fileImports, arguments);

            for (Params::iterator it = pkgImports.begin();
                 it != pkgImports.end(); it++)
                workList.push_back(it->second);
        }
    }

    result = doneSet;
}
/* Print the import closure of the given descriptor hashes, one hash
   per line, on stdout. */
void printClosure(Strings::iterator first, Strings::iterator last)
{
    set<string> closure;
    computeClosure(first, last, closure);
    for (set<string>::iterator i = closure.begin();
         i != closure.end(); i++)
        cout << *i << endl;
}
/* Wrap s in double quotes for use as a node name in dot output.
   (No escaping of embedded quotes is performed, matching the
   original.) */
string dotQuote(const string & s)
{
    string quoted = "\"";
    quoted += s;
    quoted += "\"";
    return quoted;
}
/* Emit a dot(1) digraph of the import closure of the given
   descriptor hashes on stdout: one node per descriptor (labelled
   with its `id') and one edge per package import. */
void printGraph(Strings::iterator first, Strings::iterator last)
{
    set<string> allHashes;
    computeClosure(first, last, allHashes);

    cout << "digraph G {\n";

    for (set<string>::iterator it = allHashes.begin();
         it != allHashes.end(); it++)
    {
        Params pkgImports, fileImports, arguments;
        readPkgDescr(parseHash(*it), pkgImports, fileImports, arguments);

        cout << dotQuote(*it) << "[label = \""
             << getFromEnv(arguments, "id")
             << "\"];\n";

        for (Params::iterator it2 = pkgImports.begin();
             it2 != pkgImports.end(); it2++)
            cout << dotQuote(it2->second) << " -> "
                 << dotQuote(*it) << ";\n";
    }

    cout << "}\n";
}
/* Fetch the object identified by id — currently only URLs are
   supported; bare hashes are not yet implemented — register it in
   dbRefs, and print its hash on stdout. */
void fetch(string id)
{
    string fn;

    /* Fetch the object referenced by id. */
    if (isHash(id)) {
        throw Error("not implemented");
    } else {
        fn = fetchURL(id);
    }

    /* Register it by hash. */
    Hash hash = registerFile(fn);
    cout << (string) hash << endl;
}
/* Fetch every object named in [first, last) (see fetch(string)
   above). */
void fetch(Strings::iterator first, Strings::iterator last)
{
    for (Strings::iterator it = first; it != last; it++)
        fetch(*it);
}
2003-03-20 16:53:00 +00:00
void printUsage()
2003-03-13 16:28:32 +00:00
{
2003-03-20 16:53:00 +00:00
cerr <<
2003-06-15 13:41:32 +00:00
"Usage: nix SUBCOMMAND OPTIONS...\n\
\n\
Subcommands:\n\
\n\
init\n\
Initialize the database.\n\
\n\
verify\n\
Remove stale entries from the database.\n\
\n\
regfile FILENAME...\n\
Register each FILENAME keyed by its hash.\n\
\n\
reginst HASH PATH\n\
Register an installed package.\n\
\n\
getpkg HASH...\n\
For each HASH, ensure that the package referenced by HASH is\n\
installed. Print out the path of the installation on stdout.\n\
\n\
delpkg HASH...\n\
Uninstall the package referenced by each HASH, disregarding any\n\
dependencies that other packages may have on HASH.\n\
\n\
listinst\n\
Prints a list of installed packages.\n\
\n\
run HASH ARGS...\n\
Run the descriptor referenced by HASH with the given arguments.\n\
\n\
ensure HASH...\n\
Like getpkg, but if HASH refers to a run descriptor, fetch only\n\
the dependencies.\n\
\n\
export DIR HASH...\n\
Export installed packages to DIR.\n\
\n\
regprebuilt HASH1 HASH2\n\
Inform Nix that an export HASH2 can be used to fast-build HASH1.\n\
\n\
info HASH...\n\
Print information about the specified descriptors.\n\
\n\
closure HASH...\n\
Determine the closure of the set of descriptors under the import\n\
relation, starting at the given roots.\n\
\n\
graph HASH...\n\
Like closure, but print a dot graph specification.\n\
\n\
fetch ID...\n\
Fetch the objects identified by ID and place them in the Nix\n\
sources directory. ID can be a hash or URL. Print out the hash\n\
of the object.\n\
2003-03-20 16:53:00 +00:00
";
}
2003-03-13 16:28:32 +00:00
2003-03-20 16:53:00 +00:00
/* Initialise global paths, parse the global flags, then dispatch on
   the subcommand name.  Throws UsageError for bad invocations. */
void run(Strings::iterator argCur, Strings::iterator argEnd)
{
    umask(0022);

    char * homeDir = getenv(nixHomeDirEnvVar.c_str());
    if (homeDir) nixHomeDir = homeDir;

    nixSourcesDir = nixHomeDir + "/var/nix/sources";
    nixLogDir = nixHomeDir + "/var/log/nix";
    nixDB = nixHomeDir + "/var/nix/pkginfo.db";

    /* Parse the global flags. */
    for ( ; argCur != argEnd; argCur++) {
        string arg(*argCur);
        if (arg == "-h" || arg == "--help") {
            printUsage();
            return;
        } else if (!arg.empty() && arg[0] == '-') {
            /* empty() guard: arg[0] on an empty argv string was
               previously unchecked */
            throw UsageError("invalid option `" + arg + "'");
        } else break;
    }

    UsageError argcError("wrong number of arguments");

    /* Parse the command. */
    if (argCur == argEnd) throw UsageError("no command specified");
    string cmd = *argCur++;
    int argc = argEnd - argCur;

    if (cmd == "init") {
        if (argc != 0) throw argcError;
        initDB();
    } else if (cmd == "verify") {
        if (argc != 0) throw argcError;
        verifyDB();
    } else if (cmd == "getpkg") {
        for (Strings::iterator it = argCur; it != argEnd; it++) {
            string path = getPkg(parseHash(*it));
            cout << path << endl;
        }
    } else if (cmd == "delpkg") {
        for (Strings::iterator it = argCur; it != argEnd; it++)
            delPkg(parseHash(*it));
    } else if (cmd == "run") {
        if (argc < 1) throw argcError;
        runPkg(parseHash(*argCur), argCur + 1, argEnd);
    } else if (cmd == "ensure") {
        for (Strings::iterator it = argCur; it != argEnd; it++)
            ensurePkg(parseHash(*it));
    } else if (cmd == "export") {
        if (argc < 1) throw argcError;
        exportPkgs(*argCur, argCur + 1, argEnd);
    } else if (cmd == "regprebuilt") {
        if (argc != 2) throw argcError;
        registerPrebuilt(parseHash(argCur[0]), parseHash(argCur[1]));
    } else if (cmd == "regfile") {
        for_each(argCur, argEnd, registerFile);
    } else if (cmd == "regurl") {
        /* was missing: argCur[0]/argCur[1] were dereferenced without
           checking the argument count */
        if (argc != 2) throw argcError;
        registerURL(parseHash(argCur[0]), argCur[1]);
    } else if (cmd == "reginst") {
        if (argc != 2) throw argcError;
        registerInstalledPkg(parseHash(argCur[0]), argCur[1]);
    } else if (cmd == "listinst") {
        if (argc != 0) throw argcError;
        listInstalledPkgs();
    } else if (cmd == "info") {
        printInfo(argCur, argEnd);
    } else if (cmd == "closure") {
        printClosure(argCur, argEnd);
    } else if (cmd == "graph") {
        printGraph(argCur, argEnd);
    } else if (cmd == "fetch") {
        fetch(argCur, argEnd);
    } else
        throw UsageError("unknown command: " + string(cmd));
}
2003-03-20 16:53:00 +00:00
int main(int argc, char * * argv)
{
2003-04-01 14:00:47 +00:00
ATerm bottomOfStack;
ATinit(argc, argv, &bottomOfStack);
/* Put the arguments in a vector. */
Strings args;
while (argc--) args.push_back(*argv++);
Strings::iterator argCur = args.begin(), argEnd = args.end();
argCur++;
2003-03-20 16:53:00 +00:00
try {
2003-05-26 13:45:00 +00:00
run(argCur, argEnd);
2003-03-20 16:53:00 +00:00
} catch (UsageError & e) {
cerr << "error: " << e.what() << endl
<< "Try `nix -h' for more information.\n";
return 1;
2003-03-20 16:53:00 +00:00
} catch (exception & e) {
cerr << "error: " << e.what() << endl;
2003-03-13 16:28:32 +00:00
return 1;
}
return 0;
2003-03-13 16:28:32 +00:00
}