diff --git a/src/hydra.schema b/src/hydra.schema
index 4a3d1ab4..f9cec9f4 100644
--- a/src/hydra.schema
+++ b/src/hydra.schema
@@ -1,9 +1,38 @@
 create table builds (
-    id            integer primary key autoincrement,
-    timestamp     integer, -- time this build was added to the db (in Unix time)
-    name          text,
+    id            integer primary key autoincrement not null,
+    timestamp     integer not null, -- time this build was added to the db (in Unix time)
+    name          text not null,
     description   text,
-    drvPath       text,
-    outPath       text,
+    drvPath       text not null,
+    outPath       text not null,
     buildStatus   integer -- 0 = succeeded, 1 = failure, ...
 );
+
+
+create table buildProducts (
+    buildId       integer not null,
+    type          text not null, -- "nix-build", "file", "doc", "report", ...
+    subtype       text not null, -- "sources", "rpm", ...
+    path          text not null,
+    primary key   (buildId, type, subType),
+    foreign key   (buildId) references builds(id) on delete cascade -- ignored by sqlite
+);
+
+
+create table buildLogs (
+    buildId       integer not null,
+    logPhase      text not null,
+    path          text not null,
+    type          text not null,
+    primary key   (buildId, logPhase),
+    foreign key   (buildId) references builds(id) on delete cascade -- ignored by sqlite
+);
+
+
+-- Emulate "on delete cascade" foreign key constraints.
+create trigger cascadeBuildDeletion
+  before delete on builds
+  for each row begin
+    delete from buildLogs where buildId = old.id;
+    delete from buildProducts where buildId = old.id;
+  end;
diff --git a/src/scheduler.pl b/src/scheduler.pl
index c5d968e0..cb57741d 100644
--- a/src/scheduler.pl
+++ b/src/scheduler.pl
@@ -3,12 +3,16 @@
 use strict;
 use XML::Simple;
 use DBI;
+use File::Basename;
+
+
+my $jobsFile = "../test.nix";
 
 
 my $dbh = DBI->connect("dbi:SQLite:dbname=hydra.sqlite", "", "");
 
 
-my $jobsXml = `nix-env -f ../test.nix --query --available "*" --attr-path --out-path --drv-path --meta --xml --system-filter "*"`
+my $jobsXml = `nix-env -f $jobsFile --query --available "*" --attr-path --out-path --drv-path --meta --xml --system-filter "*"`
     or die "cannot evaluate the Nix expression containing the job definitions: $?";
 
 print "$jobsXml";
@@ -20,20 +24,49 @@ my $jobs = XMLin($jobsXml, KeyAttr => ['attrPath', 'name'])
 foreach my $jobName (keys %{$jobs->{item}}) {
     my $job = $jobs->{item}->{$jobName};
 
-    print "JOB: $jobName ($job->{meta}->{description}->{value})\n";
+    my $description = defined $job->{meta}->{description} ? $job->{meta}->{description}->{value} : "";
+    print "JOB: $jobName ($description)\n";
 
     my $outPath = $job->{outPath};
+    my $drvPath = $job->{drvPath};
 
     if (scalar(@{$dbh->selectall_arrayref("select * from builds where name = ? and outPath = ?", {}, $jobName, $outPath)}) > 0) {
         print "  already done\n";
         next;
     }
 
-    my $res = system("nix-build ../test.nix --attr $jobName");
+    my $res = system("nix-build $jobsFile --attr $jobName");
 
     my $buildStatus = $res == 0 ? 0 : 1;
 
+    $dbh->begin_work;
+
     $dbh->prepare("insert into builds(timestamp, name, description, drvPath, outPath, buildStatus) values(?, ?, ?, ?, ?, ?)")
-        ->execute(time(), $jobName, $job->{meta}->{description}->{value}, $job->{drvPath}, $outPath, $buildStatus);
-    print "  db id = ", $dbh->last_insert_id(undef, undef, undef, undef), "\n";
+        ->execute(time(), $jobName, $description, $drvPath, $outPath, $buildStatus);
+
+    my $buildId = $dbh->last_insert_id(undef, undef, undef, undef);
+    print "  db id = $buildId\n";
+
+    if ($buildStatus == 0) {
+
+        $dbh->prepare("insert into buildProducts(buildId, type, subtype, path) values(?, ?, ?, ?)")
+            ->execute($buildId, "nix-build", "", $outPath);
+
+        my $logPath = "/nix/var/log/nix/drvs/" . basename $drvPath;
+        if (-e $logPath) {
+            print "  LOG $logPath\n";
+            $dbh->prepare("insert into buildLogs(buildId, logPhase, path, type) values(?, ?, ?, ?)")
+                ->execute($buildId, "full", $logPath, "raw");
+        }
+
+        if (-e "$outPath/log") {
+            foreach my $logPath (glob "$outPath/log/*") {
+                print "  LOG $logPath\n";
+                $dbh->prepare("insert into buildLogs(buildId, logPhase, path, type) values(?, ?, ?, ?)")
+                    ->execute($buildId, basename($logPath), $logPath, "raw");
+            }
+        }
+    }
+
+    $dbh->commit;
 }
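Not part of the patch, but a minimal Perl/DBI sketch of how a consumer could read back what the scheduler now records, assuming the schema above and the same hydra.sqlite database; the script itself and its usage are hypothetical, not something this change adds.

    #! /usr/bin/perl -w

    use strict;
    use DBI;

    # Open the same SQLite database the scheduler writes to.
    my $dbh = DBI->connect("dbi:SQLite:dbname=hydra.sqlite", "", "", { RaiseError => 1 });

    die "usage: $0 BUILD-ID\n" unless @ARGV == 1;
    my $buildId = $ARGV[0];

    # The build record itself.
    my ($name, $status) = $dbh->selectrow_array(
        "select name, buildStatus from builds where id = ?", {}, $buildId);
    die "no build with id $buildId\n" unless defined $name;
    print "build $buildId ($name): ", $status == 0 ? "succeeded" : "failed", "\n";

    # Products registered for this build (currently just the "nix-build" output path).
    foreach my $product (@{$dbh->selectall_arrayref(
        "select type, subtype, path from buildProducts where buildId = ?", {}, $buildId)})
    {
        my ($type, $subtype, $path) = @$product;
        print "  product: $type/$subtype -> $path\n";
    }

    # Logs: the "full" nix-build log plus any per-phase logs found under $outPath/log.
    foreach my $log (@{$dbh->selectall_arrayref(
        "select logPhase, path from buildLogs where buildId = ?", {}, $buildId)})
    {
        my ($phase, $path) = @$log;
        print "  log ($phase): $path\n";
    }

Deleting a row from builds would also exercise the cascadeBuildDeletion trigger, which exists because the targeted SQLite version parses but does not enforce the "on delete cascade" foreign key clauses.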