* Support variant builds.

Eelco Dolstra 2008-11-06 18:26:29 +00:00
parent 7bbd736d96
commit 279de1a9c2
17 changed files with 198 additions and 94 deletions

View file

@@ -29,7 +29,7 @@ sub getBuild {
 sub index :Path :Args(0) {
     my ( $self, $c ) = @_;
     $c->stash->{template} = 'index.tt';
-    $c->stash->{allBuilds} = [$c->model('DB::Builds')->all];
+    $c->stash->{allBuilds} = [$c->model('DB::Builds')->search(undef, {order_by => "timestamp DESC"})];
     # Get the latest build for each unique job.
     # select * from builds as x where timestamp == (select max(timestamp) from builds where jobName == x.jobName);
     $c->stash->{latestBuilds} = [$c->model('DB::Builds')->search(undef, {order_by => "project, attrName", where => "timestamp == (select max(timestamp) from builds where project == me.project and attrName == me.attrName)"})];

View file

@@ -8,8 +8,8 @@ use base 'DBIx::Class::Schema';
 __PACKAGE__->load_classes;
-# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-05 23:48:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:m+4HRK3Cwlb1lbJ+twj8zw
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:DQCaurV1oArj0odoPHR+zw
 # You can replace this text with custom content, and it will be preserved on regeneration

View file

@@ -24,6 +24,8 @@ __PACKAGE__->add_columns(
   { data_type => "integer", is_nullable => 0, size => undef },
   "path",
   { data_type => "text", is_nullable => 0, size => undef },
+  "value",
+  { data_type => "VARCHAR", is_nullable => 0, size => undef },
 );
 __PACKAGE__->set_primary_key("buildid", "name");
 __PACKAGE__->belongs_to(
@@ -33,8 +35,8 @@ __PACKAGE__->belongs_to(
 );
-# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-05 23:48:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:wqjRb/WbGiyFTBcu8QId3Q
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:ZDtFSXPegKbVEAoM+svosg
 __PACKAGE__->belongs_to(
   "build",

View file

@@ -25,8 +25,8 @@ __PACKAGE__->belongs_to(
 );
-# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-05 23:48:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:lU3XutG22RG9DJdxziFlgg
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:YwyZF3sTsIWvkKAQ5fPtMQ
 # You can replace this text with custom content, and it will be preserved on regeneration

View file

@@ -25,8 +25,8 @@ __PACKAGE__->belongs_to(
 );
-# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-05 23:48:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:lfykD1HZfuy+uc7JM1sVCA
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:m2mxFOI2ZgjLoAzbNhiDlw
 # You can replace this text with custom content, and it will be preserved on regeneration

View file

@@ -34,6 +34,8 @@ __PACKAGE__->add_columns(
   { data_type => "integer", is_nullable => 0, size => undef },
   "stoptime",
   { data_type => "integer", is_nullable => 0, size => undef },
+  "system",
+  { data_type => "text", is_nullable => 0, size => undef },
 );
 __PACKAGE__->set_primary_key("id");
 __PACKAGE__->has_many(
@@ -53,8 +55,8 @@ __PACKAGE__->has_many(
 );
-# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-05 23:48:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:bneV9O2uXIrg3Wuencuj+Q
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:jcNXcVLLxMEddBCUkq5aYg
 __PACKAGE__->has_many(dependentBuildInputs => 'HydraFrontend::Schema::Buildinputs', 'inputid');

View file

@@ -0,0 +1,41 @@
+package HydraFrontend::Schema::Jobsetinputalts;
+
+use strict;
+use warnings;
+
+use base 'DBIx::Class';
+
+__PACKAGE__->load_components("Core");
+__PACKAGE__->table("jobSetInputAlts");
+__PACKAGE__->add_columns(
+  "project",
+  { data_type => "text", is_nullable => 0, size => undef },
+  "jobset",
+  { data_type => "text", is_nullable => 0, size => undef },
+  "input",
+  { data_type => "text", is_nullable => 0, size => undef },
+  "altnr",
+  { data_type => "integer", is_nullable => 0, size => undef },
+  "uri",
+  { data_type => "text", is_nullable => 0, size => undef },
+  "revision",
+  { data_type => "integer", is_nullable => 0, size => undef },
+  "tag",
+  { data_type => "text", is_nullable => 0, size => undef },
+  "value",
+  { data_type => "text", is_nullable => 0, size => undef },
+);
+__PACKAGE__->set_primary_key("project", "jobset", "input", "altnr");
+__PACKAGE__->belongs_to(
+  "jobsetinput",
+  "HydraFrontend::Schema::Jobsetinputs",
+  { jobset => "jobset", name => "input", project => "project" },
+);
+
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:YK+TrYs5Zk+8q+66b3kOUw
+
+# You can replace this text with custom content, and it will be preserved on regeneration
+1;
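An aside, not part of the diff: a minimal sketch of how the new one-to-many relation can be walked from Perl, assuming a HydraFrontend::Schema connection to a database initialised from hydra.sql (the SQLite file name is hypothetical). It uses only accessors declared in this commit (jobsetinputs via the Buildinputs/Jobsetinputs classes, jobsetinputalts, altnr, uri, value), the same ones the scheduler uses further down.

    use strict;
    use warnings;
    use HydraFrontend::Schema;

    # Hypothetical connection string; point it at a database initialised from hydra.sql.
    my $db = HydraFrontend::Schema->connect("dbi:SQLite:dbname=hydra.sqlite");

    foreach my $input ($db->resultset('Jobsetinputs')->all) {
        # Every jobset input now carries one or more alternatives; the scheduler
        # builds each combination of them (the "variant builds" of this commit).
        foreach my $alt ($input->jobsetinputalts) {
            print "input ", $input->name, " (type ", $input->type, ") alt ", $alt->altnr, ": ",
                (defined $alt->value ? $alt->value : ($alt->uri || "")), "\n";
        }
    }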

View file

@@ -10,20 +10,14 @@ __PACKAGE__->table("jobSetInputs");
 __PACKAGE__->add_columns(
   "project",
   { data_type => "text", is_nullable => 0, size => undef },
-  "job",
+  "jobset",
   { data_type => "text", is_nullable => 0, size => undef },
   "name",
   { data_type => "text", is_nullable => 0, size => undef },
   "type",
   { data_type => "text", is_nullable => 0, size => undef },
-  "uri",
-  { data_type => "text", is_nullable => 0, size => undef },
-  "revision",
-  { data_type => "integer", is_nullable => 0, size => undef },
-  "tag",
-  { data_type => "text", is_nullable => 0, size => undef },
 );
-__PACKAGE__->set_primary_key("project", "job", "name");
+__PACKAGE__->set_primary_key("project", "jobset", "name");
 __PACKAGE__->has_many(
   "jobsets",
   "HydraFrontend::Schema::Jobsets",
@@ -36,12 +30,21 @@ __PACKAGE__->has_many(
 __PACKAGE__->belongs_to(
   "jobset",
   "HydraFrontend::Schema::Jobsets",
-  { name => "job", project => "project" },
+  { name => "jobset", project => "project" },
+);
+__PACKAGE__->has_many(
+  "jobsetinputalts",
+  "HydraFrontend::Schema::Jobsetinputalts",
+  {
+    "foreign.input" => "self.name",
+    "foreign.jobset" => "self.jobset",
+    "foreign.project" => "self.project",
+  },
 );
-# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-05 23:48:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:fKqDK1YOZXl88jxNRwEvSA
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:H0KE/7kJ23s4RKFDVRIyUQ
 # You can replace this text with custom content, and it will be preserved on regeneration

View file

@@ -33,12 +33,15 @@ __PACKAGE__->belongs_to(
 __PACKAGE__->has_many(
   "jobsetinputs",
   "HydraFrontend::Schema::Jobsetinputs",
-  { "foreign.job" => "self.name", "foreign.project" => "self.project" },
+  {
+    "foreign.jobset" => "self.name",
+    "foreign.project" => "self.project",
+  },
 );
-# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-05 23:48:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:R+xsUdoLpQ7JnbZagpMqJQ
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:7JksbkRjqTKzHQkOvUkt2g
 # You can replace this text with custom content, and it will be preserved on regeneration

View file

@@ -19,8 +19,8 @@ __PACKAGE__->has_many(
 );
-# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-05 23:48:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:PpPVJuiXpYbj8HN3sA05Gw
+# Created by DBIx::Class::Schema::Loader v0.04005 @ 2008-11-06 19:19:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:F3bS3Kbsxhp9G6klfVPRmg
 # You can replace this text with custom content, and it will be preserved on regeneration

View file

@@ -57,6 +57,9 @@
       <th>Output store path:</th>
       <td><tt>[% build.outpath %]</tt></td>
     </tr>
+    <tr>
+      <th>System:</th>
+      <td><tt>[% build.system %]</tt></td>
     <tr>
       <th>Status:</th>
       <td>
@@ -84,6 +87,8 @@
       <td>
         [% IF input.type == "build" %]
           <a href="[% c.uri_for('/build' input.inputid) %]">Job <tt>[% input.build.project %]:[% input.build.attrname %]</tt> build [% input.inputid %]</a>
+        [% ELSIF input.type == "string" %]
+          <tt>"[% input.value %]"</tt></a>
         [% ELSE %]
           <tt>[% input.uri %]</tt>
         [% END %]
@@ -131,13 +136,14 @@
   <table class="tablesorter">
     <thead>
-      <tr><th>Build</th><th>Input name</th><th>Timestamp</th></tr>
+      <tr><th>Build</th><th>Input name</th><th>System</th><th>Timestamp</th></tr>
     </thead>
     <tbody>
       [% FOREACH input IN build.dependentBuildInputs -%]
         <tr>
           <td><a href="[% c.uri_for('/build' input.buildid.id) %]">Job <tt>[% input.buildid.project %]:[% input.buildid.attrname %]</tt> build [% input.buildid.id %]</a></td>
           <td><tt>[% input.name %]</tt></td>
+          <td><tt>[% input.buildid.system %]</tt></td>
           <td>[% date.format(input.buildid.timestamp, '%Y-%m-%d %H:%M:%S') %]</td>
         </tr>
       [% END -%]

View file

@@ -6,7 +6,7 @@
   <table class="tablesorter">
     <thead>
-      <tr><th></th><th>#</th><th>Project</th><th>Job</th><th>Timestamp</th><th>Description</th></tr>
+      <tr><th></th><th>#</th><th>Project</th><th>Job</th><th>System</th><th>Timestamp</th><th>Description</th></tr>
     </thead>
     <tbody>
       [% FOREACH build IN latestBuilds -%]
@@ -21,7 +21,7 @@
   <table class="tablesorter">
     <thead>
-      <tr><th></th><th>#</th><th>Project</th><th>Job</th><th>Timestamp</th><th>Description</th></tr>
+      <tr><th></th><th>#</th><th>Project</th><th>Job</th><th>System</th><th>Timestamp</th><th>Description</th></tr>
     </thead>
     <tbody>
       [% FOREACH build IN allBuilds -%]

View file

@@ -3,7 +3,7 @@
 <h1>All builds for job <tt>[% jobName %]</tt></h1>
 <table>
-  <tr><th></th><th>Id</th><th>Project</th><th>Job</th><th>Timestamp</th><th>Description</th></tr>
+  <tr><th></th><th>Id</th><th>Project</th><th>Job</th><th>System</th><th>Timestamp</th><th>Description</th></tr>
   [% FOREACH build IN builds -%]
     [% INCLUDE "short-build-info.tt" %]
   [% END -%]

View file

@@ -11,6 +11,7 @@
   <td><a href="[% c.uri_for('/build' build.id) %]">[% build.id %]</a></td>
   <td><a href="[% c.uri_for('/project' build.project) %]"><tt>[% build.project %]</tt></a></td>
   <td><a href="[% c.uri_for('/job' build.project build.attrname) %]"><tt>[% build.jobset %]:[% build.attrname %]</tt></a></td>
+  <td><tt>[% build.system %]</tt></td>
   <td>[% date.format(build.timestamp, '%Y-%m-%d %H:%M:%S') %]</td>
   <td>[% build.description %]</td>
 </tr>

View file

@@ -17,7 +17,8 @@ create table builds (
     buildStatus   integer, -- 0 = succeeded, 1 = Nix build failure, 2 = positive build failure
     errorMsg      text, -- error message in case of a Nix failure
     startTime     integer, -- in Unix time, 0 = used cached build result
-    stopTime      integer
+    stopTime      integer,
+    system        text not null
 );
@@ -30,9 +31,10 @@ create table buildInputs (
     uri           text,
     revision      integer,
     tag           text,
+    value         text,
     inputId       integer, -- build ID of the input, for type == 'build'
-    path          text not null,
+    path          text,
     primary key   (buildId, name),
     foreign key   (buildId) references builds(id) on delete cascade -- ignored by sqlite
@@ -92,12 +94,26 @@ create table jobSets (
 create table jobSetInputs (
     project       text not null,
-    job           text not null,
+    jobset        text not null,
     name          text not null,
-    type          text not null, -- "svn", "cvs", "path", "file"
-    uri           text,
-    revision      integer, -- for svn
-    tag           text, -- for cvs
-    primary key   (project, job, name),
-    foreign key   (project, job) references jobSets(project, name) on delete cascade -- ignored by sqlite
+    type          text not null, -- "svn", "cvs", "path", "file", "string"
+    primary key   (project, jobset, name),
+    foreign key   (project, jobset) references jobSets(project, name) on delete cascade -- ignored by sqlite
+);
+
+create table jobSetInputAlts (
+    project       text not null,
+    jobset        text not null,
+    input         text not null,
+    altnr         integer,
+
+    -- urgh
+    uri           text,
+    revision      integer, -- for type == 'svn'
+    tag           text, -- for type == 'cvs'
+    value         text, -- for type == 'string'
+
+    primary key   (project, jobset, input, altnr),
+    foreign key   (project, jobset, input) references jobSetInputs(project, jobset, name) on delete cascade -- ignored by sqlite
 );

View file

@@ -16,7 +16,7 @@ sub isValidPath {
 sub buildJob {
-    my ($project, $jobset, $jobName, $drvPath, $outPath, $usedInputs) = @_;
+    my ($project, $jobset, $jobName, $drvPath, $outPath, $usedInputs, $system) = @_;
     if (scalar($db->resultset('Builds')->search({project => $project->name, jobset => $jobset->name, attrname => $jobName, outPath => $outPath})) > 0) {
         print " already done\n";
@@ -54,30 +54,23 @@ sub buildJob {
             , buildstatus => $buildStatus
             , starttime => $startTime
             , stoptime => $stopTime
+            , system => $system
             });
         print " build ID = ", $build->id, "\n";
         foreach my $inputName (keys %{$usedInputs}) {
             my $input = $usedInputs->{$inputName};
-            if (defined $input->{orig}) {
-                $db->resultset('Buildinputs')->create(
-                    { buildid => $build->id
-                    , name => $inputName
-                    , type => $input->{orig}->type
-                    , uri => $input->{orig}->uri
-                    , revision => $input->{orig}->revision
-                    , tag => $input->{orig}->tag
-                    , path => $input->{storePath}
-                    });
-            } else {
-                $db->resultset('Buildinputs')->create(
-                    { buildid => $build->id
-                    , name => $inputName
-                    , type => "build"
-                    , inputid => $input->{id}
-                    , path => $input->{storePath}
-                    });
-            }
+            $db->resultset('Buildinputs')->create(
+                { buildid => $build->id
+                , name => $inputName
+                , type => $input->{type}
+                , uri => $input->{uri}
+                #, revision => $input->{orig}->revision
+                #, tag => $input->{orig}->tag
+                , value => $input->{value}
+                , inputid => $input->{id}
+                , path => ($input->{storePath} or "") # !!! temporary hack
+                });
         }
 
         my $logPath = "/nix/var/log/nix/drvs/" . basename $drvPath;
@@ -119,34 +112,32 @@ sub buildJob {
 sub fetchInput {
-    my ($input, $inputInfo) = @_;
+    my ($input, $alt, $inputInfo) = @_;
     my $type = $input->type;
-    my $uri = $input->uri;
 
     if ($type eq "path") {
+        my $uri = $alt->uri;
         my $storePath = `nix-store --add "$uri"`
             or die "cannot copy path $uri to the Nix store";
         chomp $storePath;
         print " copied to $storePath\n";
-        $$inputInfo{$input->name} = {orig => $input, storePath => $storePath};
+        $$inputInfo{$input->name} = {type => $type, uri => $uri, storePath => $storePath};
     }
 
+    elsif ($type eq "string") {
+        die unless defined $alt->value;
+        $$inputInfo{$input->name} = {type => $type, value => $alt->value};
+    }
+
     else {
         die "input `" . $input->type . "' has unknown type `$type'";
     }
 }
 
 
-sub checkJobSet {
-    my ($project, $jobset) = @_;
-
-    my $inputInfo = {};
-    foreach my $input ($jobset->jobsetinputs) {
-        print " INPUT ", $input->name, " (", $input->type, " ", $input->uri, ")\n";
-        fetchInput($input, $inputInfo);
-    }
+sub checkJobSetInstance {
+    my ($project, $jobset, $inputInfo) = @_;
 
     die unless defined $inputInfo->{$jobset->nixexprinput};
     my $nixExprPath = $inputInfo->{$jobset->nixexprinput}->{storePath} . "/" . $jobset->nixexprpath;
@@ -183,43 +174,50 @@ sub checkJobSet {
         foreach my $argName (keys(%{$jobExpr->{function}->{attrspat}->{attr}})) {
             print " needs input $argName\n";
-            my $storePath;
             if (defined $inputInfo->{$argName}) {
                 # The argument name matches an input.
-                $storePath = $inputInfo->{$argName}->{storePath};
                 $$usedInputs{$argName} = $inputInfo->{$argName};
+                if (defined $inputInfo->{$argName}->{storePath}) {
+                    # !!! escaping
+                    $extraArgs .= " --arg $argName '{path = builtins.toPath " . $inputInfo->{$argName}->{storePath} . ";}'";
+                } elsif (defined $inputInfo->{$argName}->{value}) {
+                    $extraArgs .= " --argstr $argName '" . $inputInfo->{$argName}->{value} . "'";
+                }
             }
             else {
                 (my $prevBuild) = $db->resultset('Builds')->search(
                     {project => $project->name, jobset => $jobset->name, attrname => $argName, buildStatus => 0},
                     {order_by => "timestamp DESC", rows => 1});
-                my $storePath;
-                if (defined $prevBuild) {
-                    # The argument name matches a previously built
-                    # job in this jobset. Pick the most recent
-                    # build. !!! refine the selection criteria:
-                    # e.g., most recent successful build.
-                    if (!isValidPath($prevBuild->outpath)) {
-                        die "input path " . $prevBuild->outpath . " has been garbage-collected";
-                    }
-                    $storePath = $prevBuild->outpath;
-                } else {
+                if (!defined $prevBuild) {
                     # !!! reschedule?
                     die "missing input `$argName'";
                 }
+                # The argument name matches a previously built
+                # job in this jobset. Pick the most recent
+                # build. !!! refine the selection criteria:
+                # e.g., most recent successful build.
+                if (!isValidPath($prevBuild->outpath)) {
+                    die "input path " . $prevBuild->outpath . " has been garbage-collected";
+                }
                 $$usedInputs{$argName} =
-                    { storePath => $storePath
+                    { type => "build"
+                    , storePath => $prevBuild->outpath
                     , id => $prevBuild->id
                     };
+                $extraArgs .= " --arg $argName '{path = builtins.toPath " . $prevBuild->outpath . ";}'";
             }
-            $extraArgs .= " --arg $argName '{path = " . $storePath . ";}'";
         }
     }
 
     # Instantiate the store derivation.
+    print $extraArgs, "\n";
     my $drvPath = `nix-instantiate $nixExprPath --attr $jobName $extraArgs`
         or die "cannot evaluate the Nix expression containing the job definitions: $?";
     chomp $drvPath;
@@ -232,14 +230,40 @@ sub checkJobSet {
             or die "cannot parse XML output";
         my $job = $info->{item};
-        die unless !defined $job || $job->{system} ne $jobName;
+        die if !defined $job || $job->{attrPath} ne $jobName;
         my $description = defined $job->{meta}->{description} ? $job->{meta}->{description}->{value} : "";
         die unless $job->{drvPath} eq $drvPath;
         my $outPath = $job->{outPath};
-        buildJob($project, $jobset, $jobName, $drvPath, $outPath, $usedInputs);
+        buildJob($project, $jobset, $jobName, $drvPath, $outPath, $usedInputs, $job->{system});
     }
+};
+
+
+sub checkJobSetAlts {
+    my ($project, $jobset, $inputs, $n, $inputInfo) = @_;
+
+    if ($n >= scalar @{$inputs}) {
+        checkJobSetInstance($project, $jobset, $inputInfo);
+        return;
+    }
+
+    my $input = @{$inputs}[$n];
+
+    foreach my $alt ($input->jobsetinputalts) {
+        print " INPUT ", $input->name, " (type ", $input->type, ") alt ", $alt->altnr, "\n";
+        fetchInput($input, $alt, $inputInfo); # !!! caching
+        checkJobSetAlts($project, $jobset, $inputs, $n + 1, $inputInfo);
+    }
+};
+
+
+sub checkJobSet {
+    my ($project, $jobset) = @_;
+
+    my $inputInfo = {};
+    my @jobsetinputs = $jobset->jobsetinputs;
+
+    checkJobSetAlts($project, $jobset, \@jobsetinputs, 0, $inputInfo);
 }
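For clarity, and separate from the commit itself: the enumeration that checkJobSetAlts performs is a depth-first walk over the Cartesian product of the per-input alternatives. The self-contained sketch below (hypothetical data, plain hashes instead of DBIx::Class rows) shows the same recursion pattern; two alternatives for one input yield two complete assignments, i.e. two variant builds.

    use strict;
    use warnings;

    # Alternatives per input (hypothetical, mirroring the patchelf test data below).
    my %alts = (
        patchelfSrc => ['/home/eelco/Dev/patchelf-wc'],
        system      => ['i686-linux', 'x86_64-linux'],   # two alternatives -> two variants
    );

    # Recurse over the inputs; like checkJobSetAlts, reuse one hash of chosen
    # alternatives and overwrite the entry for the current input on each iteration.
    sub enumerate {
        my ($names, $n, $chosen) = @_;
        if ($n >= scalar @{$names}) {
            print join(", ", map { "$_ = $chosen->{$_}" } @{$names}), "\n";
            return;
        }
        my $name = $names->[$n];
        foreach my $alt (@{$alts{$name}}) {
            $chosen->{$name} = $alt;
            enumerate($names, $n + 1, $chosen);
        }
    }

    enumerate([sort keys %alts], 0, {});

With the jobSetInputAlts rows in the test data at the end of this commit, the patchelf jobset gets exactly these two combinations, one per value of the string input 'system'.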

View file

@@ -1,8 +1,14 @@
 insert into projects(name) values('patchelf');
 insert into jobSets(project, name, description, nixExprInput, nixExprPath) values('patchelf', 'trunk', 'PatchELF', 'patchelfSrc', 'release.nix');
-insert into jobSetInputs(project, job, name, type, uri) values('patchelf', 'trunk', 'patchelfSrc', 'path', '/home/eelco/Dev/patchelf-wc');
-insert into jobSetInputs(project, job, name, type, uri) values('patchelf', 'trunk', 'nixpkgs', 'path', '/home/eelco/Dev/nixpkgs-wc');
-insert into jobSetInputs(project, job, name, type, uri) values('patchelf', 'trunk', 'release', 'path', '/home/eelco/Dev/release');
+insert into jobSetInputs(project, jobset, name, type) values('patchelf', 'trunk', 'patchelfSrc', 'path');
+insert into jobSetInputAlts(project, jobset, input, altnr, uri) values('patchelf', 'trunk', 'patchelfSrc', 0, '/home/eelco/Dev/patchelf-wc');
+insert into jobSetInputs(project, jobset, name, type) values('patchelf', 'trunk', 'nixpkgs', 'path');
+insert into jobSetInputAlts(project, jobset, input, altnr, uri) values('patchelf', 'trunk', 'nixpkgs', 0, '/home/eelco/Dev/nixpkgs-wc');
+insert into jobSetInputs(project, jobset, name, type) values('patchelf', 'trunk', 'release', 'path');
+insert into jobSetInputAlts(project, jobset, input, altnr, uri) values('patchelf', 'trunk', 'release', 0, '/home/eelco/Dev/release');
+insert into jobSetInputs(project, jobset, name, type) values('patchelf', 'trunk', 'system', 'string');
+insert into jobSetInputAlts(project, jobset, input, altnr, value) values('patchelf', 'trunk', 'system', 0, 'i686-linux');
+insert into jobSetInputAlts(project, jobset, input, altnr, value) values('patchelf', 'trunk', 'system', 1, 'x86_64-linux');
 --insert into projects(name) values('nixpkgs');
 --insert into jobSets(project, name) values('nixpkgs', 'trunk');