forked from lix-project/hydra
Merge pull request #1115 from DeterminateSystems/project-jobset/builds-json-repr
Project jobset: update builds json repr
commit 2abcd84931
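For orientation, here is a minimal sketch of how the updated per-build JSON can be requested. It is written against the same Catalyst test client and the job path /job/tests/aggregate/aggregate/latest-finished used by the new t/Hydra/Controller/Build/api.t further down; outside that test harness the path and setup would differ.

# Sketch only: fetch a build's JSON representation the way the new test does.
use JSON::MaybeXS qw(decode_json);
use HTTP::Request::Common;
use URI;
use Catalyst::Test 'Hydra';

# latest-finished answers with a redirect to the build's own URL.
my $redirect  = request(GET '/job/tests/aggregate/aggregate/latest-finished');
my $build_url = URI->new($redirect->header('location'))->path;

# Asking for application/json yields the structure produced by as_json below.
my $response = request(GET $build_url, Accept => 'application/json');
my $build    = decode_json($response->content);
print "$build->{project}/$build->{jobset}/$build->{job}\n";   # e.g. tests/aggregate/aggregate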
@@ -611,36 +611,36 @@ makeQueries('ForJobset', "and jobset_id = ?");
 makeQueries('ForJob', "and jobset_id = ? and job = ?");
 makeQueries('ForJobName', "and jobset_id = (select id from jobsets j where j.name = ?) and job = ?");
 
-my %hint = (
-    columns => [
-        'id',
-        'finished',
-        'timestamp',
-        'starttime',
-        'stoptime',
-        'project',
-        'jobset',
-        'job',
-        'nixname',
-        'system',
-        'priority',
-        'buildstatus',
-        'releasename',
-        'drvpath',
-    ],
-    relations => {
-        jobsetevals => 'id'
-    },
-    eager_relations => {
-        buildoutputs => 'name',
-        buildproducts => 'productnr',
-        buildmetrics => 'name',
-    }
-);
-
-sub json_hint {
-    return \%hint;
-}
+sub as_json {
+    my ($self) = @_;
+
+    # After #1093 merges this can become $self->jobset;
+    # However, with ->jobset being a column on master
+    # it seems DBIX gets a it confused.
+    my ($jobset) = $self->search_related('jobset')->first;
+
+    my $json = {
+        id => $self->get_column('id'),
+        finished => $self->get_column('finished'),
+        timestamp => $self->get_column('timestamp'),
+        starttime => $self->get_column('starttime'),
+        stoptime => $self->get_column('stoptime'),
+        project => $jobset->get_column('project'),
+        jobset => $jobset->name,
+        job => $self->get_column('job'),
+        nixname => $self->get_column('nixname'),
+        system => $self->get_column('system'),
+        priority => $self->get_column('priority'),
+        buildstatus => $self->get_column('buildstatus'),
+        releasename => $self->get_column('releasename'),
+        drvpath => $self->get_column('drvpath'),
+        jobsetevals => [ map { $_->id } $self->jobsetevals ],
+        buildoutputs => { map { $_->name => $_ } $self->buildoutputs },
+        buildproducts => { map { $_->productnr => $_ } $self->buildproducts },
+        buildmetrics => { map { $_->name => $_ } $self->buildmetrics },
+    };
+
+    return $json;
+}
 
 1;
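The new as_json method can also be called directly on a build row. The sketch below assumes a configured Hydra checkout where Hydra::Model::DB connects to a populated database, and that the schema exposes a 'Builds' resultset; the build id is only a placeholder.

# Sketch only: serialize one build with the as_json method added above.
use JSON::MaybeXS qw(encode_json);
use Hydra::Model::DB;

my $db    = Hydra::Model::DB->new;
my $build = $db->resultset('Builds')->find(1);    # placeholder build id
print encode_json($build->as_json), "\n";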
t/Hydra/Controller/Build/api.t (new file, 105 additions)
@@ -0,0 +1,105 @@
use strict;
use warnings;
use Setup;
use JSON::MaybeXS qw(decode_json encode_json);
use Data::Dumper;
use URI;
use Test2::V0;
use Catalyst::Test ();
use HTTP::Request::Common;

my %ctx = test_init();

Catalyst::Test->import('Hydra');

my $db = Hydra::Model::DB->new;
hydra_setup($db);

my $project = $db->resultset('Projects')->create({name => "tests", displayname => "", owner => "root"});

my $jobset = createBaseJobset("aggregate", "aggregate.nix", $ctx{jobsdir});

ok(evalSucceeds($jobset), "Evaluating jobs/aggregate.nix should exit with return code 0");
is(nrQueuedBuildsForJobset($jobset), 3, "Evaluating jobs/aggregate.nix should result in 3 builds");
my $aggregateBuild;
for my $build (queuedBuildsForJobset($jobset)) {
    if ($build->nixname eq "aggregate") {
        $aggregateBuild = $build;
    }
    ok(runBuild($build), "Build '".$build->job."' from jobs/aggregate.nix should exit with return code 0");
}
$aggregateBuild->discard_changes();

my $build_redirect = request(GET '/job/tests/aggregate/aggregate/latest-finished');
my $build_url = URI->new($build_redirect->header('location'))->path;

subtest "validating the JSON representation of a build" => sub {
    my $response = request(GET $build_url,
        Accept => 'application/json',
    );

    is($response->code, 200, "Getting the build data");

    my $data;
    my $valid_json = lives { $data = decode_json($response->content); };
    ok($valid_json, "We get back valid JSON.");
    if (!$valid_json) {
        use Data::Dumper;
        print STDERR Dumper $response->content;
    }

    is($data, {
        project => "tests",
        jobset => "aggregate",
        buildmetrics => {},
        buildoutputs => { out => { path => $aggregateBuild->buildoutputs->find({ name => "out" })->path }},
        buildproducts => { 1 => {
            defaultpath => "",
            filesize => undef,
            name => "aggregate",
            path => $aggregateBuild->buildoutputs->find({ name => "out" })->path,
            sha256hash => undef,
            subtype => "",
            type => "nix-build",
        }},
        buildstatus => 0,
        drvpath => $aggregateBuild->drvpath,
        finished => 1,
        id => $aggregateBuild->id,
        job => "aggregate",
        jobsetevals => [ $aggregateBuild->jobsetevals->first->id ],
        nixname => "aggregate",
        priority => 100,
        releasename => undef,
        starttime => $aggregateBuild->starttime,
        stoptime => $aggregateBuild->stoptime,
        timestamp => $aggregateBuild->timestamp,
        system => $aggregateBuild->system,
    }, "The build's JSON matches our API.");
};

subtest "accessing the constituents API" => sub {
    my $url = $build_url . "/constituents";

    my $constituents = request(GET $url,
        Accept => 'application/json',
    );

    ok($constituents->is_success, "Getting the constituent builds");

    my $data;
    my $valid_json = lives { $data = decode_json($constituents->content); };
    ok($valid_json, "We get back valid JSON.");
    if (!$valid_json) {
        use Data::Dumper;
        print STDERR Dumper $constituents->content;
    }

    my ($buildA) = grep { $_->{nixname} eq "empty-dir-a" } @$data;
    my ($buildB) = grep { $_->{nixname} eq "empty-dir-b" } @$data;

    is($buildA->{job}, "a");
    is($buildB->{job}, "b");
};

done_testing;
(deleted file, 54 lines)
@@ -1,54 +0,0 @@
use strict;
use warnings;
use Setup;
use JSON::MaybeXS qw(decode_json encode_json);
use Data::Dumper;
use URI;
my %ctx = test_init();

require Hydra::Schema;
require Hydra::Model::DB;
require Hydra::Helper::Nix;

use Test2::V0;
require Catalyst::Test;
Catalyst::Test->import('Hydra');
use HTTP::Request::Common;

my $db = Hydra::Model::DB->new;
hydra_setup($db);

my $project = $db->resultset('Projects')->create({name => "tests", displayname => "", owner => "root"});

my $jobset = createBaseJobset("aggregate", "aggregate.nix", $ctx{jobsdir});

ok(evalSucceeds($jobset), "Evaluating jobs/aggregate.nix should exit with return code 0");
is(nrQueuedBuildsForJobset($jobset), 3, "Evaluating jobs/aggregate.nix should result in 3 builds");
for my $build (queuedBuildsForJobset($jobset)) {
    ok(runBuild($build), "Build '".$build->job."' from jobs/aggregate.nix should exit with return code 0");
}

my $build_redirect = request(GET '/job/tests/aggregate/aggregate/latest-finished');

my $url = URI->new($build_redirect->header('location'))->path . "/constituents";
my $constituents = request(GET $url,
    Accept => 'application/json',
);

ok($constituents->is_success, "Getting the constituent builds");

my $data;
my $valid_json = lives { $data = decode_json($constituents->content); };
ok($valid_json, "We get back valid JSON.");
if (!$valid_json) {
    use Data::Dumper;
    print STDERR Dumper $constituents->content;
}

my ($buildA) = grep { $_->{nixname} eq "empty-dir-a" } @$data;
my ($buildB) = grep { $_->{nixname} eq "empty-dir-b" } @$data;

is($buildA->{job}, "a");
is($buildB->{job}, "b");

done_testing;