Merge remote-tracking branch 'origin/master' into flake
commit 881b7449fd
@@ -82,7 +82,7 @@ sub overview : Chained('job') PathPart('') Args(0) {
    # If this is an aggregate job, then get its constituents.
    my @constituents = $c->model('DB::Builds')->search(
        { aggregate => { -in => $job->builds->search({}, { columns => ["id"], order_by => "id desc", rows => 15 })->as_query } },
        { join => 'aggregateconstituents_constituents',
          columns => ['id', 'job', 'finished', 'buildstatus'],
          +select => ['aggregateconstituents_constituents.aggregate'],
          +as => ['aggregate']
@@ -99,7 +99,7 @@ sub overview : Chained('job') PathPart('') Args(0) {
    foreach my $agg (keys %$aggregates) {
        # FIXME: could be done in one query.
        $aggregates->{$agg}->{build} =
            $c->model('DB::Builds')->find({id => $agg}, {columns => [@buildListColumns]}) or die;
    }
@@ -172,7 +172,7 @@ sub get_builds : Chained('job') PathPart('') CaptureArgs(0) {
    my ($self, $c) = @_;
    $c->stash->{allBuilds} = $c->stash->{job}->builds;
    $c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForJob')
-       ->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name, $c->stash->{job}->name]});
+       ->search({}, {bind => [$c->stash->{jobset}->name, $c->stash->{job}->name]});
    $c->stash->{channelBaseName} =
        $c->stash->{project}->name . "-" . $c->stash->{jobset}->name . "-" . $c->stash->{job}->name;
}
@@ -162,7 +162,7 @@ sub get_builds : Chained('jobsetChain') PathPart('') CaptureArgs(0) {
    my ($self, $c) = @_;
    $c->stash->{allBuilds} = $c->stash->{jobset}->builds;
    $c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForJobset')
-       ->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name]});
+       ->search({}, {bind => [$c->stash->{jobset}->name]});
    $c->stash->{channelBaseName} =
        $c->stash->{project}->name . "-" . $c->stash->{jobset}->name;
}
@@ -68,7 +68,7 @@ __PACKAGE__->table("builds");

  data_type: 'integer'
  is_foreign_key: 1
- is_nullable: 1
+ is_nullable: 0

=head2 job
@@ -222,7 +222,7 @@ __PACKAGE__->add_columns(
  "jobset",
  { data_type => "text", is_foreign_key => 1, is_nullable => 0 },
  "jobset_id",
- { data_type => "integer", is_foreign_key => 1, is_nullable => 1 },
+ { data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
  "job",
  { data_type => "text", is_foreign_key => 1, is_nullable => 0 },
  "nixname",
@@ -466,12 +466,7 @@ __PACKAGE__->belongs_to(
  "jobset",
  "Hydra::Schema::Jobsets",
  { id => "jobset_id" },
- {
-   is_deferrable => 0,
-   join_type => "LEFT",
-   on_delete => "CASCADE",
-   on_update => "NO ACTION",
- },
+ { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },
);

=head2 jobset_project_jobset
@@ -578,8 +573,8 @@ __PACKAGE__->many_to_many(
);


-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:32:28
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:RvrINOAowDcde8Nd9VD6rQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:34:25
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:EEXlcKN/ydXJ129vT0jTUw

__PACKAGE__->has_many(
  "dependents",
@@ -642,8 +637,8 @@ QUERY

makeQueries('', "");
makeQueries('ForProject', "and project = ?");
-makeQueries('ForJobset', "and project = ? and jobset = ?");
-makeQueries('ForJob', "and project = ? and jobset = ? and job = ?");
+makeQueries('ForJobset', "and jobset_id = (select id from jobsets j where j.name = ?)");
+makeQueries('ForJob', "and jobset_id = (select id from jobsets j where j.name = ?) and job = ?");


my %hint = (
@@ -51,7 +51,7 @@ __PACKAGE__->table("jobs");

  data_type: 'integer'
  is_foreign_key: 1
- is_nullable: 1
+ is_nullable: 0

=head2 name
@@ -66,7 +66,7 @@ __PACKAGE__->add_columns(
  "jobset",
  { data_type => "text", is_foreign_key => 1, is_nullable => 0 },
  "jobset_id",
- { data_type => "integer", is_foreign_key => 1, is_nullable => 1 },
+ { data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
  "name",
  { data_type => "text", is_nullable => 0 },
);
@@ -139,12 +139,7 @@ __PACKAGE__->belongs_to(
  "jobset",
  "Hydra::Schema::Jobsets",
  { id => "jobset_id" },
- {
-   is_deferrable => 0,
-   join_type => "LEFT",
-   on_delete => "CASCADE",
-   on_update => "NO ACTION",
- },
+ { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },
);

=head2 jobset_project_jobset
@@ -197,7 +192,25 @@ __PACKAGE__->has_many(
);


-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:30:58
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:dFusVjxb423gIEoadAw9sw
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:33:28
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:C5Tyh8Ke4yC6q7KIFVOHcQ
+
+=head2 builds
+
+Type: has_many
+
+Related object: L<Hydra::Schema::Builds>
+
+=cut
+
+__PACKAGE__->has_many(
+  "builds",
+  "Hydra::Schema::Builds",
+  {
+    "foreign.job" => "self.name",
+    "foreign.jobset_id" => "self.jobset_id",
+  },
+  undef,
+);

1;
@@ -423,10 +423,7 @@ Related object: L<Hydra::Schema::Builds>
__PACKAGE__->has_many(
  "builds",
  "Hydra::Schema::Builds",
- {
-   "foreign.jobset" => "self.name",
-   "foreign.project" => "self.project",
- },
+ { "foreign.jobset_id" => "self.id" },
  undef,
);
@@ -441,10 +438,7 @@ Related object: L<Hydra::Schema::Jobs>
__PACKAGE__->has_many(
  "jobs",
  "Hydra::Schema::Jobs",
- {
-   "foreign.jobset" => "self.name",
-   "foreign.project" => "self.project",
- },
+ { "foreign.jobset_id" => "self.id" },
  undef,
);
@@ -143,7 +143,7 @@ sub fetchInputSystemBuild {
    $jobsetName ||= $jobset->name;

    my @latestBuilds = $db->resultset('LatestSucceededForJob')
-       ->search({}, {bind => [$projectName, $jobsetName, $jobName]});
+       ->search({}, {bind => [$jobsetName, $jobName]});

    my @validBuilds = ();
    foreach my $build (@latestBuilds) {
@@ -146,7 +146,7 @@ create table JobsetInputAlts (
create table Jobs (
    project text not null,
    jobset text not null,
-   jobset_id integer null,
+   jobset_id integer not null,
    name text not null,

    primary key (project, jobset, name),
@@ -170,7 +170,7 @@ create table Builds (
    -- Info about the inputs.
    project text not null,
    jobset text not null,
-   jobset_id integer null,
+   jobset_id integer not null,
    job text not null,

    -- Info about the build result.
@@ -682,6 +682,7 @@ create index IndexBuildsOnProject on Builds(project);
create index IndexBuildsOnTimestamp on Builds(timestamp);
create index IndexBuildsOnFinishedStopTime on Builds(finished, stoptime DESC);
create index IndexBuildsOnJobFinishedId on builds(project, jobset, job, system, finished, id DESC);
+create index IndexFinishedSuccessfulBuilds on Builds(id DESC, buildstatus, finished, job, jobset_id) where buildstatus = 0 and finished = 1;
create index IndexBuildsOnDrvPath on Builds(drvPath);
create index IndexCachedHgInputsOnHash on CachedHgInputs(uri, branch, sha256hash);
create index IndexCachedGitInputsOnHash on CachedGitInputs(uri, branch, sha256hash);
src/sql/upgrade-63.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
+-- Make the Jobs.jobset_id column NOT NULL. If this upgrade fails,
+-- either the admin didn't run the backfiller or there is a bug. If
+-- the admin ran the backfiller and there are null columns, it is
+-- very important to figure out where the nullable columns came from.
+
+ALTER TABLE Jobs
+    ALTER COLUMN jobset_id SET NOT NULL;
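For context, here is a minimal sketch of the kind of pre-flight check and backfill the comment above refers to. The table and column names (Jobs.project, Jobs.jobset, Jobs.jobset_id, Jobsets.id, Jobsets.name) come from the schema changes in this commit, but the statements themselves are illustrative; they are not the backfiller shipped with Hydra.

-- Illustrative only: a non-zero count means the backfiller has not finished
-- and the NOT NULL upgrade below would fail.
SELECT count(*) FROM Jobs WHERE jobset_id IS NULL;

-- Illustrative only: fill any remaining rows from Jobsets, matching on the
-- old (project, jobset name) identity.
UPDATE Jobs
SET    jobset_id = j.id
FROM   Jobsets j
WHERE  Jobs.jobset_id IS NULL
  AND  Jobs.project = j.project
  AND  Jobs.jobset  = j.name;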
src/sql/upgrade-64.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
+-- Make the Builds.jobset_id column NOT NULL. If this upgrade fails,
+-- either the admin didn't run the backfiller or there is a bug. If
+-- the admin ran the backfiller and there are null columns, it is
+-- very important to figure out where the nullable columns came from.
+
+ALTER TABLE Builds
+    ALTER COLUMN jobset_id SET NOT NULL;
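The same kind of check applies to Builds before this upgrade; again a sketch of the idea rather than Hydra's actual backfiller.

-- Illustrative only: any non-NULL-free state must be reached before the
-- ALTER COLUMN above can succeed.
SELECT count(*) FROM Builds WHERE jobset_id IS NULL;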
src/sql/upgrade-65.sql (new file, 4 lines)
@@ -0,0 +1,4 @@
+-- Index more exactly what the latest-finished query looks for.
+create index IndexFinishedSuccessfulBuilds
+    on Builds(id DESC, buildstatus, finished, job, jobset_id)
+    where buildstatus = 0 and finished = 1;
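To show why this partial index lines up with the lookup, here is a sketch of the query shape implied by the makeQueries('ForJobset', ...) change earlier in this diff; the SQL Hydra actually generates through DBIx::Class may differ in detail.

-- Assumed query shape: newest finished, successful builds for one jobset,
-- i.e. exactly the columns and predicates covered by IndexFinishedSuccessfulBuilds.
SELECT id, job
FROM   Builds
WHERE  finished = 1
  AND  buildstatus = 0
  AND  jobset_id = (SELECT id FROM Jobsets j WHERE j.name = ?)
ORDER  BY id DESC;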