forked from lix-project/hydra
* Prevent repeated evaluation of a jobset with the same inputs. This
should make the Hydra scheduler much less CPU-intensive, since it will
no longer run hydra_eval_jobs on every check.
This commit is contained in:
parent
2bcedde1c8
commit
7eda090e74
|
@@ -8,7 +8,7 @@ use IPC::Run;
|
|||
use Hydra::Helper::Nix;
|
||||
|
||||
our @ISA = qw(Exporter);
|
||||
our @EXPORT = qw(fetchInput evalJobs checkBuild);
|
||||
our @EXPORT = qw(fetchInput evalJobs checkBuild inputsToArgs);
|
||||
|
||||
|
||||
sub getStorePathHash {
|
||||
|
|
59
src/lib/Hydra/Schema/JobsetInputHashes.pm
Normal file
59
src/lib/Hydra/Schema/JobsetInputHashes.pm
Normal file
|
@@ -0,0 +1,59 @@
|
|||
package Hydra::Schema::JobsetInputHashes;

# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE

use strict;
use warnings;

use base 'DBIx::Class';

# DBIx::Class result class for the JobsetInputHashes table: one row per
# (project, jobset, input-hash) combination that has already been
# evaluated, used by the scheduler to skip redundant evaluations.
__PACKAGE__->load_components("Core");
__PACKAGE__->table("JobsetInputHashes");

# All three key columns are non-nullable text; `timestamp` records when
# the evaluation for this hash was performed (seconds since the epoch —
# presumably, judging by the scheduler's use of time(); TODO confirm).
__PACKAGE__->add_columns(
    "project",
    {
        data_type      => "text",
        default_value  => undef,
        is_foreign_key => 1,
        is_nullable    => 0,
        size           => undef,
    },
    "jobset",
    {
        data_type      => "text",
        default_value  => undef,
        is_foreign_key => 1,
        is_nullable    => 0,
        size           => undef,
    },
    "hash",
    {
        data_type     => "text",
        default_value => undef,
        is_nullable   => 0,
        size          => undef,
    },
    "timestamp",
    {
        data_type     => "integer",
        default_value => undef,
        is_nullable   => 0,
        size          => undef,
    },
);

# A hash is unique per jobset, so the primary key spans all three
# identifying columns.
__PACKAGE__->set_primary_key("project", "jobset", "hash");

# Relationships back to the owning project and jobset rows.
__PACKAGE__->belongs_to("project", "Hydra::Schema::Projects", { name => "project" });

__PACKAGE__->belongs_to(
    "jobset",
    "Hydra::Schema::Jobsets",
    { name => "jobset", project => "project" },
);


# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-11-17 14:04:55
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:f8/4vTSQJbmAh/0PZHeFDg


# You can replace this text with custom content, and it will be preserved on regeneration
1;
|
|
@@ -104,10 +104,18 @@ __PACKAGE__->has_many(
|
|||
"foreign.project" => "self.project",
|
||||
},
|
||||
);
|
||||
__PACKAGE__->has_many(
|
||||
"jobsetinputhashes",
|
||||
"Hydra::Schema::JobsetInputHashes",
|
||||
{
|
||||
"foreign.jobset" => "self.name",
|
||||
"foreign.project" => "self.project",
|
||||
},
|
||||
);
|
||||
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-10-23 16:56:03
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:M+eetraKtSfF8q3cqJhEPw
|
||||
# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-11-17 14:04:55
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:xWsqXneZw90uEw/vcEXc4w
|
||||
|
||||
|
||||
# You can replace this text with custom content, and it will be preserved on regeneration
|
||||
|
|
|
@@ -87,10 +87,15 @@ __PACKAGE__->has_many(
|
|||
"Hydra::Schema::ReleaseMembers",
|
||||
{ "foreign.project" => "self.name" },
|
||||
);
|
||||
__PACKAGE__->has_many(
|
||||
"jobsetinputhashes",
|
||||
"Hydra::Schema::JobsetInputHashes",
|
||||
{ "foreign.project" => "self.name" },
|
||||
);
|
||||
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-10-23 16:56:03
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:zhL+ArisX2ZFU0NPIuDLdw
|
||||
# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-11-17 14:04:55
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:dWe2DEsuZuOjVj4IA8TwQg
|
||||
|
||||
|
||||
# You can replace this text with custom content, and it will be preserved on regeneration
|
||||
|
|
|
@@ -5,6 +5,7 @@ use feature 'switch';
|
|||
use Hydra::Schema;
|
||||
use Hydra::Helper::Nix;
|
||||
use Hydra::Helper::AddBuilds;
|
||||
use Digest::SHA qw(sha256_hex);
|
||||
|
||||
|
||||
STDOUT->autoflush();
|
||||
|
@@ -50,6 +51,20 @@ sub checkJobset {
|
|||
# Fetch all values for all inputs.
|
||||
fetchInputs($project, $jobset, $inputInfo);
|
||||
|
||||
# Hash the arguments to hydra_eval_jobs and check the
|
||||
# JobsetInputHashes to see if we've already evaluated this set of
|
||||
# inputs. If so, bail out.
|
||||
my @args = ($jobset->nixexprinput, $jobset->nixexprpath, inputsToArgs($inputInfo));
|
||||
my $argsHash = sha256_hex("@args");
|
||||
|
||||
if ($jobset->jobsetinputhashes->find({hash => $argsHash})) {
|
||||
print " already evaluated, skipping\n";
|
||||
txn_do($db, sub {
|
||||
$jobset->update({lastcheckedtime => time});
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
# Evaluate the job expression.
|
||||
my ($jobs, $nixExprInput) = evalJobs($inputInfo, $jobset->nixexprinput, $jobset->nixexprpath);
|
||||
|
||||
|
@@ -83,6 +98,8 @@ sub checkJobset {
|
|||
foreach my $build ($jobset->builds->search({iscurrent => 1})) {
|
||||
$build->update({iscurrent => 0}) unless $currentBuilds{$build->id};
|
||||
}
|
||||
|
||||
$jobset->jobsetinputhashes->create({hash => $argsHash, timestamp => time});
|
||||
|
||||
});
|
||||
|
||||
|
|
|
@@ -379,6 +379,24 @@ create table ReleaseMembers (
|
|||
);
|
||||
|
||||
|
||||
-- This table is used to prevent repeated Nix expression evaluation
-- for the same set of inputs for a jobset.  In the scheduler, after
-- obtaining the current inputs for a jobset, we hash the inputs
-- together, and if the resulting hash already appears in this table,
-- we can skip the jobset.  Otherwise it's added to the table, and the
-- Nix expression for the jobset is evaluated.  The hash is computed
-- over the command-line arguments to hydra_eval_jobs.
create table JobsetInputHashes (
    project       text not null,
    jobset        text not null,
    hash          text not null,
    timestamp     integer not null,
    primary key   (project, jobset, hash),
    foreign key   (project) references Projects(name) on delete cascade on update cascade,
    foreign key   (project, jobset) references Jobsets(project, name) on delete cascade on update cascade
);
|
||||
|
||||
|
||||
-- Some indices.
|
||||
create index IndexBuildInputsByBuild on BuildInputs(build);
|
||||
create index IndexBuildInputsByDependency on BuildInputs(dependency);
|
||||
|
|
Loading…
Reference in a new issue