Merge pull request #850 from grahamc/jobset-evals-by-id
Jobset -> JobsetEvals by JobsetEvals.jobset_id
Commit 6d047c286f
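In short, JobsetEvals rows now reference their Jobset through an integer jobset_id foreign key instead of the textual (project, jobset) pair. As a minimal sketch of what that means for eval lookups, the two statements below mirror the query strings changed in the evaluator hunks further down; the standalone SQL here is illustrative and not code taken verbatim from the commit:

-- Before this commit: the latest eval of a jobset was selected by name.
select id from JobsetEvals where project = $1 and jobset = $2 order by id desc limit 1;

-- After this commit: the same lookup goes through the new foreign key.
select id from JobsetEvals where jobset_id = $1 order by id desc limit 1;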
@@ -15,6 +15,56 @@ using namespace nix;

 typedef std::pair<std::string, std::string> JobsetName;

+class JobsetId {
+    public:
+
+        std::string project;
+        std::string jobset;
+        int id;
+
+
+        JobsetId(const std::string & project, const std::string & jobset, int id)
+            : project{ project }, jobset{ jobset }, id{ id }
+        {
+        }
+
+        friend bool operator== (const JobsetId & lhs, const JobsetId & rhs);
+        friend bool operator!= (const JobsetId & lhs, const JobsetId & rhs);
+        friend bool operator< (const JobsetId & lhs, const JobsetId & rhs);
+
+        friend bool operator== (const JobsetId & lhs, const JobsetName & rhs);
+        friend bool operator!= (const JobsetId & lhs, const JobsetName & rhs);
+
+        std::string display() const {
+            return str(format("%1%:%2% (jobset#%3%)") % project % jobset % id);
+        }
+};
+
+bool operator==(const JobsetId & lhs, const JobsetId & rhs)
+{
+    return lhs.id == rhs.id;
+}
+
+bool operator!=(const JobsetId & lhs, const JobsetId & rhs)
+{
+    return lhs.id != rhs.id;
+}
+
+bool operator<(const JobsetId & lhs, const JobsetId & rhs)
+{
+    return lhs.id < rhs.id;
+}
+
+bool operator==(const JobsetId & lhs, const JobsetName & rhs)
+{
+    return lhs.project == rhs.first && lhs.jobset == rhs.second;
+}
+
+bool operator!=(const JobsetId & lhs, const JobsetName & rhs)
+{
+    return ! (lhs == rhs);
+}
+
 enum class EvaluationStyle
 {
     SCHEDULE = 1,

@@ -30,14 +80,14 @@ struct Evaluator

     struct Jobset
     {
-        JobsetName name;
+        JobsetId name;
         std::optional<EvaluationStyle> evaluation_style;
         time_t lastCheckedTime, triggerTime;
         int checkInterval;
         Pid pid;
     };

-    typedef std::map<JobsetName, Jobset> Jobsets;
+    typedef std::map<JobsetId, Jobset> Jobsets;

     std::optional<JobsetName> evalOne;

@@ -68,16 +118,18 @@ struct Evaluator
         pqxx::work txn(*conn);

         auto res = txn.exec
-            ("select project, j.name, lastCheckedTime, triggerTime, checkInterval, j.enabled as jobset_enabled from Jobsets j join Projects p on j.project = p.name "
+            ("select j.id as id, project, j.name, lastCheckedTime, triggerTime, checkInterval, j.enabled as jobset_enabled "
+             "from Jobsets j "
+             "join Projects p on j.project = p.name "
              "where j.enabled != 0 and p.enabled != 0");

         auto state(state_.lock());

-        std::set<JobsetName> seen;
+        std::set<JobsetId> seen;

         for (auto const & row : res) {
-            auto name = JobsetName{row["project"].as<std::string>(), row["name"].as<std::string>()};
+            auto name = JobsetId{row["project"].as<std::string>(), row["name"].as<std::string>(), row["id"].as<int>()};

             if (evalOne && name != *evalOne) continue;

@@ -111,7 +163,7 @@ struct Evaluator
             if (seen.count(i->first))
                 ++i;
             else {
-                printInfo("forgetting jobset ‘%s:%s’", i->first.first, i->first.second);
+                printInfo("forgetting jobset ‘%s’", i->first.display());
                 i = state->jobsets.erase(i);
             }
         }

@@ -120,25 +172,24 @@ struct Evaluator
    {
        time_t now = time(0);

-       printInfo("starting evaluation of jobset ‘%s:%s’ (last checked %d s ago)",
-           jobset.name.first, jobset.name.second,
+       printInfo("starting evaluation of jobset ‘%s’ (last checked %d s ago)",
+           jobset.name.display(),
            now - jobset.lastCheckedTime);

        {
            auto conn(dbPool.get());
            pqxx::work txn(*conn);
            txn.exec_params0
-               ("update Jobsets set startTime = $1 where project = $2 and name = $3",
+               ("update Jobsets set startTime = $1 where id = $2",
                now,
-               jobset.name.first,
-               jobset.name.second);
+               jobset.name.id);
            txn.commit();
        }

        assert(jobset.pid == -1);

        jobset.pid = startProcess([&]() {
-           Strings args = { "hydra-eval-jobset", jobset.name.first, jobset.name.second };
+           Strings args = { "hydra-eval-jobset", jobset.name.project, jobset.name.jobset };
            execvp(args.front().c_str(), stringsToCharPtrs(args).data());
            throw SysError("executing ‘%1%’", args.front());
        });

@@ -152,23 +203,23 @@ struct Evaluator
    {
        if (jobset.pid != -1) {
            // Already running.
-           debug("shouldEvaluate %s:%s? no: already running",
-               jobset.name.first, jobset.name.second);
+           debug("shouldEvaluate %s? no: already running",
+               jobset.name.display());
            return false;
        }

        if (jobset.triggerTime != std::numeric_limits<time_t>::max()) {
            // An evaluation of this Jobset is requested
-           debug("shouldEvaluate %s:%s? yes: requested",
-               jobset.name.first, jobset.name.second);
+           debug("shouldEvaluate %s? yes: requested",
+               jobset.name.display());
            return true;
        }

        if (jobset.checkInterval <= 0) {
            // Automatic scheduling is disabled. We allow requested
            // evaluations, but never schedule start one.
-           debug("shouldEvaluate %s:%s? no: checkInterval <= 0",
-               jobset.name.first, jobset.name.second);
+           debug("shouldEvaluate %s? no: checkInterval <= 0",
+               jobset.name.display());
            return false;
        }

@@ -184,16 +235,15 @@ struct Evaluator
        if (jobset.evaluation_style == EvaluationStyle::ONE_AT_A_TIME) {
            auto evaluation_res = txn.parameterized
                ("select id from JobsetEvals "
-                "where project = $1 and jobset = $2 "
+                "where jobset_id = $1 "
                 "order by id desc limit 1")
-               (jobset.name.first)
-               (jobset.name.second)
+               (jobset.name.id)
                .exec();

            if (evaluation_res.empty()) {
                // First evaluation, so allow scheduling.
-               debug("shouldEvaluate(one-at-a-time) %s:%s? yes: no prior eval",
-                   jobset.name.first, jobset.name.second);
+               debug("shouldEvaluate(one-at-a-time) %s? yes: no prior eval",
+                   jobset.name.display());
                return true;
            }

@@ -212,20 +262,20 @@ struct Evaluator
            // If the previous evaluation has no unfinished builds
            // schedule!
            if (unfinished_build_res.empty()) {
-               debug("shouldEvaluate(one-at-a-time) %s:%s? yes: no unfinished builds",
-                   jobset.name.first, jobset.name.second);
+               debug("shouldEvaluate(one-at-a-time) %s? yes: no unfinished builds",
+                   jobset.name.display());
                return true;
            } else {
                debug("shouldEvaluate(one-at-a-time) %s:%s? no: at least one unfinished build",
-                   jobset.name.first, jobset.name.second);
+                   jobset.name.display());
                return false;
            }

        } else {
            // EvaluationStyle::ONESHOT, EvaluationStyle::SCHEDULED
-           debug("shouldEvaluate(oneshot/scheduled) %s:%s? yes: checkInterval elapsed",
-               jobset.name.first, jobset.name.second);
+           debug("shouldEvaluate(oneshot/scheduled) %s? yes: checkInterval elapsed",
+               jobset.name.display());
            return true;
        }
    }

@@ -350,8 +400,8 @@ struct Evaluator
            auto & jobset(i.second);

            if (jobset.pid == pid) {
-               printInfo("evaluation of jobset ‘%s:%s’ %s",
-                   jobset.name.first, jobset.name.second, statusToString(status));
+               printInfo("evaluation of jobset ‘%s’ %s",
+                   jobset.name.display(), statusToString(status));

                auto now = time(0);

@@ -367,23 +417,20 @@ struct Evaluator
                   jobset from getting stuck in an endless
                   failing eval loop. */
                txn.exec_params0
-                   ("update Jobsets set triggerTime = null where project = $1 and name = $2 and startTime is not null and triggerTime <= startTime",
-                   jobset.name.first,
-                   jobset.name.second);
+                   ("update Jobsets set triggerTime = null where id = $1 and startTime is not null and triggerTime <= startTime",
+                   jobset.name.id);

                /* Clear the start time. */
                txn.exec_params0
-                   ("update Jobsets set startTime = null where project = $1 and name = $2",
-                   jobset.name.first,
-                   jobset.name.second);
+                   ("update Jobsets set startTime = null where id = $1",
+                   jobset.name.id);

                if (!WIFEXITED(status) || WEXITSTATUS(status) > 1) {
                    txn.exec_params0
-                       ("update Jobsets set errorMsg = $1, lastCheckedTime = $2, errorTime = $2, fetchErrorMsg = null where project = $3 and name = $4",
+                       ("update Jobsets set errorMsg = $1, lastCheckedTime = $2, errorTime = $2, fetchErrorMsg = null where id = $3",
                        fmt("evaluation %s", statusToString(status)),
                        now,
-                       jobset.name.first,
-                       jobset.name.second);
+                       jobset.name.id);
                }

                txn.commit();
@@ -34,7 +34,7 @@ sub machines : Chained('admin') PathPart('machines') Args(0) {
 sub clear_queue_non_current : Chained('admin') PathPart('clear-queue-non-current') Args(0) {
     my ($self, $c) = @_;
     my $builds = $c->model('DB::Builds')->search(
-        { id => { -in => \ "select id from Builds where id in ((select id from Builds where finished = 0) except (select build from JobsetEvalMembers where eval in (select max(id) from JobsetEvals where hasNewBuilds = 1 group by project, jobset)))" }
+        { id => { -in => \ "select id from Builds where id in ((select id from Builds where finished = 0) except (select build from JobsetEvalMembers where eval in (select max(id) from JobsetEvals where hasNewBuilds = 1 group by jobset_id)))" }
        });
     my $n = cancelBuilds($c->model('DB')->schema, $builds);
     $c->flash->{successMsg} = "$n builds have been cancelled.";
@@ -16,8 +16,8 @@ sub evalChain : Chained('/') PathPart('eval') CaptureArgs(1) {
         or notFound($c, "Evaluation $evalId doesn't exist.");

     $c->stash->{eval} = $eval;
-    $c->stash->{project} = $eval->project;
     $c->stash->{jobset} = $eval->jobset;
+    $c->stash->{project} = $eval->jobset->project;
 }

@@ -219,7 +219,7 @@ sub getEvals {
     foreach my $curEval (@evals) {

         my ($prevEval) = $c->model('DB::JobsetEvals')->search(
-            { project => $curEval->get_column('project'), jobset => $curEval->get_column('jobset')
+            { jobset_id => $curEval->get_column('jobset_id')
             , hasnewbuilds => 1, id => { '<', $curEval->id } },
             { order_by => "id DESC", rows => 1 });

@@ -42,15 +42,9 @@ __PACKAGE__->table("jobsetevals");
   is_nullable: 0
   sequence: 'jobsetevals_id_seq'

-=head2 project
-
-  data_type: 'text'
-  is_foreign_key: 1
-  is_nullable: 0
-
-=head2 jobset
-
-  data_type: 'text'
+=head2 jobset_id
+
+  data_type: 'integer'
   is_foreign_key: 1
   is_nullable: 0

@@ -89,16 +83,6 @@ __PACKAGE__->table("jobsetevals");
   data_type: 'text'
   is_nullable: 0

-=head2 nixexprinput
-
-  data_type: 'text'
-  is_nullable: 1
-
-=head2 nixexprpath
-
-  data_type: 'text'
-  is_nullable: 1
-
 =head2 nrbuilds

   data_type: 'integer'

@@ -114,6 +98,16 @@ __PACKAGE__->table("jobsetevals");
   data_type: 'text'
   is_nullable: 1

+=head2 nixexprinput
+
+  data_type: 'text'
+  is_nullable: 1
+
+=head2 nixexprpath
+
+  data_type: 'text'
+  is_nullable: 1
+
 =cut

 __PACKAGE__->add_columns(

@@ -124,10 +118,8 @@ __PACKAGE__->add_columns(
     is_nullable => 0,
     sequence => "jobsetevals_id_seq",
   },
-  "project",
-  { data_type => "text", is_foreign_key => 1, is_nullable => 0 },
-  "jobset",
-  { data_type => "text", is_foreign_key => 1, is_nullable => 0 },
+  "jobset_id",
+  { data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
   "errormsg",
   { data_type => "text", is_nullable => 1 },
   "errortime",

@@ -142,16 +134,16 @@ __PACKAGE__->add_columns(
   { data_type => "integer", is_nullable => 0 },
   "hash",
   { data_type => "text", is_nullable => 0 },
-  "nixexprinput",
-  { data_type => "text", is_nullable => 1 },
-  "nixexprpath",
-  { data_type => "text", is_nullable => 1 },
   "nrbuilds",
   { data_type => "integer", is_nullable => 1 },
   "nrsucceeded",
   { data_type => "integer", is_nullable => 1 },
   "flake",
   { data_type => "text", is_nullable => 1 },
+  "nixexprinput",
+  { data_type => "text", is_nullable => 1 },
+  "nixexprpath",
+  { data_type => "text", is_nullable => 1 },
 );

 =head1 PRIMARY KEY

@@ -179,8 +171,8 @@ Related object: L<Hydra::Schema::Jobsets>
 __PACKAGE__->belongs_to(
   "jobset",
   "Hydra::Schema::Jobsets",
-  { name => "jobset", project => "project" },
-  { is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
+  { id => "jobset_id" },
+  { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },
 );

 =head2 jobsetevalinputs

@@ -213,24 +205,9 @@ __PACKAGE__->has_many(
   undef,
 );

-=head2 project
-
-Type: belongs_to
-
-Related object: L<Hydra::Schema::Projects>
-
-=cut
-
-__PACKAGE__->belongs_to(
-  "project",
-  "Hydra::Schema::Projects",
-  { name => "project" },
-  { is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
-);
-
-
-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-01-22 07:11:57
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:hdu+0WWo2363dVvImMKxdA
+
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-01-25 14:44:07
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:OVxeYH+eoZZrAsAJ2/mAAA

 __PACKAGE__->has_many(
   "buildIds",
@@ -301,10 +301,7 @@ Related object: L<Hydra::Schema::JobsetEvals>
 __PACKAGE__->has_many(
   "jobsetevals",
   "Hydra::Schema::JobsetEvals",
-  {
-    "foreign.jobset" => "self.name",
-    "foreign.project" => "self.project",
-  },
+  { "foreign.jobset_id" => "self.id" },
   undef,
 );

@@ -375,8 +372,8 @@ __PACKAGE__->has_many(
 );


-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-01-22 07:11:57
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:6P1qlC5oVSPRSgRBp6nmrw
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-01-25 14:38:14
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:7XtIqrrGAIvReqly1kapog


 =head2 builds
@@ -157,21 +157,6 @@ __PACKAGE__->has_many(
   undef,
 );

-=head2 jobsetevals
-
-Type: has_many
-
-Related object: L<Hydra::Schema::JobsetEvals>
-
-=cut
-
-__PACKAGE__->has_many(
-  "jobsetevals",
-  "Hydra::Schema::JobsetEvals",
-  { "foreign.project" => "self.name" },
-  undef,
-);
-
 =head2 jobsetrenames

 Type: has_many

@@ -258,8 +243,8 @@ Composing rels: L</projectmembers> -> username
 __PACKAGE__->many_to_many("usernames", "projectmembers", "username");


-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-01-22 07:11:57
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Ff5gJejFu+02b0lInobOoQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-01-25 14:38:14
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:+4yWd9UjCyxxLZYDrVUAxA

 my %hint = (
     columns => [
@@ -463,7 +463,7 @@ BLOCK renderEvals %]
     <tr>
       <td><a class="row-link" href="[% link %]">[% eval.id %]</a></td>
       [% IF !jobset && !build %]
-        <td>[% INCLUDE renderFullJobsetName project=eval.get_column('project') jobset=eval.get_column('jobset') %]</td>
+        <td>[% INCLUDE renderFullJobsetName project=eval.jobset.project.name jobset=eval.jobset.name %]</td>
       [% END %]
       <td class="nowrap">[% INCLUDE renderRelativeDate timestamp = eval.timestamp %]</td>
       <td>
@@ -5,6 +5,11 @@ BEGIN {
 }

 use Catalyst::ScriptRunner;

+STDOUT->autoflush();
+STDERR->autoflush(1);
+binmode STDERR, ":encoding(utf8)";
+
 Catalyst::ScriptRunner->run('Hydra', 'DevServer');

 1;
@@ -191,8 +191,11 @@ sub fetchInputEval {
         $eval = getLatestFinishedEval($jobset);
         die "jobset ‘$value’ does not have a finished evaluation\n" unless defined $eval;
     } elsif ($value =~ /^($projectNameRE):($jobsetNameRE):($jobNameRE)$/) {
+        my $jobset = $db->resultset('Jobsets')->find({ project => $1, name => $2 });
+        die "jobset ‘$1:$2’ does not exist\n" unless defined $jobset;
+
         $eval = $db->resultset('JobsetEvals')->find(
-            { project => $1, jobset => $2, hasnewbuilds => 1 },
+            { jobset_id => $jobset->id, hasnewbuilds => 1 },
             { order_by => "id DESC", rows => 1
             , where =>
                 \ [ # All builds in this jobset should be finished...
@@ -1,3 +1,14 @@
+-- Making a database change:
+--
+-- 1. Update this schema document to match what the end result should be.
+--
+-- 2. Run `make -C src/sql update-dbix hydra-postgresql.sql` in the root
+--    of the project directory, and git add / git commit the changed,
+--    generated files.
+--
+-- 3. Create a migration in this same directory, named `upgrade-N.sql`
+--
+
 -- Singleton table to keep track of the schema version.
 create table SchemaVersion (
     version integer not null

@@ -429,9 +440,7 @@ create table SystemTypes (

 create table JobsetEvals (
     id serial primary key not null,
+    jobset_id integer not null,

-    project text not null,
-    jobset text not null,
-
     errorMsg text, -- error output from the evaluator
     errorTime integer, -- timestamp associated with errorMsg

@@ -462,8 +471,7 @@ create table JobsetEvals (
     nixExprInput text, -- name of the jobsetInput containing the Nix or Guix expression
     nixExprPath text, -- relative path of the Nix or Guix expression

-    foreign key (project) references Projects(name) on delete cascade on update cascade,
-    foreign key (project, jobset) references Jobsets(project, name) on delete cascade on update cascade
+    foreign key (jobset_id) references Jobsets(id) on delete cascade
 );


@@ -618,7 +626,8 @@ create index IndexBuildOutputsPath on BuildOutputs using hash(path);
 create index IndexBuildsOnKeep on Builds(keep) where keep = 1;

 -- To get the most recent eval for a jobset.
-create index IndexJobsetEvalsOnJobsetId on JobsetEvals(project, jobset, id desc) where hasNewBuilds = 1;
+create index IndexJobsetEvalsOnJobsetId on JobsetEvals(jobset_id, id desc) where hasNewBuilds = 1;
+create index IndexJobsetIdEvals on JobsetEvals(jobset_id) where hasNewBuilds = 1;

 create index IndexBuildsOnNotificationPendingSince on Builds(notificationPendingSince) where notificationPendingSince is not null;
@@ -1,12 +1,14 @@
 #!/usr/bin/env bash

+set -eux
+
 readonly scratch=$(mktemp -d -t tmp.XXXXXXXXXX)

 readonly socket=$scratch/socket
 readonly data=$scratch/data
 readonly dbname=hydra-update-dbix

-function finish {
+function finish() {
   set +e
   pg_ctl -D "$data" \
     -o "-F -h '' -k \"$socket\"" \

@@ -33,7 +35,10 @@ pg_ctl -D "$data" \

 createdb -h "$socket" "$dbname"

-psql -h "$socket" "$dbname" -f ./hydra.sql
+psql --host "$socket" \
+  --set ON_ERROR_STOP=1 \
+  --file ./hydra.sql \
+  "$dbname"

 perl -I ../lib \
   -MDBIx::Class::Schema::Loader=make_schema_at,dump_to_dir:../lib \
src/sql/upgrade-72.sql (new file, 22 lines added)
@@ -0,0 +1,22 @@
+ALTER TABLE JobsetEvals
+    ADD COLUMN jobset_id integer NULL,
+    ADD FOREIGN KEY (jobset_id)
+        REFERENCES Jobsets(id)
+        ON DELETE CASCADE;
+
+UPDATE JobsetEvals
+    SET jobset_id = (
+        SELECT jobsets.id
+        FROM jobsets
+        WHERE jobsets.name = JobsetEvals.jobset
+          AND jobsets.project = JobsetEvals.project
+    );
+
+
+ALTER TABLE JobsetEvals
+    ALTER COLUMN jobset_id SET NOT NULL,
+    DROP COLUMN jobset,
+    DROP COLUMN project;
+
+create index IndexJobsetIdEvals on JobsetEvals(jobset_id) where hasNewBuilds = 1;
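After the migration above has run, one way to sanity-check the backfill is to join evals back to their jobsets through the new column. This query is illustrative only (it assumes psql access to a Hydra database) and is not part of the commit:

-- Every eval should resolve to exactly one jobset via jobset_id.
select e.id as eval, j.project, j.name as jobset
  from JobsetEvals e
  join Jobsets j on j.id = e.jobset_id
 order by e.id desc
 limit 5;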