Merge pull request #1137 from DeterminateSystems/runcommand-logs
Store and display the output of RunCommands
Commit 85b842e0ac

flake.nix (18 lines changed)
@@ -432,6 +432,20 @@
         license = with final.lib.licenses; [ artistic1 gpl1Plus ];
       };
     };
+
+    UUID4Tiny = final.buildPerlPackage {
+      pname = "UUID4-Tiny";
+      version = "0.002";
+      src = final.fetchurl {
+        url = "mirror://cpan/authors/id/C/CV/CVLIBRARY/UUID4-Tiny-0.002.tar.gz";
+        sha256 = "e7535b31e386d432dec7adde214348389e1d5cf753e7ed07f1ae04c4360840cf";
+      };
+      meta = {
+        description = "Cryptographically secure v4 UUIDs for Linux x64";
+        license = with final.lib.licenses; [ artistic1 gpl1Plus ];
+      };
+    };
+
   };

   hydra = with final; let
@@ -474,6 +488,7 @@
             git
             IOCompress
             IPCRun
+            IPCRun3
             JSON
             JSONMaybeXS
             JSONXS
@@ -501,6 +516,7 @@
             TestPostgreSQL
             TextDiff
             TextTable
+            UUID4Tiny
             XMLSimple
             YAML
           ];
@@ -517,7 +533,7 @@
           gitAndTools.topGit mercurial darcs subversion breezy openssl bzip2 libxslt
           final.nix perlDeps perl mdbook pixz
           boost
-          postgresql_11
+          postgresql_13
           (if lib.versionAtLeast lib.version "20.03pre"
            then nlohmann_json
            else nlohmann_json.override { multipleHeaders = true; })

@@ -275,7 +275,11 @@ in

      mkdir -m 0700 -p ${baseDir}/queue-runner
      mkdir -m 0750 -p ${baseDir}/build-logs
-     chown hydra-queue-runner.hydra ${baseDir}/queue-runner ${baseDir}/build-logs
+     mkdir -m 0750 -p ${baseDir}/runcommand-logs
+     chown hydra-queue-runner.hydra \
+       ${baseDir}/queue-runner \
+       ${baseDir}/build-logs \
+       ${baseDir}/runcommand-logs

      ${optionalString haveLocalDB ''
        if ! [ -e ${baseDir}/.db-created ]; then
@@ -432,12 +436,12 @@ in
            if [ $(systemctl is-active $service) == active ]; then
              echo "stopping $service due to lack of free space..."
              systemctl stop $service
-             date > /var/lib/hydra/.$service-stopped-minspace
+             date > ${baseDir}/.$service-stopped-minspace
            fi
          else
            if [ $spaceleft -gt $(( ($minFreeGB + 10) * 1024**3)) -a \
-              -r /var/lib/hydra/.$service-stopped-minspace ] ; then
-             rm /var/lib/hydra/.$service-stopped-minspace
+              -r ${baseDir}/.$service-stopped-minspace ] ; then
+             rm ${baseDir}/.$service-stopped-minspace
              echo "restarting $service due to newly available free space..."
              systemctl start $service
            fi
@@ -456,7 +460,7 @@ in
      { path = [ pkgs.bzip2 ];
        script =
          ''
-           find /var/lib/hydra/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r bzip2 -v -f
+           find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r bzip2 -v -f
         '';
        startAt = "Sun 01:45";
      };

@@ -126,23 +126,35 @@ sub view_nixlog : Chained('buildChain') PathPart('nixlog') {

    $c->stash->{step} = $step;

-   showLog($c, $mode, $step->busy == 0, $step->drvpath);
+   my $drvPath = $step->drvpath;
+   my $log_uri = $c->uri_for($c->controller('Root')->action_for("log"), [basename($drvPath)]);
+   showLog($c, $mode, $log_uri);
 }


 sub view_log : Chained('buildChain') PathPart('log') {
    my ($self, $c, $mode) = @_;
-   showLog($c, $mode, $c->stash->{build}->finished,
-       $c->stash->{build}->drvpath);
+
+   my $drvPath = $c->stash->{build}->drvpath;
+   my $log_uri = $c->uri_for($c->controller('Root')->action_for("log"), [basename($drvPath)]);
+   showLog($c, $mode, $log_uri);
 }


+sub view_runcommandlog : Chained('buildChain') PathPart('runcommandlog') {
+   my ($self, $c, $uuid, $mode) = @_;
+
+   my $log_uri = $c->uri_for($c->controller('Root')->action_for("runcommandlog"), $uuid);
+   showLog($c, $mode, $log_uri);
+   $c->stash->{template} = 'runcommand-log.tt';
+   $c->stash->{runcommandlog} = $c->stash->{build}->runcommandlogs->find({ uuid => $uuid });
+}
+
+
 sub showLog {
-   my ($c, $mode, $finished, $drvPath) = @_;
+   my ($c, $mode, $log_uri) = @_;
    $mode //= "pretty";

-   my $log_uri = $c->uri_for($c->controller('Root')->action_for("log"), [basename($drvPath)]);
-
    if ($mode eq "pretty") {
        $c->stash->{log_uri} = $log_uri;
        $c->stash->{template} = 'log.tt';

@@ -530,4 +530,23 @@ sub log :Local :Args(1) {
        }
    }

+sub runcommandlog :Local :Args(1) {
+   my ($self, $c, $uuid) = @_;
+
+   my $tail = $c->request->params->{"tail"};
+
+   die if defined $tail && $tail !~ /^[0-9]+$/;
+
+   my $runlog = $c->model('DB')->resultset('RunCommandLogs')->find({ uuid => $uuid })
+       or notFound($c, "The RunCommand log is not available.");
+
+   my $logFile = constructRunCommandLogPath($runlog);
+   if (-f $logFile) {
+       serveLogFile($c, $logFile, $tail);
+       return;
+   } else {
+       notFound($c, "The RunCommand log is not available.");
+   }
+}
+
 1;

@@ -13,12 +13,14 @@ use Nix::Store;
 use Encode;
 use Sys::Hostname::Long;
 use IPC::Run;
+use UUID4::Tiny qw(is_uuid4_string);

 our @ISA = qw(Exporter);
 our @EXPORT = qw(
    cancelBuilds
    captureStdoutStderr
    captureStdoutStderrWithStdin
+   constructRunCommandLogPath
    findLog
    gcRootFor
    getBaseUrl
@@ -589,4 +591,18 @@ sub isLocalStore {
 }


+sub constructRunCommandLogPath {
+   my ($runlog) = @_;
+   my $uuid = $runlog->uuid;
+
+   if (!is_uuid4_string($uuid)) {
+       die "UUID was invalid."
+   }
+
+   my $hydra_path = Hydra::Model::DB::getHydraPath;
+   my $bucket = substr($uuid, 0, 2);
+
+   return "$hydra_path/runcommand-logs/$bucket/$uuid";
+}
+
 1;
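
The new helper above buckets RunCommand logs by the first two characters of the row's v4 UUID under the Hydra data directory. A minimal illustration of what it yields, not part of the diff; the UUID, the data directory, and $runlog are made-up stand-ins for a real RunCommandLogs row:

    # Assuming getHydraPath returns "/var/lib/hydra" and $runlog->uuid is
    # "f47ac10b-58cc-4372-a567-0e02b2c3d479", the helper returns:
    #   /var/lib/hydra/runcommand-logs/f4/f47ac10b-58cc-4372-a567-0e02b2c3d479
    my $path = Hydra::Helper::Nix::constructRunCommandLogPath($runlog);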

@@ -5,6 +5,10 @@ use warnings;
 use parent 'Hydra::Plugin';
 use experimental 'smartmatch';
 use JSON::MaybeXS;
+use File::Basename qw(dirname);
+use File::Path qw(make_path);
+use IPC::Run3;
+use Try::Tiny;

 sub isEnabled {
    my ($self) = @_;
@@ -160,10 +164,31 @@ sub buildFinished {

            $runlog->started();

-           system("$command") == 0
-               or warn "notification command '$command' failed with exit status $? ($!)\n";
+           my $logPath = Hydra::Helper::Nix::constructRunCommandLogPath($runlog) or die "RunCommandLog not found.";
+           my $dir = dirname($logPath);
+           my $oldUmask = umask();
+           my $f;

-           $runlog->completed_with_child_error($?, $!);
+           try {
+               # file: 640, dir: 750
+               umask(0027);
+               make_path($dir);
+
+               open($f, '>', $logPath);
+               umask($oldUmask);
+
+               run3($command, \undef, $f, $f, { return_if_system_error => 1 }) == 1
+                   or warn "notification command '$command' failed with exit status $? ($!)\n";
+
+               close($f);
+
+               $runlog->completed_with_child_error($?, $!);
+               1;
+           } catch {
+               die "Died while trying to process RunCommand (${\$runlog->uuid}): $_";
+           } finally {
+               umask($oldUmask);
+           };
        }
    }
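
The plugin now runs the notification command through IPC::Run3 instead of system(): stdout and stderr are both redirected into the per-run log file, and return_if_system_error => 1 turns a failure to spawn the command into a false return (inspectable via $? and $!) rather than an exception. A minimal standalone sketch of that call pattern, illustrative only; the command and log path below are invented:

    use IPC::Run3;

    # Send the command's stdout and stderr to the same filehandle.
    open(my $fh, '>', '/tmp/example-runcommand.log') or die $!;
    run3("echo hello from RunCommand", \undef, $fh, $fh,
        { return_if_system_error => 1 }) == 1
        or warn "command failed with exit status $? ($!)\n";
    close($fh);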

@@ -42,6 +42,12 @@ __PACKAGE__->table("runcommandlogs");
   is_nullable: 0
   sequence: 'runcommandlogs_id_seq'

+=head2 uuid
+
+  data_type: 'uuid'
+  is_nullable: 0
+  size: 16
+
 =head2 job_matcher

   data_type: 'text'
@@ -98,6 +104,8 @@ __PACKAGE__->add_columns(
    is_nullable => 0,
    sequence => "runcommandlogs_id_seq",
  },
+ "uuid",
+ { data_type => "uuid", is_nullable => 0, size => 16 },
  "job_matcher",
  { data_type => "text", is_nullable => 0 },
  "build_id",
@@ -130,6 +138,20 @@ __PACKAGE__->add_columns(

 __PACKAGE__->set_primary_key("id");

+=head1 UNIQUE CONSTRAINTS
+
+=head2 C<runcommandlogs_uuid_unique>
+
+=over 4
+
+=item * L</uuid>
+
+=back
+
+=cut
+
+__PACKAGE__->add_unique_constraint("runcommandlogs_uuid_unique", ["uuid"]);
+
 =head1 RELATIONS

 =head2 build
@@ -148,10 +170,31 @@ __PACKAGE__->belongs_to(
 );


-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-11-19 15:15:36
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:9AIzlQl1RjRXrs9gQCZKVw
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-24 10:24:52
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:ZVpYU6k3d/k/nitjpdgf/A

 use POSIX qw(WEXITSTATUS WIFEXITED WIFSIGNALED WTERMSIG);
+use UUID4::Tiny qw(create_uuid_string);
+
+
+=head2 new
+
+Initialize a new row object.
+
+Sets the UUID automatically unless a UUID is specified in the attributes.
+
+=cut
+sub new {
+    my ($class, $attrs) = @_;
+
+    if (!defined $attrs->{uuid}) {
+        $attrs->{uuid} = create_uuid_string();
+    }
+
+    my $new = $class->next::method($attrs);
+
+    return $new;
+}

 =head2 started
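
Because the overridden new() fills in a v4 UUID whenever none is supplied, callers can create RunCommandLogs rows without specifying a uuid. A minimal sketch, not part of the diff; $db and $build stand in for an existing schema handle and build row, and the command value is invented:

    my $runlog = $db->resultset('RunCommandLogs')->create({
        job_matcher => "*:*:*",
        build_id    => $build->get_column('id'),
        command     => "true",
    });
    print $runlog->uuid, "\n";   # auto-generated by create_uuid_string()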

@@ -524,11 +524,21 @@ END;
          <div class="d-flex flex-column align-items-end">
            [% IF runcommandlog.start_time != undef %]
              <div>Started at [% INCLUDE renderDateTime timestamp = runcommandlog.start_time; %]</div>
-             [% IF runcommandlog.end_time != undef %]
-               <div>Ran for [% INCLUDE renderDuration duration = runcommandlog.end_time - runcommandlog.start_time %]</div>
-             [% ELSE %]
-               <div>Running for [% INCLUDE renderDuration duration = curTime - runcommandlog.start_time %]</div>
-             [% END %]
+             <div class="d-flex flex-column align-items-end">
+               [% IF runcommandlog.end_time != undef %]
+                 Ran for [% INCLUDE renderDuration duration = runcommandlog.end_time - runcommandlog.start_time %]
+               [% ELSE %]
+                 Running for [% INCLUDE renderDuration duration = curTime - runcommandlog.start_time %]
+               [% END %]
+               [% IF runcommandlog.uuid != undef %]
+                 [% runLog = c.uri_for('/build', build.id, 'runcommandlog', runcommandlog.uuid) %]
+                 <div>
+                   <a class="btn btn-secondary btn-sm" href="[% runLog %]">pretty</a>
+                   <a class="btn btn-secondary btn-sm" href="[% runLog %]/raw">raw</a>
+                   <a class="btn btn-secondary btn-sm" href="[% runLog %]/tail">tail</a>
+                 </div>
+               [% END %]
+             </div>
            [% ELSE %]
              <div>Pending</div>
            [% END %]

src/root/runcommand-log.tt (new file, 49 lines)

@@ -0,0 +1,49 @@
+[% WRAPPER layout.tt
+  titleHTML="RunCommand log of " _ (step ? " step $step.stepnr of " : "") _ "build ${build.id} of job " _ linkToJob(build.jobset, job)
+  title="RunCommand log of " _ (step ? " step $step.stepnr of " : "") _ "build ${build.id} of job " _ makeNameTextForJob(build.jobset, job)
+%]
+[% PROCESS common.tt %]
+
+<p>
+  Below
+  [% IF tail %]
+    are the last lines of
+  [% ELSE %]
+    is
+  [% END %]
+  the output of a RunCommand execution of the command <tt>[% HTML.escape(runcommandlog.command) %]</tt>
+  on <a href="[% c.uri_for('/build', build.id) %]">Build [% build.id %]</a>.
+  [% IF tail %]
+    The <a href="[% c.uri_for('/build', build.id, 'runcommandlog', runcommandlog.uuid) %]">full log</a> is also available.
+  [% END %]
+</p>
+
+<div class="card bg-light"><div class="card-body"><pre class="log" id="contents">
+<em>Loading...</em>
+</pre></div></div>
+
+<script type="text/javascript">
+  $(document).ready(function() {
+    requestPlainFile({
+      url: "[% HTML.escape(log_uri) %]",
+      dataType: "text",
+      type: 'GET',
+      success: function (log_data) {
+
+        [% IF tail %]
+          /* The server may give us a full log (e.g. if the log is in
+             S3). So extract the last lines. */
+          log_data = log_data.split("\n").slice(-[%tail%]).join("\n");
+        [% END %]
+
+        $("#contents").text(log_data);
+      },
+      error: function () {
+        bootbox.alert("The RunCommand log file is not available.");
+        $("#contents").text("(Unavailable)");
+      }
+    });
+  });
+</script>
+
+[% END %]

@@ -568,6 +568,7 @@ create index IndexTaskRetriesOrdered on TaskRetries(retry_at asc);
 -- 3. Update the end_time and exit_code when it completes
 create table RunCommandLogs (
    id serial primary key not null,
+   uuid uuid not null,
    job_matcher text not null,
    build_id integer not null,
    -- TODO: evaluation_id integer not null,
@@ -599,13 +600,16 @@ create table RunCommandLogs (
    constraint RunCommandLogs_end_time_has_start_time check (
        -- If end time is not null, then end_time, exit_code, and core_dumped should not be null
        (end_time is null) or (start_time is not null)
-   )
+   ),
+   -- The uuid should be actually unique.
+   constraint RunCommandLogs_uuid_unique unique(uuid)

    -- Note: if exit_code is not null then signal and core_dumped must be null.
    -- Similarly, if signal is not null then exit_code must be null and
    -- core_dumped must not be null. However, these semantics are tricky
    -- to encode as constraints and probably provide limited actual value.
 );
+create index IndexRunCommandLogsOnBuildID on RunCommandLogs(build_id);

 -- The output paths that have permanently failed.
 create table FailedPaths (

src/sql/upgrade-81.sql (new file, 5 lines)

@@ -0,0 +1,5 @@
+alter table runcommandlogs add column uuid uuid;
+update runcommandlogs set uuid = gen_random_uuid() where uuid is null;
+alter table runcommandlogs alter column uuid set not null;
+alter table runcommandlogs add constraint RunCommandLogs_uuid_unique unique(uuid);
+create index IndexRunCommandLogsOnBuildID on RunCommandLogs(build_id);

@@ -3,7 +3,8 @@ use warnings;
 use Setup;
 use File::Temp;

-my %ctx = test_init();
+my $ctx = test_context();
+my $db = $ctx->db();

 require Hydra::Helper::Nix;

@@ -65,4 +66,28 @@ is(Hydra::Helper::Nix::getMachines(), {

 }, ":)");

+subtest "constructRunCommandLogPath" => sub {
+    my $builds = $ctx->makeAndEvaluateJobset(
+        expression => "basic.nix",
+    );
+    my $build = $builds->{"empty_dir"};
+    my $runlog = $db->resultset('RunCommandLogs')->create({
+        job_matcher => "*:*:*",
+        build_id => $build->get_column('id'),
+        command => "bogus",
+    });
+
+    like(
+        Hydra::Helper::Nix::constructRunCommandLogPath($runlog),
+        qr@/runcommand-logs/[0-9a-f]{2}/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}@,
+        "The constructed RunCommandLog path is sufficiently bucketed and UUID-like."
+    );
+
+    my $badlog = $db->resultset('RunCommandLogs')->new({ uuid => "../../../etc/passwd" });
+    ok(
+        dies { Hydra::Helper::Nix::constructRunCommandLogPath($badlog) },
+        "Expected invalid UUID to be rejected and not have a path constructed for it.",
+    );
+};
+
 done_testing;

@@ -58,6 +58,12 @@ subtest "Validate a run log was created" => sub {
    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
    is($runlog->end_time, within(time() - 1, 2), "The end time is also recent.");
    is($runlog->exit_code, 0, "This command should have succeeded.");
+
+   subtest "Validate the run log file exists" => sub {
+       my $logPath = Hydra::Helper::Nix::constructRunCommandLogPath($runlog);
+       ok(-f $logPath, "The run log was saved to a file.");
+       ok(-z $logPath, "The run log was empty.");
+   };
 };

 done_testing;

t/Hydra/Plugin/RunCommand/http.t (new file, 50 lines)

@@ -0,0 +1,50 @@
+use feature 'unicode_strings';
+use strict;
+use warnings;
+use Setup;
+use Test2::V0;
+
+my $ctx = test_context(
+    hydra_config => q|
+    <runcommand>
+      command = cp "$HYDRA_JSON" "$HYDRA_DATA/joboutput.json"
+    </runcommand>
+|);
+
+require Catalyst::Test;
+Catalyst::Test->import('Hydra');
+use HTTP::Request::Common qw(GET);
+
+my $db = $ctx->db();
+
+my $builds = $ctx->makeAndEvaluateJobset(
+    expression => "runcommand.nix",
+    build => 1,
+);
+my $build = $builds->{"metrics"};
+
+ok(sendNotifications(), "Notifications execute successfully.");
+
+my $runlog = $build->runcommandlogs->find({});
+ok($runlog->uuid, "The log's uuid is saved.");
+
+my $get1 = request(GET '/build/' . $build->id . '/runcommandlog/' . $runlog->uuid);
+ok($get1->is_success, "GET /build/{id}/runcommandlog/{uuid} succeeded.");
+
+my $get2 = request(GET '/runcommandlog/' . $runlog->uuid);
+ok($get2->is_success, "GET /runcommandlog/{uuid} succeeded.");
+
+my $get3 = request(GET '/runcommandlog/some-invalid-or-nonexistent-uuid');
+ok(!$get3->is_success, "GET'ing invalid uuid failed.");
+
+my $get4a = request(GET '/build/' . $build->id . '/runcommandlog/' . $runlog->uuid . '/raw');
+is($get4a->code, 302, "GET /build/{id}/runcommandlog/{uuid}/raw is a redirect.");
+my $get4b = request(GET $get4a->header('location'));
+ok($get4b->is_success, "GET /build/{id}/runcommandlog/{uuid}/raw succeeded.");
+
+my $get5a = request(GET '/build/' . $build->id . '/runcommandlog/some-invalid-or-nonexistent-uuid/raw');
+is($get5a->code, 302, "GET /raw of invalid uuid is a redirect.");
+my $get5b = request(GET $get5a->header("location"));
+ok(!$get5b->is_success, "GET /raw of invalid uuid failed.");
+
+done_testing;

@@ -10,13 +10,11 @@ my %ctx = test_init(
    </runcommand>
 |);

-use Test2::V0;
-use Hydra::Plugin::RunCommand;
-
 require Hydra::Schema;
 require Hydra::Model::DB;

 use Test2::V0;
+use Hydra::Plugin::RunCommand;

 my $db = Hydra::Model::DB->new;
 hydra_setup($db);

@@ -20,6 +20,12 @@ sub new_run_log {
    });
 }

+subtest "A new record has a UUID" => sub {
+    my $runlog = new_run_log();
+    is(length($runlog->uuid), 36, "The UUID attribute is sufficiently UUID-like.");
+    like($runlog->uuid, qr/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/, "The UUID attribute is sufficiently UUID-like.");
+};
+
 subtest "Not yet started" => sub {
    my $runlog = new_run_log();