forked from lix-project/hydra
Merge pull request #1045 from DeterminateSystems/perlcritic-level-2
Perlcritic: level 2
commit cff387a027
@@ -1,4 +1,4 @@
 theme = community
 # 5 is the least complainy, 1 is the most complainy
-severity = 3
+severity = 2

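For reference, the stricter threshold can be reproduced through Perl::Critic's own API. A minimal sketch that mirrors the configuration above; the target path is only an illustration:

use strict;
use warnings;
use Perl::Critic;

# Severity 2 enables every policy except the lowest-priority (severity 1) ones;
# the 'community' theme matches the theme setting in the configuration above.
my $critic = Perl::Critic->new(
    -severity => 2,
    -theme    => 'community',
);

# Illustrative target; any module in the tree would do.
my @violations = $critic->critique('src/lib/Hydra/Helper/CatalystUtils.pm');
print @violations;
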
flake.nix (18 lines changed)

@@ -199,6 +199,21 @@
         };
       };

+      ReadonlyX = final.perlPackages.buildPerlModule {
+        pname = "ReadonlyX";
+        version = "1.04";
+        src = final.fetchurl {
+          url = "mirror://cpan/authors/id/S/SA/SANKO/ReadonlyX-1.04.tar.gz";
+          sha256 = "81bb97dba93ac6b5ccbce04a42c3590eb04557d75018773ee18d5a30fcf48188";
+        };
+        buildInputs = with final.perlPackages; [ ModuleBuildTiny TestFatal ];
+        meta = {
+          homepage = "https://github.com/sanko/readonly";
+          description = "Faster facility for creating read-only scalars, arrays, hashes";
+          license = final.lib.licenses.artistic2;
+        };
+      };
+
       TieHashMethod = final.buildPerlPackage {
         pname = "Tie-Hash-Method";
         version = "0.02";
@@ -462,6 +477,7 @@
             JSON
             JSONMaybeXS
             JSONXS
+            ListSomeUtils
             LWP
             LWPProtocolHttps
             ModulePluggable
@@ -472,7 +488,7 @@
             ParallelForkManager
             PerlCriticCommunity
             PrometheusTinyShared
-            Readonly
+            ReadonlyX
             SetScalar
             SQLSplitStatement
             Starman

@@ -3,7 +3,7 @@ package Hydra::Base::Controller::NixChannel;
 use strict;
 use warnings;
 use base 'Hydra::Base::Controller::REST';
-use List::MoreUtils qw(any);
+use List::SomeUtils qw(any);
 use Nix::Store;
 use Hydra::Helper::Nix;
 use Hydra::Helper::CatalystUtils;

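List::MoreUtils imports are swapped for List::SomeUtils throughout the tree. The replacement module exposes the same block-style functions (any, all, uniq), so only the use lines change. A small self-contained sketch with invented data:

use strict;
use warnings;
use List::SomeUtils qw(any all uniq);

# Invented data; the point is that the call sites keep their shape.
my @systems = ('x86_64-linux', 'aarch64-linux', 'x86_64-linux');

my $has_arm   = any { $_ =~ /^aarch64/ } @systems;   # true
my $all_linux = all { $_ =~ /-linux$/ } @systems;    # true
my @distinct  = uniq @systems;                       # two entries

print "arm=$has_arm all-linux=$all_linux distinct=" . scalar(@distinct) . "\n";
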
@@ -5,7 +5,7 @@ use strict;
 use warnings;

 use base 'DBIx::Class';
-use JSON;
+use JSON::MaybeXS;

 sub TO_JSON {
     my $self = shift;
@@ -27,7 +27,7 @@ sub TO_JSON {
     }

     foreach my $column (@{$hint->{boolean_columns}}) {
-        $json{$column} = $self->get_column($column) ? JSON::true : JSON::false;
+        $json{$column} = $self->get_column($column) ? JSON::MaybeXS::true : JSON::MaybeXS::false;
     }

     foreach my $relname (keys %{$hint->{relations}}) {

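This is the first of many JSON to JSON::MaybeXS conversions; the JSON::true and JSON::false constants become their JSON::MaybeXS equivalents. A minimal sketch of the pattern, using an invented row hash:

use strict;
use warnings;
use JSON::MaybeXS qw(encode_json);

# Invented row standing in for a DBIx::Class result.
my %row = (finished => 1, hidden => 0);

# Same ternary pattern as the TO_JSON hunk above.
my $payload = encode_json({
    finished => $row{finished} ? JSON::MaybeXS::true : JSON::MaybeXS::false,
    hidden   => $row{hidden}   ? JSON::MaybeXS::true : JSON::MaybeXS::false,
});

print "$payload\n";    # e.g. {"finished":true,"hidden":false} (key order may vary)
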
@@ -7,7 +7,6 @@ use base 'Hydra::Base::Controller::REST';
 use Hydra::Helper::Nix;
 use Hydra::Helper::CatalystUtils;
 use Hydra::Controller::Project;
-use JSON;
 use JSON::MaybeXS;
 use DateTime;
 use Digest::SHA qw(sha256_hex);
@@ -87,7 +86,7 @@ sub jobsetToHash {
         triggertime => $jobset->triggertime,
         fetcherrormsg => $jobset->fetcherrormsg,
         errortime => $jobset->errortime,
-        haserrormsg => defined($jobset->errormsg) && $jobset->errormsg ne "" ? JSON::true : JSON::false
+        haserrormsg => defined($jobset->errormsg) && $jobset->errormsg ne "" ? JSON::MaybeXS::true : JSON::MaybeXS::false
     };
 }

@@ -11,7 +11,7 @@ use File::stat;
 use Data::Dump qw(dump);
 use Nix::Store;
 use Nix::Config;
-use List::MoreUtils qw(all);
+use List::SomeUtils qw(all);
 use Encode;
 use MIME::Types;
 use JSON::PP;

@@ -6,7 +6,7 @@ use warnings;
 use base 'Hydra::Base::Controller::NixChannel';
 use Hydra::Helper::Nix;
 use Hydra::Helper::CatalystUtils;
-use List::MoreUtils qw(uniq);
+use List::SomeUtils qw(uniq);


 sub evalChain : Chained('/') PathPart('eval') CaptureArgs(1) {

@@ -11,9 +11,9 @@ use Nix::Store;
 use Nix::Config;
 use Encode;
 use File::Basename;
-use JSON;
+use JSON::MaybeXS;
 use List::Util qw[min max];
-use List::MoreUtils qw{any};
+use List::SomeUtils qw{any};
 use Net::Prometheus;
 use Types::Standard qw/StrMatch/;

@@ -11,7 +11,7 @@ use Hydra::Helper::Nix;
 use Hydra::Helper::CatalystUtils;
 use Hydra::Helper::Email;
 use LWP::UserAgent;
-use JSON;
+use JSON::MaybeXS;
 use HTML::Entities;
 use Encode qw(decode);

@@ -4,7 +4,7 @@ use strict;
 use warnings;
 use utf8;
 use Encode;
-use JSON;
+use JSON::MaybeXS;
 use Nix::Store;
 use Nix::Config;
 use Hydra::Model::DB;
@@ -64,7 +64,8 @@ sub updateDeclarativeJobset {
     $db->txn_do(sub {
         my $jobset = $project->jobsets->update_or_create(\%update);
         $jobset->jobsetinputs->delete;
-        while ((my $name, my $data) = each %{$declSpec->{"inputs"}}) {
+        foreach my $name (keys %{$declSpec->{"inputs"}}) {
+            my $data = $declSpec->{"inputs"}->{$name};
             my $row = {
                 name => $name,
                 type => $data->{type}
@@ -84,7 +85,8 @@ sub handleDeclarativeJobsetJson {
     my @kept = keys %$declSpec;
     push @kept, ".jobsets";
     $project->jobsets->search({ name => { "not in" => \@kept } })->update({ enabled => 0, hidden => 1 });
-    while ((my $jobsetName, my $spec) = each %$declSpec) {
+    foreach my $jobsetName (keys %$declSpec) {
+        my $spec = $declSpec->{$jobsetName};
         eval {
             updateDeclarativeJobset($db, $project, $jobsetName, $spec);
             1;

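The while/each loops are rewritten as foreach over keys, with the value fetched inside the loop body; each keeps iterator state inside the hash itself, which the community policy set flags. A standalone sketch of the rewrite, with invented hash contents:

use strict;
use warnings;

my %inputs = (
    nixpkgs => { type => 'git' },
    ofborg  => { type => 'git' },
);

# Before: `each` keeps iterator state inside %inputs, which is easy to corrupt
# if the loop body touches the hash or exits early.
# while (my ($name, $data) = each %inputs) { ... }

# After: iterate over the list of keys and look the value up explicitly,
# mirroring the updateDeclarativeJobset change above.
foreach my $name (keys %inputs) {
    my $data = $inputs{$name};
    print "$name is a $data->{type} input\n";
}
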
@@ -50,7 +50,7 @@ sub splitPath {
 sub enumerate {
     my ($self) = @_;
     my @paths = sort { length($a) <=> length($b) } @{$self->{"paths"}};
-    return wantarray ? @paths : \@paths;
+    return @paths;
 }

 1;

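With the wantarray branch gone, enumerate() always returns the list, and callers that want a reference build one at the call site (as the updated test near the end of this diff does with \@enumerated). A toy stand-in, since the hunk does not name the package:

use strict;
use warnings;

# Toy stand-in for the class whose enumerate() lost its wantarray branch.
package PathSet {
    sub new       { my ($class, @paths) = @_; return bless { paths => [@paths] }, $class; }
    sub enumerate { my ($self) = @_; return @{ $self->{paths} }; }
}

my $set = PathSet->new("bar.baz", "bar.baz.tux");

my @paths = $set->enumerate();        # list context behaves as before
my $ref   = [ $set->enumerate() ];    # callers build a reference explicitly now

printf "%d paths, first is %s\n", scalar @$ref, $ref->[0];
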
@@ -4,7 +4,7 @@ use utf8;
 use strict;
 use warnings;
 use Exporter;
-use Readonly;
+use ReadonlyX;
 use Nix::Store;
 use Hydra::Helper::Nix;

@@ -34,7 +34,7 @@ our @EXPORT = qw(


 # Columns from the Builds table needed to render build lists.
-Readonly our @buildListColumns => ('id', 'finished', 'timestamp', 'stoptime', 'project', 'jobset', 'job', 'nixname', 'system', 'buildstatus', 'releasename');
+Readonly::Array our @buildListColumns => ('id', 'finished', 'timestamp', 'stoptime', 'project', 'jobset', 'job', 'nixname', 'system', 'buildstatus', 'releasename');


 sub getBuild {
@@ -317,16 +317,16 @@ sub paramToList {


 # Security checking of filenames.
-Readonly our $pathCompRE => "(?:[A-Za-z0-9-\+\._\$][A-Za-z0-9-\+\._\$:]*)";
-Readonly our $relPathRE => "(?:$pathCompRE(?:/$pathCompRE)*)";
-Readonly our $relNameRE => "(?:[A-Za-z0-9-_][A-Za-z0-9-\._]*)";
-Readonly our $attrNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";
-Readonly our $projectNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";
-Readonly our $jobsetNameRE => "(?:[A-Za-z_][A-Za-z0-9-_\.]*)";
-Readonly our $jobNameRE => "(?:$attrNameRE(?:\\.$attrNameRE)*)";
-Readonly our $systemRE => "(?:[a-z0-9_]+-[a-z0-9_]+)";
-Readonly our $userNameRE => "(?:[a-z][a-z0-9_\.]*)";
-Readonly our $inputNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";
+Readonly::Scalar our $pathCompRE => "(?:[A-Za-z0-9-\+\._\$][A-Za-z0-9-\+\._\$:]*)";
+Readonly::Scalar our $relPathRE => "(?:$pathCompRE(?:/$pathCompRE)*)";
+Readonly::Scalar our $relNameRE => "(?:[A-Za-z0-9-_][A-Za-z0-9-\._]*)";
+Readonly::Scalar our $attrNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";
+Readonly::Scalar our $projectNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";
+Readonly::Scalar our $jobsetNameRE => "(?:[A-Za-z_][A-Za-z0-9-_\.]*)";
+Readonly::Scalar our $jobNameRE => "(?:$attrNameRE(?:\\.$attrNameRE)*)";
+Readonly::Scalar our $systemRE => "(?:[a-z0-9_]+-[a-z0-9_]+)";
+Readonly::Scalar our $userNameRE => "(?:[a-z][a-z0-9_\.]*)";
+Readonly::Scalar our $inputNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";


 sub parseJobsetName {

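ReadonlyX provides the typed Readonly::Scalar, Readonly::Array and Readonly::Hash constructors but not the bare Readonly function, which appears to be why the declarations above gain explicit types. A short usage sketch with invented names and values:

use strict;
use warnings;
use ReadonlyX;

# ReadonlyX installs the typed constructors under the familiar Readonly:: names.
Readonly::Scalar my $MAX_RETRIES  => 5;
Readonly::Array  my @BUILD_STATES => ('queued', 'building', 'finished');

print "$MAX_RETRIES retries, first state: $BUILD_STATES[0]\n";

# Writing to a read-only variable dies at run time.
eval { $MAX_RETRIES = 10; 1 } or print "rejected: $@";
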
@@ -5,7 +5,7 @@ use warnings;
 use parent 'Hydra::Plugin';
 use HTTP::Request;
 use LWP::UserAgent;
-use JSON;
+use JSON::MaybeXS;
 use Hydra::Helper::CatalystUtils;
 use File::Temp;
 use POSIX qw(strftime);

@@ -4,7 +4,7 @@ use strict;
 use warnings;
 use parent 'Hydra::Plugin';
 use HTTP::Request;
-use JSON;
+use JSON::MaybeXS;
 use LWP::UserAgent;
 use Hydra::Helper::CatalystUtils;

@@ -6,7 +6,7 @@ use parent 'Hydra::Plugin';
 use HTTP::Request;
 use LWP::UserAgent;
 use Hydra::Helper::CatalystUtils;
-use JSON;
+use JSON::MaybeXS;

 sub isEnabled {
     my ($self) = @_;

@@ -118,7 +118,8 @@ sub fetchInput {
         $jobset->get_column('name'),
         $name);
     # give preference to the options from the input value
-    while (my ($opt_name, $opt_value) = each %{$options}) {
+    foreach my $opt_name (keys %{$options}) {
+        my $opt_value = $options->{$opt_name};
         if ($opt_value =~ /^[+-]?\d+\z/) {
             $opt_value = int($opt_value);
         }

@@ -5,7 +5,7 @@ use warnings;
 use parent 'Hydra::Plugin';

 use HTTP::Request;
-use JSON;
+use JSON::MaybeXS;
 use LWP::UserAgent;
 use Hydra::Helper::CatalystUtils;
 use List::Util qw(max);

@@ -5,7 +5,7 @@ use warnings;
 use parent 'Hydra::Plugin';
 use HTTP::Request;
 use LWP::UserAgent;
-use JSON;
+use JSON::MaybeXS;
 use Hydra::Helper::CatalystUtils;
 use File::Temp;
 use POSIX qw(strftime);

@@ -5,7 +5,7 @@ use warnings;
 use parent 'Hydra::Plugin';
 use HTTP::Request;
 use LWP::UserAgent;
-use JSON;
+use JSON::MaybeXS;
 use Hydra::Helper::CatalystUtils;
 use File::Temp;
 use POSIX qw(strftime);

@@ -4,7 +4,7 @@ use strict;
 use warnings;
 use parent 'Hydra::Plugin';
 use HTTP::Request;
-use JSON;
+use JSON::MaybeXS;
 use LWP::UserAgent;
 use Hydra::Helper::CatalystUtils;
 use List::Util qw(max);

@@ -19,7 +19,7 @@ use warnings;
 use parent 'Hydra::Plugin';
 use HTTP::Request;
 use LWP::UserAgent;
-use JSON;
+use JSON::MaybeXS;
 use Hydra::Helper::CatalystUtils;
 use File::Temp;
 use POSIX qw(strftime);

@@ -4,7 +4,7 @@ use strict;
 use warnings;
 use parent 'Hydra::Plugin';
 use HTTP::Request;
-use JSON;
+use JSON::MaybeXS;
 use LWP::UserAgent;
 use Hydra::Helper::CatalystUtils;
 use List::Util qw(max);

@@ -4,7 +4,7 @@ use strict;
 use warnings;
 use parent 'Hydra::Plugin';
 use experimental 'smartmatch';
-use JSON;
+use JSON::MaybeXS;

 sub isEnabled {
     my ($self) = @_;
@@ -74,7 +74,7 @@ sub makeJsonPayload {
     my $json = {
        event => $event,
        build => $build->id,
-       finished => $build->get_column('finished') ? JSON::true : JSON::false,
+       finished => $build->get_column('finished') ? JSON::MaybeXS::true : JSON::MaybeXS::false,
        timestamp => $build->get_column('timestamp'),
        project => $build->get_column('project'),
        jobset => $build->get_column('jobset'),

@@ -98,7 +98,8 @@ sub buildFinished {
         foreach my $reference (@{$refs}) {
             push @needed_paths, $reference;
         }
-    while (my ($compression_type, $configs) = each %compression_types) {
+    foreach my $compression_type (keys %compression_types) {
+        my $configs = $compression_types{$compression_type};
         my @incomplete_buckets = ();
         # Don't do any work if all the buckets have this path
         foreach my $bucket_config (@{$configs}) {
@@ -144,7 +145,8 @@ sub buildFinished {
     }

     # Upload narinfos
-    while (my ($compression_type, $infos) = each %narinfos) {
+    foreach my $compression_type (keys %narinfos) {
+        my $infos = $narinfos{$compression_type};
         foreach my $bucket_config (@{$compression_types{$compression_type}}) {
             foreach my $info (@{$infos}) {
                 my $bucket = $client->bucket( name => $bucket_config->{name} );

@@ -6,7 +6,7 @@ use parent 'Hydra::Plugin';
 use HTTP::Request;
 use LWP::UserAgent;
 use Hydra::Helper::CatalystUtils;
-use JSON;
+use JSON::MaybeXS;

 =head1 NAME

@@ -134,7 +134,7 @@ __PACKAGE__->has_many(
 # Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-08-26 12:02:36
 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:CkU+hbVTmhfOzQhkHJHCsg

-use JSON;
+use JSON::MaybeXS;

 sub as_json {
     my $self = shift;
@@ -148,7 +148,7 @@ sub as_json {
         "value" => $input->value // "",

         # boolean_columns
-        "emailresponsible" => $self->get_column("emailresponsible") ? JSON::true : JSON::false,
+        "emailresponsible" => $self->get_column("emailresponsible") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
     );

     return \%json;

@@ -375,7 +375,7 @@ __PACKAGE__->has_many(
 # Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-08-26 12:02:36
 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:iI44C3BFTo6IsS1tBwWYsg

-use JSON;
+use JSON::MaybeXS;

 =head2 builds

@@ -423,8 +423,8 @@ sub as_json {
         "flake" => $self->get_column("flake") // "",

         # boolean_columns
-        "enableemail" => $self->get_column("enableemail") ? JSON::true : JSON::false,
-        "visible" => $self->get_column("hidden") ? JSON::false : JSON::true,
+        "enableemail" => $self->get_column("enableemail") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
+        "visible" => $self->get_column("hidden") ? JSON::MaybeXS::false : JSON::MaybeXS::true,

         "inputs" => { map { $_->name => $_ } $self->jobsetinputs }
     );

@@ -246,7 +246,7 @@ __PACKAGE__->many_to_many("usernames", "projectmembers", "username");
 # Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-08-26 12:02:36
 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:nKVZ8ZNCZQQ52zbpDAaoQQ

-use JSON;
+use JSON::MaybeXS;

 sub as_json {
     my $self = shift;
@@ -260,8 +260,8 @@ sub as_json {
         "owner" => $self->get_column("owner") // "",

         # boolean_columns
-        "enabled" => $self->get_column("enabled") ? JSON::true : JSON::false,
-        "hidden" => $self->get_column("hidden") ? JSON::true : JSON::false,
+        "enabled" => $self->get_column("enabled") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
+        "hidden" => $self->get_column("hidden") ? JSON::MaybeXS::true : JSON::MaybeXS::false,

         "jobsets" => [ map { $_->name } $self->jobsets ]
     );

@@ -15,7 +15,7 @@ use Hydra::Helper::Nix;
 use Hydra::Model::DB;
 use Hydra::Plugin;
 use Hydra::Schema;
-use JSON;
+use JSON::MaybeXS;
 use Net::Statsd;
 use Nix::Store;
 use Time::HiRes qw(clock_gettime CLOCK_MONOTONIC);
@@ -753,7 +753,8 @@ sub checkJobsetWrapped {

     if ($jobsetChanged) {
         # Create JobsetEvalMembers mappings.
-        while (my ($id, $x) = each %buildMap) {
+        foreach my $id (keys %buildMap) {
+            my $x = $buildMap{$id};
             $ev->jobsetevalmembers->create({ build => $id, isnew => $x->{new} });
         }

@@ -762,7 +763,8 @@ sub checkJobsetWrapped {
         # builds for the same derivation, pick the one with the
         # shortest name.
         my %drvPathToId;
-        while (my ($id, $x) = each %buildMap) {
+        foreach my $id (keys %buildMap) {
+            my $x = $buildMap{$id};
             my $y = $drvPathToId{$x->{drvPath}};
             if (defined $y) {
                 next if length $x->{jobName} > length $y->{jobName};
@@ -806,7 +808,8 @@ sub checkJobsetWrapped {

         # Wake up hydra-queue-runner.
         my $lowestId;
-        while (my ($id, $x) = each %buildMap) {
+        foreach my $id (keys %buildMap) {
+            my $x = $buildMap{$id};
             $lowestId = $id if $x->{new} && (!defined $lowestId || $id < $lowestId);
         }
         $notifyAdded->execute($lowestId) if defined $lowestId;

@@ -6,7 +6,7 @@ use utf8;
 use Hydra::Helper::Nix;
 use Net::Statsd;
 use File::Slurper qw(read_text);
-use JSON;
+use JSON::MaybeXS;
 use Getopt::Long qw(:config gnu_getopt);

 STDERR->autoflush(1);

@@ -1,7 +1,7 @@
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);
 use File::Copy;

 my %ctx = test_init(

@@ -2,7 +2,7 @@ use feature 'unicode_strings';
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);

 my %ctx = test_init();

@@ -1,7 +1,7 @@
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);
 use Data::Dumper;
 use URI;
 my %ctx = test_init();

@@ -2,7 +2,7 @@ use feature 'unicode_strings';
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);

 my %ctx = test_init();

@@ -4,7 +4,7 @@ use warnings;
 use Setup;
 use IO::Uncompress::Bunzip2 qw(bunzip2);
 use Archive::Tar;
-use JSON qw(decode_json);
+use JSON::MaybeXS qw(decode_json);
 use Data::Dumper;
 my %ctx = test_init(
     use_external_destination_store => 0

@@ -2,7 +2,7 @@ use feature 'unicode_strings';
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);

 my %ctx = test_init();

@@ -47,7 +47,7 @@ subtest 'Create new jobset "job" as flake type' => sub {
         Cookie => $cookie,
         Content => encode_json({
             enabled => 2,
-            visible => JSON::true,
+            visible => JSON::MaybeXS::true,
             name => "job",
             type => 1,
             description => "test jobset",
@@ -72,12 +72,12 @@ subtest 'Read newly-created jobset "job"' => sub {
         description => "test jobset",
         emailoverride => "",
         enabled => 2,
-        enableemail => JSON::false,
+        enableemail => JSON::MaybeXS::false,
         errortime => undef,
         errormsg => "",
         fetcherrormsg => "",
         flake => "github:nixos/nix",
-        visible => JSON::true,
+        visible => JSON::MaybeXS::true,
         inputs => {},
         keepnr => 3,
         lastcheckedtime => undef,
@@ -100,7 +100,7 @@ subtest 'Update jobset "job" to legacy type' => sub {
         Cookie => $cookie,
         Content => encode_json({
             enabled => 3,
-            visible => JSON::true,
+            visible => JSON::MaybeXS::true,
             name => "job",
             type => 0,
             nixexprinput => "ofborg",
@@ -130,17 +130,17 @@ subtest 'Update jobset "job" to legacy type' => sub {
         description => "test jobset",
         emailoverride => "",
         enabled => 3,
-        enableemail => JSON::false,
+        enableemail => JSON::MaybeXS::false,
         errortime => undef,
         errormsg => "",
         fetcherrormsg => "",
         flake => "",
-        visible => JSON::true,
+        visible => JSON::MaybeXS::true,
         inputs => {
             ofborg => {
                 name => "ofborg",
                 type => "git",
-                emailresponsible => JSON::false,
+                emailresponsible => JSON::MaybeXS::false,
                 value => "https://github.com/NixOS/ofborg.git released"
             }
         },
@@ -165,7 +165,7 @@ subtest 'Update jobset "job" to have an invalid input type' => sub {
         Cookie => $cookie,
         Content => encode_json({
             enabled => 3,
-            visible => JSON::true,
+            visible => JSON::MaybeXS::true,
             name => "job",
             type => 0,
             nixexprinput => "ofborg",

@@ -2,7 +2,7 @@ use feature 'unicode_strings';
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);

 my %ctx = test_init();

@@ -2,7 +2,7 @@ use feature 'unicode_strings';
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);

 my %ctx = test_init();

@@ -2,7 +2,7 @@ use strict;
 use warnings;
 use Setup;
 use Data::Dumper;
-use JSON qw(decode_json);
+use JSON::MaybeXS qw(decode_json);
 my %ctx = test_init(
     # Without this, the test will fail because a `file:` store is not treated as a
     # local store by `isLocalStore` in src/lib/Hydra/Helper/Nix.pm, and any

@@ -2,7 +2,7 @@ use feature 'unicode_strings';
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);

 my %ctx = test_init();

@@ -2,7 +2,7 @@ use feature 'unicode_strings';
 use strict;
 use warnings;
 use Setup;
-use JSON qw(decode_json encode_json);
+use JSON::MaybeXS qw(decode_json encode_json);

 my %ctx = test_init();

@@ -45,8 +45,8 @@ subtest "Read project 'tests'" => sub {
     is(decode_json($projectinfo->content), {
         description => "",
         displayname => "Tests",
-        enabled => JSON::true,
-        hidden => JSON::false,
+        enabled => JSON::MaybeXS::true,
+        hidden => JSON::MaybeXS::false,
         homepage => "",
         jobsets => [],
         name => "tests",
@@ -61,8 +61,8 @@ subtest "Transitioning from declarative project to normal" => sub {
         Content_Type => 'application/json',
         Cookie => $cookie,
         Content => encode_json({
-            enabled => JSON::true,
-            visible => JSON::true,
+            enabled => JSON::MaybeXS::true,
+            visible => JSON::MaybeXS::true,
             name => "tests",
             displayname => "Tests",
             declarative => {
@@ -84,8 +84,8 @@ subtest "Transitioning from declarative project to normal" => sub {
     is(decode_json($projectinfo->content), {
         description => "",
         displayname => "Tests",
-        enabled => JSON::true,
-        hidden => JSON::false,
+        enabled => JSON::MaybeXS::true,
+        hidden => JSON::MaybeXS::false,
         homepage => "",
         jobsets => [".jobsets"],
         name => "tests",
@@ -104,8 +104,8 @@ subtest "Transitioning from declarative project to normal" => sub {
         Content_Type => 'application/json',
         Cookie => $cookie,
         Content => encode_json({
-            enabled => JSON::true,
-            visible => JSON::true,
+            enabled => JSON::MaybeXS::true,
+            visible => JSON::MaybeXS::true,
             name => "tests",
             displayname => "Tests",
             declarative => {
@@ -127,8 +127,8 @@ subtest "Transitioning from declarative project to normal" => sub {
     is(decode_json($projectinfo->content), {
         description => "",
         displayname => "Tests",
-        enabled => JSON::true,
-        hidden => JSON::false,
+        enabled => JSON::MaybeXS::true,
+        hidden => JSON::MaybeXS::false,
         homepage => "",
         jobsets => [],
         name => "tests",

@ -31,8 +31,9 @@ $attrs->registerValue("foo");
|
|||
$attrs->registerValue("bar.baz.tux");
|
||||
$attrs->registerValue("bar.baz.bux.foo.bar.baz");
|
||||
|
||||
my @enumerated = $attrs->enumerate();
|
||||
is(
|
||||
$attrs->enumerate(),
|
||||
\@enumerated,
|
||||
[
|
||||
# "foo": skipped since we're registering values, and we
|
||||
# only want to track nested attribute sets.
|
||||
|
|
|
@@ -1,7 +1,7 @@
 use strict;
 use warnings;
 use Setup;
-use JSON;
+use JSON::MaybeXS;
 use File::Copy;

 my %ctx = test_init(

@@ -1,7 +1,7 @@
 use feature 'unicode_strings';
 use strict;
 use warnings;
-use JSON;
+use JSON::MaybeXS;
 use Setup;

 my %ctx = test_init(
@@ -42,7 +42,7 @@ my $dat = do {
     open(my $json_fh, "<", $filename)
         or die("Can't open \"$filename\": $!\n");
     local $/;
-    my $json = JSON->new;
+    my $json = JSON::MaybeXS->new;
     $json->decode(<$json_fh>)
 };

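The object interface carries over unchanged: JSON::MaybeXS->new forwards its options to whichever backend it selected and returns an object with the usual encode and decode methods. A self-contained sketch with an invented payload:

use strict;
use warnings;
use JSON::MaybeXS;

# new() forwards options to the selected backend (Cpanel::JSON::XS, JSON::XS or JSON::PP).
my $json = JSON::MaybeXS->new(utf8 => 1, canonical => 1);

# Invented payload standing in for the file the test reads.
my $decoded = $json->decode('{"event":"buildFinished","finished":true}');
printf "%s (finished: %s)\n", $decoded->{event}, $decoded->{finished} ? "yes" : "no";
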
@@ -1,5 +1,6 @@
 use strict;
 use warnings;
+use JSON::MaybeXS;
 use Setup;

 my %ctx = test_init(
@@ -44,7 +45,7 @@ subtest "Validate the top level fields match" => sub {
     is($dat->{build}, $build->id, "The build event matches our expected ID.");
     is($dat->{buildStatus}, 0, "The build status matches.");
     is($dat->{event}, "buildFinished", "The build event matches.");
-    is($dat->{finished}, JSON::true, "The build finished.");
+    is($dat->{finished}, JSON::MaybeXS::true, "The build finished.");
     is($dat->{project}, "tests", "The project matches.");
     is($dat->{jobset}, "basic", "The jobset matches.");
     is($dat->{job}, "metrics", "The job matches.");

@@ -1,7 +1,7 @@
 use feature 'unicode_strings';
 use strict;
 use warnings;
-use JSON;
+use JSON::MaybeXS;
 use Setup;

 my %ctx = test_init(

@@ -1,7 +1,7 @@
 use feature 'unicode_strings';
 use strict;
 use warnings;
-use JSON;
+use JSON::MaybeXS;
 use Setup;

 my $binarycachedir = File::Temp->newdir();

@@ -1,7 +1,7 @@
 use strict;
 use warnings;
 use LWP::UserAgent;
-use JSON;
+use JSON::MaybeXS;

 my $ua = LWP::UserAgent->new;
 $ua->cookie_jar({});