Merge pull request #876 from grahamc/per-test-db
Create an ephemeral PostgreSQL database and Nix store per test, split up tests.
This commit is contained in:
commit 8d3633594b

flake.nix (61 lines changed)

@@ -37,6 +37,66 @@
 # Add LDAP dependencies that aren't currently found within nixpkgs.
 perlPackages = prev.perlPackages // {
+  TestPostgreSQL = final.perlPackages.buildPerlModule {
+    pname = "Test-PostgreSQL";
+    version = "1.27";
+    src = final.fetchurl {
+      url = "mirror://cpan/authors/id/T/TJ/TJC/Test-PostgreSQL-1.27.tar.gz";
+      sha256 = "b1bd231693100cc40905fb0ba3173173201621de9c8301f21c5b593b0a46f907";
+    };
+    buildInputs = with final.perlPackages; [ ModuleBuildTiny TestSharedFork pkgs.postgresql ];
+    propagatedBuildInputs = with final.perlPackages; [ DBDPg DBI FileWhich FunctionParameters Moo TieHashMethod TryTiny TypeTiny ];
+
+    makeMakerFlags = "POSTGRES_HOME=${final.postgresql}";
+
+    meta = {
+      homepage = https://github.com/TJC/Test-postgresql;
+      description = "PostgreSQL runner for tests";
+      license = with final.lib.licenses; [ artistic2 ];
+    };
+  };
+
+  FunctionParameters = final.buildPerlPackage {
+    pname = "Function-Parameters";
+    version = "2.001003";
+    src = final.fetchurl {
+      url = "mirror://cpan/authors/id/M/MA/MAUKE/Function-Parameters-2.001003.tar.gz";
+      sha256 = "eaa22c6b43c02499ec7db0758c2dd218a3b2ab47a714b2bdf8010b5ee113c242";
+    };
+    buildInputs = with final.perlPackages; [ DirSelf TestFatal ];
+    meta = {
+      description = "Define functions and methods with parameter lists (\"subroutine signatures\")";
+      license = with final.lib.licenses; [ artistic1 gpl1Plus ];
+    };
+  };
+
+  DirSelf = final.buildPerlPackage {
+    pname = "Dir-Self";
+    version = "0.11";
+    src = final.fetchurl {
+      url = "mirror://cpan/authors/id/M/MA/MAUKE/Dir-Self-0.11.tar.gz";
+      sha256 = "e251a51abc7d9ba3e708f73c2aa208e09d47a0c528d6254710fa78cc8d6885b5";
+    };
+    meta = {
+      homepage = "https://github.com/mauke/Dir-Self";
+      description = "A __DIR__ constant for the directory your source file is in";
+      license = with final.lib.licenses; [ artistic1 gpl1Plus ];
+    };
+  };
+
+  TieHashMethod = final.buildPerlPackage {
+    pname = "Tie-Hash-Method";
+    version = "0.02";
+    src = final.fetchurl {
+      url = "mirror://cpan/authors/id/Y/YV/YVES/Tie-Hash-Method-0.02.tar.gz";
+      sha256 = "d513fbb51413f7ca1e64a1bdce6194df7ec6076dea55066d67b950191eec32a9";
+    };
+    meta = {
+      description = "Tied hash with specific methods overriden by callbacks";
+      license = with final.lib.licenses; [ artistic1 ];
+    };
+  };
+
   Test2Harness = final.buildPerlPackage {
     pname = "Test2-Harness";
     version = "1.000042";

@@ -222,6 +282,7 @@
     SysHostnameLong
     TermSizeAny
     TestMore
+    TestPostgreSQL
     TextDiff
     Test2Harness
     TextTable

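The overlay entries above package Test::PostgreSQL 1.27 (plus Function::Parameters, Dir::Self and Tie::Hash::Method, which it pulls in) so that every test can start its own throw-away PostgreSQL server. As a rough sketch of what that CPAN module provides — the same calls the new Setup::test_init further down in this diff makes:

    use strict;
    use Test::PostgreSQL;

    # Start a private postgres instance in a temporary directory; the initdb
    # locale matches what Setup::test_init passes below.
    my $pgsql = Test::PostgreSQL->new(
        extra_initdb_args => "--locale C.UTF-8",
    ) or die $Test::PostgreSQL::errstr;

    print $pgsql->dsn, "\n";   # e.g. DBI:Pg:dbname=test;host=...;port=...;user=postgres

    # The server is stopped and its data directory removed when $pgsql goes out of scope.
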
@@ -13,10 +13,15 @@ struct Connection : pqxx::connection
 {
     using namespace nix;
     auto s = getEnv("HYDRA_DBI").value_or("dbi:Pg:dbname=hydra;");
-    std::string prefix = "dbi:Pg:";
-    if (std::string(s, 0, prefix.size()) != prefix)
-        throw Error("$HYDRA_DBI does not denote a PostgreSQL database");
-    return concatStringsSep(" ", tokenizeString<Strings>(string(s, prefix.size()), ";"));
+
+    std::string lower_prefix = "dbi:Pg:";
+    std::string upper_prefix = "DBI:Pg:";
+
+    if (hasPrefix(s, lower_prefix) || hasPrefix(s, upper_prefix)) {
+        return concatStringsSep(" ", tokenizeString<Strings>(string(s, lower_prefix.size()), ";"));
+    }
+
+    throw Error("$HYDRA_DBI does not denote a PostgreSQL database");
 }
 };

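The behavioural change in this hunk is that the DSN prefix check now also accepts the uppercase "DBI:Pg:" spelling, which appears to be the form Test::PostgreSQL's dsn method hands back. For illustration only (this snippet is not part of the change), the same DBI-to-libpq conversion sketched in Perl:

    use strict;

    # Strip the dbi:Pg:/DBI:Pg: prefix and turn the ";"-separated DBI options
    # into the space-separated keyword string libpqxx understands.
    my $dsn = "DBI:Pg:dbname=hydra;host=/tmp;port=5432";
    (my $conninfo = $dsn) =~ s/^dbi:Pg://i;       # accept either capitalisation
    $conninfo = join " ", split /;/, $conninfo;
    print "$conninfo\n";                          # dbname=hydra host=/tmp port=5432
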
@@ -25,9 +25,7 @@ EXTRA_DIST = \
 	$(TESTS)

 TESTS = \
-	set-up.pl \
-	test.pl \
-	tear-down.pl
+	test.pl

 check_SCRIPTS = repos

tests/build-products.t (new file, 43 lines)

@@ -0,0 +1,43 @@
use strict;
use Cwd;
use Setup;

(my $datadir, my $pgsql) = test_init();

require Hydra::Schema;
require Hydra::Model::DB;

use Test2::V0;

my $db = Hydra::Model::DB->new;
hydra_setup($db);

# Test build products

my $jobset = createBaseJobset("build-products", "build-products.nix");

ok(evalSucceeds($jobset), "Evaluating jobs/build-products.nix should exit with return code 0");
is(nrQueuedBuildsForJobset($jobset), 2, "Evaluating jobs/build-products.nix should result in 2 builds");

for my $build (queuedBuildsForJobset($jobset)) {
    subtest "For the build job '" . $build->job . "'" => sub {
        ok(runBuild($build), "Build should exit with code 0");
        my $newbuild = $db->resultset('Builds')->find($build->id);

        is($newbuild->finished, 1, "Build should have finished");
        is($newbuild->buildstatus, 0, "Build should have buildstatus 0");

        my $buildproducts = $db->resultset('BuildProducts')->search({ build => $build->id });
        my $buildproduct = $buildproducts->next;

        if($build->job eq "simple") {
            is($buildproduct->name, "text.txt", "We should have \"text.txt\"");
        } elsif ($build->job eq "with_spaces") {
            is($buildproduct->name, "some text.txt", "We should have: \"some text.txt\"");
        }
    };
}

done_testing;

tests/evaluate-basic.t (new file, 32 lines)

@@ -0,0 +1,32 @@
use feature 'unicode_strings';
use strict;
use Cwd;
use Setup;

(my $datadir, my $pgsql) = test_init();

require Hydra::Schema;
require Hydra::Model::DB;

use Test2::V0;

my $db = Hydra::Model::DB->new;
hydra_setup($db);

my $project = $db->resultset('Projects')->create({name => "tests", displayname => "", owner => "root"});

# Most basic test case, no parameters
my $jobset = createBaseJobset("basic", "basic.nix");

ok(evalSucceeds($jobset), "Evaluating jobs/basic.nix should exit with return code 0");
is(nrQueuedBuildsForJobset($jobset), 3, "Evaluating jobs/basic.nix should result in 3 builds");

for my $build (queuedBuildsForJobset($jobset)) {
    ok(runBuild($build), "Build '".$build->job."' from jobs/basic.nix should exit with code 0");
    my $newbuild = $db->resultset('Builds')->find($build->id);
    is($newbuild->finished, 1, "Build '".$build->job."' from jobs/basic.nix should be finished.");
    my $expected = $build->job eq "fails" ? 1 : $build->job =~ /with_failed/ ? 6 : 0;
    is($newbuild->buildstatus, $expected, "Build '".$build->job."' from jobs/basic.nix should have buildstatus $expected.");
}

done_testing;

tests/evaluate-dependent-jobsets.t (new file, 43 lines)

@@ -0,0 +1,43 @@
use strict;
use Cwd;
use Setup;

(my $datadir, my $pgsql) = test_init();

require Hydra::Schema;
require Hydra::Model::DB;

use Test2::V0;

my $db = Hydra::Model::DB->new;
hydra_setup($db);

# Test jobset with 2 jobs, one has parameter of succeeded build of the other
my $jobset = createJobsetWithOneInput("build-output-as-input", "build-output-as-input.nix", "build1", "build", "build1");

ok(evalSucceeds($jobset), "Evaluating jobs/build-output-as-input.nix should exit with return code 0");
is(nrQueuedBuildsForJobset($jobset), 1 , "Evaluation should result in 1 build in queue");

subtest "For the 'build1' job" => sub {
    my ($build) = queuedBuildsForJobset($jobset);
    is($build->job, "build1", "Verify the only job we got is for 'build1'");

    ok(runBuild($build), "Build should exit with code 0");
    my $newbuild = $db->resultset('Builds')->find($build->id);
    is($newbuild->finished, 1, "Build should be finished.");
    is($newbuild->buildstatus, 0, "Build should have buildstatus 0.");
};

ok(evalSucceeds($jobset), "Evaluating jobs/build-output-as-input.nix for second time should exit with return code 0");
is(nrQueuedBuildsForJobset($jobset), 1 , "The second evaluation should result in 1 new build in queue: build2");
subtest "For the 'build2' job" => sub {
    my ($build) = queuedBuildsForJobset($jobset);
    is($build->job, "build2", "Verify the only job we got is for 'build2'");

    ok(runBuild($build), "Build should exit with code 0");
    my $newbuild = $db->resultset('Builds')->find($build->id);
    is($newbuild->finished, 1, "Build should be finished.");
    is($newbuild->buildstatus, 0, "Build should have buildstatus 0.");
};

done_testing;

(deleted file, 173 lines)

@@ -1,173 +0,0 @@
use strict;
use Hydra::Schema;
use Hydra::Model::DB;
use Cwd;
use Setup;

my $db = Hydra::Model::DB->new;

use Test::Simple tests => 76;

hydra_setup($db);

my $res;
my $stdout;
my $stderr;

my $jobsBaseUri = "file://".getcwd;
my $project = $db->resultset('Projects')->create({name => "tests", displayname => "", owner => "root"});
my $jobset;

# Most basic test case, no parameters
$jobset = createBaseJobset("basic", "basic.nix");

ok(evalSucceeds($jobset), "Evaluating jobs/basic.nix should exit with return code 0");
ok(nrQueuedBuildsForJobset($jobset) == 3 , "Evaluating jobs/basic.nix should result in 3 builds");

for my $build (queuedBuildsForJobset($jobset)) {
    ok(runBuild($build), "Build '".$build->job."' from jobs/basic.nix should exit with code 0");
    my $newbuild = $db->resultset('Builds')->find($build->id);
    my $expected = $build->job eq "fails" ? 1 : $build->job =~ /with_failed/ ? 6 : 0;
    ok($newbuild->finished == 1 && $newbuild->buildstatus == $expected, "Build '".$build->job."' from jobs/basic.nix should have buildstatus $expected");
}

# Test jobset with 2 jobs, one has parameter of succeeded build of the other
$jobset = createJobsetWithOneInput("build-output-as-input", "build-output-as-input.nix", "build1", "build", "build1");

ok(evalSucceeds($jobset), "Evaluating jobs/build-output-as-input.nix should exit with return code 0");
ok(nrQueuedBuildsForJobset($jobset) == 1 , "Evaluating jobs/build-output-as-input.nix for first time should result in 1 build in queue");
for my $build (queuedBuildsForJobset($jobset)) {
    ok(runBuild($build), "Build '".$build->job."' from jobs/basic.nix should exit with code 0");
    my $newbuild = $db->resultset('Builds')->find($build->id);
    ok($newbuild->finished == 1 && $newbuild->buildstatus == 0, "Build '".$build->job."' from jobs/basic.nix should have buildstatus 0");
}

ok(evalSucceeds($jobset), "Evaluating jobs/build-output-as-input.nix for second time should exit with return code 0");
ok(nrQueuedBuildsForJobset($jobset) == 1 , "Evaluating jobs/build-output-as-input.nix for second time after building build1 should result in 1 build in queue");
for my $build (queuedBuildsForJobset($jobset)) {
    ok(runBuild($build), "Build '".$build->job."' from jobs/basic.nix should exit with code 0");
    my $newbuild = $db->resultset('Builds')->find($build->id);
    ok($newbuild->finished == 1 && $newbuild->buildstatus == 0, "Build '".$build->job."' from jobs/basic.nix should have buildstatus 0");
}


# Test scm inputs
my @scminputs = (
    {
        name => "svn",
        nixexpr => "svn-input.nix",
        type => "svn",
        uri => "$jobsBaseUri/svn-repo",
        update => getcwd . "/jobs/svn-update.sh"
    },
    {
        name => "svn-checkout",
        nixexpr => "svn-checkout-input.nix",
        type => "svn-checkout",
        uri => "$jobsBaseUri/svn-checkout-repo",
        update => getcwd . "/jobs/svn-checkout-update.sh"
    },
    {
        name => "git",
        nixexpr => "git-input.nix",
        type => "git",
        uri => "$jobsBaseUri/git-repo",
        update => getcwd . "/jobs/git-update.sh"
    },
    {
        name => "git-rev",
        nixexpr => "git-rev-input.nix",
        type => "git",
        uri => "$jobsBaseUri/git-repo 7f60df502b96fd54bbfa64dd94b56d936a407701",
        update => getcwd . "/jobs/git-rev-update.sh"
    },
    {
        name => "deepgit",
        nixexpr => "deepgit-input.nix",
        type => "git",
        uri => "$jobsBaseUri/git-repo master 1",
        update => getcwd . "/jobs/git-update.sh"
    },
    {
        name => "bzr",
        nixexpr => "bzr-input.nix",
        type => "bzr",
        uri => "$jobsBaseUri/bzr-repo",
        update => getcwd . "/jobs/bzr-update.sh"
    },
    {
        name => "bzr-checkout",
        nixexpr => "bzr-checkout-input.nix",
        type => "bzr-checkout",
        uri => "$jobsBaseUri/bzr-checkout-repo",
        update => getcwd . "/jobs/bzr-checkout-update.sh"
    },
    {
        name => "hg",
        nixexpr => "hg-input.nix",
        type => "hg",
        uri => "$jobsBaseUri/hg-repo",
        update => getcwd . "/jobs/hg-update.sh"
    },
    {
        name => "darcs",
        nixexpr => "darcs-input.nix",
        type => "darcs",
        uri => "$jobsBaseUri/darcs-repo",
        update => getcwd . "/jobs/darcs-update.sh"
    }
);

foreach my $scm ( @scminputs ) {
    my $scmName = $scm->{"name"};
    my $nixexpr = $scm->{"nixexpr"};
    my $type = $scm->{"type"};
    my $uri = $scm->{"uri"};
    my $update = $scm->{"update"};
    $jobset = createJobsetWithOneInput($scmName, $nixexpr, "src", $type, $uri);

    my $state = 0;
    my $q = 0;
    my ($loop, $updated) = updateRepository($scmName, $update);
    while($loop) {
        my $c = 0;

        # Verify that it can be fetched and possibly queued.
        ok(evalSucceeds($jobset), "$scmName:$state.$c: Evaluating nix-expression."); $c++;

        # Verify that the evaluation has queued a new job and evaluate again to ...
        if ($updated) {
            $q++;
            ok(nrQueuedBuildsForJobset($jobset) == $q, "$scmName:$state.$c: Expect $q jobs in the queue."); $c++;
            ok(evalSucceeds($jobset), "$scmName:$state.$c: Evaluating nix-expression again."); $c++;
        }

        # ... check that it is deterministic and not queued again.
        ok(nrQueuedBuildsForJobset($jobset) == $q, "$scmName:$state.$c: Expect $q jobs in the queue."); $c++;

        $state++;
        ($loop, $updated) = updateRepository($scmName, $update, getcwd . "/$scmName-repo/");
    }
}

# Test build products

$jobset = createBaseJobset("build-products", "build-products.nix");

ok(evalSucceeds($jobset), "Evaluating jobs/build-products.nix should exit with return code 0");
ok(nrQueuedBuildsForJobset($jobset) == 2 , "Evaluating jobs/build-products.nix should result in 2 builds");

for my $build (queuedBuildsForJobset($jobset)) {
    ok(runBuild($build), "Build '".$build->job."' from jobs/build-products.nix should exit with code 0");
    my $newbuild = $db->resultset('Builds')->find($build->id);
    ok($newbuild->finished == 1 && $newbuild->buildstatus == 0, "Build '".$build->job."' from jobs/build-products.nix should have buildstatus 0");

    my $buildproducts = $db->resultset('BuildProducts')->search({ build => $build->id });
    my $buildproduct = $buildproducts->next;

    if($build->job eq "simple") {
        ok($buildproduct->name eq "text.txt", "We should have text.txt, but found: ".$buildproduct->name."\n");
    } elsif ($build->job eq "with_spaces") {
        ok($buildproduct->name eq "some text.txt", "We should have: \"some text.txt\", but found: ".$buildproduct->name."\n");
    }
}

tests/input-types.t (new file, 122 lines)

@@ -0,0 +1,122 @@
use strict;
use Cwd;
use Setup;

(my $datadir, my $pgsql) = test_init();

require Hydra::Schema;
require Hydra::Model::DB;

use Test2::V0;

my $db = Hydra::Model::DB->new;
hydra_setup($db);

my $testdir = getcwd;
my $scratchdir = "$datadir/scratch";
mkdir $scratchdir;
my $jobsBaseUri = "file://".$scratchdir;

# Test scm inputs
my @scminputs = (
    {
        name => "svn",
        nixexpr => "svn-input.nix",
        type => "svn",
        uri => "$jobsBaseUri/svn-repo",
        update => $testdir . "/jobs/svn-update.sh"
    },
    {
        name => "svn-checkout",
        nixexpr => "svn-checkout-input.nix",
        type => "svn-checkout",
        uri => "$jobsBaseUri/svn-checkout-repo",
        update => $testdir . "/jobs/svn-checkout-update.sh"
    },
    {
        name => "git",
        nixexpr => "git-input.nix",
        type => "git",
        uri => "$jobsBaseUri/git-repo",
        update => $testdir . "/jobs/git-update.sh"
    },
    {
        name => "git-rev",
        nixexpr => "git-rev-input.nix",
        type => "git",
        uri => "$jobsBaseUri/git-repo 7f60df502b96fd54bbfa64dd94b56d936a407701",
        update => $testdir . "/jobs/git-rev-update.sh"
    },
    {
        name => "deepgit",
        nixexpr => "deepgit-input.nix",
        type => "git",
        uri => "$jobsBaseUri/git-repo master 1",
        update => $testdir . "/jobs/git-update.sh"
    },
    {
        name => "bzr",
        nixexpr => "bzr-input.nix",
        type => "bzr",
        uri => "$jobsBaseUri/bzr-repo",
        update => $testdir . "/jobs/bzr-update.sh"
    },
    {
        name => "bzr-checkout",
        nixexpr => "bzr-checkout-input.nix",
        type => "bzr-checkout",
        uri => "$jobsBaseUri/bzr-checkout-repo",
        update => $testdir . "/jobs/bzr-checkout-update.sh"
    },
    {
        name => "hg",
        nixexpr => "hg-input.nix",
        type => "hg",
        uri => "$jobsBaseUri/hg-repo",
        update => $testdir . "/jobs/hg-update.sh"
    },
    {
        name => "darcs",
        nixexpr => "darcs-input.nix",
        type => "darcs",
        uri => "$jobsBaseUri/darcs-repo",
        update => $testdir . "/jobs/darcs-update.sh"
    }
);

foreach my $scm ( @scminputs ) {
    my $scmName = $scm->{"name"};

    subtest "With the SCM input named $scmName" => sub {
        my $nixexpr = $scm->{"nixexpr"};
        my $type = $scm->{"type"};
        my $uri = $scm->{"uri"};
        my $update = $scm->{"update"};
        my $jobset = createJobsetWithOneInput($scmName, $nixexpr, "src", $type, $uri);

        my $state = 0;
        my $q = 0;
        my ($loop, $updated) = updateRepository($scmName, $update, $scratchdir);
        while($loop) {
            subtest "Mutation number $state" => sub {
                # Verify that it can be fetched and possibly queued.
                ok(evalSucceeds($jobset), "Evaluating nix-expression.");

                # Verify that the evaluation has queued a new job and evaluate again to ...
                if ($updated) {
                    $q++;
                    is(nrQueuedBuildsForJobset($jobset), $q, "Expect $q jobs in the queue.");
                    ok(evalSucceeds($jobset), "Evaluating nix-expression again.");
                }

                # ... check that it is deterministic and not queued again.
                is(nrQueuedBuildsForJobset($jobset), $q, "Expect deterministic evaluation.");

                $state++;
                ($loop, $updated) = updateRepository($scmName, $update, $scratchdir);
            };
        }
    };
}

done_testing;

@@ -2,12 +2,48 @@ package Setup;

 use strict;
 use Exporter;
-use Hydra::Helper::Nix;
-use Hydra::Model::DB;
+use Test::PostgreSQL;
+use File::Temp;
+use File::Path qw(make_path);
 use Cwd;

 our @ISA = qw(Exporter);
-our @EXPORT = qw(hydra_setup nrBuildsForJobset queuedBuildsForJobset nrQueuedBuildsForJobset createBaseJobset createJobsetWithOneInput evalSucceeds runBuild updateRepository);
+our @EXPORT = qw(test_init hydra_setup nrBuildsForJobset queuedBuildsForJobset nrQueuedBuildsForJobset createBaseJobset createJobsetWithOneInput evalSucceeds runBuild updateRepository);
+
+sub test_init() {
+    my $dir = File::Temp->newdir();
+
+    $ENV{'HYDRA_DATA'} = "$dir/hydra-data";
+    mkdir $ENV{'HYDRA_DATA'};
+    $ENV{'NIX_CONF_DIR'} = "$dir/nix/etc/nix";
+    make_path($ENV{'NIX_CONF_DIR'});
+    my $nixconf = "$ENV{'NIX_CONF_DIR'}/nix.conf";
+    open(my $fh, '>', $nixconf) or die "Could not open file '$nixconf' $!";
+    print $fh "sandbox = false\n";
+    close $fh;
+
+    $ENV{'NIX_STATE_DIR'} = "$dir/nix/var/nix";
+
+    $ENV{'NIX_MANIFESTS_DIR'} = "$dir/nix/var/nix/manifests";
+    $ENV{'NIX_STORE_DIR'} = "$dir/nix/store";
+    $ENV{'NIX_LOG_DIR'} = "$dir/nix/var/log/nix";
+
+    my $pgsql = Test::PostgreSQL->new(
+        extra_initdb_args => "--locale C.UTF-8"
+    );
+    $ENV{'HYDRA_DBI'} = $pgsql->dsn;
+    system("hydra-init") == 0 or die;
+    return ($dir, $pgsql);
+}
+
+sub captureStdoutStderr {
+    # "Lazy"-load Hydra::Helper::Nix to avoid the compile-time
+    # import of Hydra::Model::DB. Early loading of the DB class
+    # causes fixation of the DSN, and we need to fixate it after
+    # the temporary DB is setup.
+    require Hydra::Helper::Nix;
+    return Hydra::Helper::Nix::captureStdoutStderr(@_)
+}

 sub hydra_setup {
     my ($db) = @_;

@@ -79,8 +115,11 @@ sub runBuild {
 }

 sub updateRepository {
-    my ($scm, $update) = @_;
+    my ($scm, $update, $scratchdir) = @_;
+    my $curdir = getcwd;
+    chdir "$scratchdir";
     my ($res, $stdout, $stderr) = captureStdoutStderr(60, ($update, $scm));
+    chdir "$curdir";
     die "unexpected update error with $scm: $stderr\n" if $res;
     my ($message, $loop, $status) = $stdout =~ m/::(.*) -- (.*) -- (.*)::/;
     print STDOUT "Update $scm repository: $message\n";

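The net effect of these Setup.pm changes is the per-test pattern used by the new *.t files above; a minimal sketch of the intended usage, where the ordering is the important part (see the DSN-fixation comment in captureStdoutStderr):

    use strict;
    use Setup;

    # test_init() must run first: it creates the temporary HYDRA_DATA and NIX_*
    # directories and a private PostgreSQL server, and points HYDRA_DBI at it.
    (my $datadir, my $pgsql) = test_init();

    # require (run time) rather than use (compile time), so the DSN is only
    # fixed once the temporary database exists.
    require Hydra::Schema;
    require Hydra::Model::DB;

    use Test2::V0;

    my $db = Hydra::Model::DB->new;    # connects to the per-test database
    hydra_setup($db);

    # ... per-test assertions go here ...

    done_testing;    # $pgsql and $datadir clean up after themselves on exit
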
(deleted file, 5 lines)

@@ -1,5 +0,0 @@
use strict;
system("initdb -D postgres --locale C.UTF-8 ") == 0 or die;
system("pg_ctl -D postgres -o \"-F -p 6433 -h '' -k /tmp \" -w start") == 0 or die;
system("createdb -l C.UTF-8 -p 6433 hydra-test-suite") == 0 or die;
system("hydra-init") == 0 or die;

(deleted file, 12 lines)

@@ -1,12 +0,0 @@
use strict;

my $fail = 0;

system("dropdb -p 6433 hydra-test-suite") == 0 or $fail = 1;
system("pg_ctl -D postgres -w stop") == 0 or $fail = 1;

system("chmod -R a+w nix") == 0 or $fail = 1;
system("rm -rf postgres data nix git-repo hg-repo svn-repo svn-checkout svn-checkout-repo bzr-repo bzr-checkout-repo darcs-repo") == 0 or $fail = 1;
system("rm -f .*-state") == 0 or $fail = 1;

exit $fail;