# Hydra release expression: builds the source tarball, the Hydra package
# itself (per supported system), and NixOS VM tests that exercise the
# installed services.
{ hydraSrc ? { outPath = ./.; revCount = 1234; gitTag = "abcdef"; }
, officialRelease ? false
}:

let

  pkgs = import <nixpkgs> {};

  # Build an attribute set with one attribute per supported system.
  genAttrs' = pkgs.lib.genAttrs [ "x86_64-linux" /* "i686-linux" */ ];

  # NixOS machine configuration running Hydra from the given package,
  # backed by a local PostgreSQL instance.
  hydraServer = hydraPkg:
    { config, pkgs, ... }:
    { imports = [ ./hydra-module.nix ];

      virtualisation.memorySize = 1024;

      services.hydra.enable = true;
      services.hydra.package = hydraPkg;
      services.hydra.hydraURL = "http://hydra.example.org";
      services.hydra.notificationSender = "admin@hydra.example.org";

      services.postgresql.enable = true;
      services.postgresql.package = pkgs.postgresql92;

      # Perl modules needed by the test scripts run inside the VM.
      environment.systemPackages = [ pkgs.perlPackages.LWP pkgs.perlPackages.JSON ];
    };

in rec {

  # Source distribution tarball, including the generated manual.
  tarball =
    with import <nixpkgs> { };

    releaseTools.makeSourceTarball {
      name = "hydra-tarball";
      src = hydraSrc;
      inherit officialRelease;
      version = builtins.readFile ./version;

      buildInputs =
        [ perl libxslt dblatex tetex nukeReferences pkgconfig nixUnstable git openssl ];

      versionSuffix = if officialRelease then "" else "pre${toString hydraSrc.revCount}-${hydraSrc.gitTag}";

      preHook = ''
        # TeX needs a writable font cache.
        export VARTEXFONTS=$TMPDIR/texfonts

        addToSearchPath PATH $(pwd)/src/script
        addToSearchPath PATH $(pwd)/src/hydra-eval-jobs
        addToSearchPath PERL5LIB $(pwd)/src/lib
      '';

      postUnpack = ''
        # Clean up when building from a working tree.
        if [ -z "$IN_NIX_SHELL" ]; then
          (cd $sourceRoot && (git ls-files -o --directory | xargs -r rm -rfv)) || true
        fi
      '';

      configureFlags =
        [ "--with-docbook-xsl=${docbook_xsl}/xml/xsl/docbook" ];

      postDist = ''
        make -C doc/manual install prefix="$out"
        nuke-refs "$out/share/doc/hydra/manual.pdf"

        echo "doc manual $out/share/doc/hydra manual.html" >> \
          "$out/nix-support/hydra-build-products"
        echo "doc-pdf manual $out/share/doc/hydra/manual.pdf" >> \
          "$out/nix-support/hydra-build-products"
      '';
    };


  # The Hydra package itself, built from the tarball above.
  build = genAttrs' (system:

    with import <nixpkgs> { inherit system; };

    let

      nix = nixUnstable;

      # Environment collecting all Perl modules Hydra needs at runtime.
      perlDeps = buildEnv {
        name = "hydra-perl-deps";
        paths = with perlPackages;
          [ ModulePluggable
            CatalystActionREST
            CatalystAuthenticationStoreDBIxClass
            CatalystDevel
            CatalystDispatchTypeRegex
            CatalystPluginAccessLog
            CatalystPluginAuthorizationRoles
            CatalystPluginCaptcha
            CatalystPluginSessionStateCookie
            CatalystPluginSessionStoreFastMmap
            CatalystPluginStackTrace
            CatalystPluginUnicodeEncoding
            CatalystTraitForRequestProxyBase
            CatalystViewDownload
            CatalystViewJSON
            CatalystViewTT
            CatalystXScriptServerStarman
            CryptRandPasswd
            DBDPg
            DBDSQLite
            DataDump
            DateTime
            DigestSHA1
            EmailMIME
            EmailSender
            FileSlurp
            IOCompress
            IPCRun
            JSONXS
            LWP
            LWPProtocolHttps
            NetAmazonS3
            PadWalker
            Readonly
            SQLSplitStatement
            SetScalar
            Starman
            SysHostnameLong
            TestMore
            TextDiff
            TextTable
            XMLSimple
            nix git
          ];
      };

    in

    releaseTools.nixBuild {
      name = "hydra";
      src = tarball;

      buildInputs =
        [ makeWrapper libtool unzip nukeReferences pkgconfig sqlite
          gitAndTools.topGit mercurial darcs subversion bazaar openssl bzip2
          guile # optional, for Guile + Guix support
          perlDeps perl
        ];

      # PATH handed to wrapped Hydra programs; includes every VCS and
      # archiver the fetchers and build-product code may shell out to.
      hydraPath = lib.makeSearchPath "bin" (
        [ libxslt sqlite subversion openssh nix coreutils findutils
          gzip bzip2 lzma gnutar unzip git gitAndTools.topGit mercurial darcs gnused graphviz bazaar
        ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ] );

      preCheck = ''
        patchShebangs .
        # NOTE: ''${...} escapes Nix interpolation so the shell (not the
        # Nix evaluator) performs the ":-foo" default expansion.
        export LOGNAME=''${LOGNAME:-foo}
      '';

      postInstall = ''
        mkdir -p $out/nix-support
        nuke-refs $out/share/doc/hydra/manual/manual.pdf

        for i in $out/bin/*; do
          wrapProgram $i \
            --prefix PERL5LIB ':' $out/libexec/hydra/lib:$PERL5LIB \
            --prefix PATH ':' $out/bin:$hydraPath \
            --set HYDRA_RELEASE ${tarball.version} \
            --set HYDRA_HOME $out/libexec/hydra \
            --set NIX_RELEASE ${nix.name or "unknown"}
        done
      ''; # */

      meta.description = "Build of Hydra on ${system}";

      passthru.perlDeps = perlDeps;
    });


  # Smoke test: install Hydra via the NixOS module and check that all
  # services come up and the web interface responds.
  tests.install = genAttrs' (system:
    with import <nixpkgs/nixos/lib/testing.nix> { inherit system; };
    simpleTest {
      machine = hydraServer build.${system};
      testScript =
        ''
          $machine->waitForJob("hydra-init");
          $machine->waitForJob("hydra-server");
          $machine->waitForJob("hydra-evaluator");
          $machine->waitForJob("hydra-queue-runner");
          $machine->waitForOpenPort("3000");
          $machine->succeed("curl --fail http://localhost:3000/");
        '';
    });


  # Exercise the REST API against a freshly-installed Hydra.
  tests.api = genAttrs' (system:
    with import <nixpkgs/nixos/lib/testing.nix> { inherit system; };
    simpleTest {
      machine = hydraServer build.${system};
      testScript =
        let dbi = "dbi:Pg:dbname=hydra;user=root;"; in
        ''
          $machine->waitForJob("hydra-init");

          # Create an admin account and some other state.
          $machine->succeed
            ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
            , "mkdir /run/jobset /tmp/nix"
            , "chmod 755 /run/jobset /tmp/nix"
            , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
            , "chmod 644 /run/jobset/default.nix"
            , "chown -R hydra /run/jobset /tmp/nix"
            );

          # Start the web interface with some weird settings.
          $machine->succeed("systemctl stop hydra-server hydra-evaluator hydra-queue-runner");
          $machine->mustSucceed("su - hydra -c 'NIX_STORE_DIR=/tmp/nix/store NIX_LOG_DIR=/tmp/nix/var/log/nix NIX_STATE_DIR=/tmp/nix/var/nix NIX_REMOTE= DBIC_TRACE=1 hydra-server -d' >&2 &");
          $machine->waitForOpenPort("3000");

          # Run the API tests.
          $machine->mustSucceed("su - hydra -c 'perl ${./tests/api-test.pl}' >&2");
        '';
    });


  # Disabled: S3 backup plugin test; two of its checks fail against
  # fake-s3 (but work against real S3).
  /*
  tests.s3backup = genAttrs' (system:
    with import <nixpkgs/nixos/lib/testing.nix> { inherit system; };
    let hydra = build.${system}; in
    simpleTest {
      machine =
        { config, pkgs, ... }:
        { services.postgresql.enable = true;
          services.postgresql.package = pkgs.postgresql92;
          environment.systemPackages = [ hydra pkgs.rubyLibs.fakes3 ];
          virtualisation.memorySize = 2047;
          boot.kernelPackages = pkgs.linuxPackages_3_10;
          virtualisation.writableStore = true;
          networking.extraHosts = ''
            127.0.0.1 hydra.s3.amazonaws.com
          '';
        };

      testScript =
        ''
          $machine->waitForJob("postgresql");

          # Initialise the database and the state.
          $machine->succeed
            ( "createdb -O root hydra"
            , "psql hydra -f ${hydra}/libexec/hydra/sql/hydra-postgresql.sql"
            , "mkdir /var/lib/hydra"
            , "mkdir /tmp/jobs"
            , "cp ${./tests/s3-backup-test.pl} /tmp/s3-backup-test.pl"
            , "cp ${./tests/api-test.nix} /tmp/jobs/default.nix"
            );

          # start fakes3
          $machine->succeed("fakes3 --root /tmp/s3 --port 80 &>/dev/null &");
          $machine->waitForOpenPort("80");

          $machine->succeed("cd /tmp && LOGNAME=root AWS_ACCESS_KEY_ID=foo AWS_SECRET_ACCESS_KEY=bar HYDRA_DBI='dbi:Pg:dbname=hydra;user=root;' HYDRA_CONFIG=${./tests/s3-backup-test.config} perl -I ${hydra}/libexec/hydra/lib -I ${hydra.perlDeps}/lib/perl5/site_perl ./s3-backup-test.pl >&2");
        '';
    });
  */

}