diff --git a/Makefile.am b/Makefile.am index 339af868..ad911b4d 100644 --- a/Makefile.am +++ b/Makefile.am @@ -1,4 +1,8 @@ SUBDIRS = src tests doc BOOTCLEAN_SUBDIRS = $(SUBDIRS) DIST_SUBDIRS = $(SUBDIRS) +EXTRA_DIST = hydra-module.nix +install-data-local: hydra-module.nix + $(INSTALL) -d $(DESTDIR)$(datadir)/nix + $(INSTALL_DATA) hydra-module.nix $(DESTDIR)$(datadir)/nix/ diff --git a/configure.ac b/configure.ac index c136547f..8e90bdd7 100644 --- a/configure.ac +++ b/configure.ac @@ -52,8 +52,7 @@ then NIX_STATE_DIR="$TMPDIR" export NIX_STATE_DIR fi -if "$NIX_STORE_PROGRAM" --timeout 123 -q > /dev/null 2>&1 -then +if NIX_REMOTE=daemon "$NIX_STORE_PROGRAM" --timeout 123 -q; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) @@ -68,7 +67,7 @@ LDFLAGS="$LDFLAGS -L$nix/lib/nix" AC_CHECK_HEADER([store-api.hh], [:], [AC_MSG_ERROR([Nix headers not found; please install Nix or check the `--with-nix' option.])]) -AC_CHECK_LIB([expr], [_ZN3nix9EvalState17parseExprFromFileESs], [:], +AC_CHECK_LIB([expr], [_ZN3nix9EvalState8evalFileERKSsRNS_5ValueE], [:], [AC_MSG_ERROR([Nix library not found; please install Nix or check the `--with-nix' option.])]) CPPFLAGS="$old_CPPFLAGS" diff --git a/dev-shell b/dev-shell new file mode 100755 index 00000000..d996c245 --- /dev/null +++ b/dev-shell @@ -0,0 +1,7 @@ +#! /bin/sh +s=$(type -p nix-shell) +exec $s release.nix -A build.x86_64-linux --exclude tarball --command " + export NIX_REMOTE=daemon + export NIX_PATH='$NIX_PATH' + export NIX_BUILD_SHELL=$(type -p bash) + exec $s release.nix -A tarball" "$@" diff --git a/doc/dev-notes.txt b/doc/dev-notes.txt index 6fe824c5..fce0fdb1 100644 --- a/doc/dev-notes.txt +++ b/doc/dev-notes.txt @@ -127,3 +127,13 @@ succeed in the nixpkgs:trunk jobset: select job, system from builds b natural join buildresultinfo where project = 'nixpkgs' and jobset = 'stdenv' and iscurrent = 1 and finished = 1 and buildstatus != 0 and exists (select 1 from builds natural join buildresultinfo where project = 'nixpkgs' and jobset = 'trunk' and job = b.job and system = b.system and iscurrent = 1 and finished = 1 and buildstatus = 0) order by job, system; + + +* Get all Nixpkgs jobs that have never built succesfully: + + select project, jobset, job from builds b1 + where project = 'nixpkgs' and jobset = 'trunk' and iscurrent = 1 + group by project, jobset, job + having not exists + (select 1 from builds b2 where b1.project = b2.project and b1.jobset = b2.jobset and b1.job = b2.job and finished = 1 and buildstatus = 0) + order by project, jobset, job; diff --git a/doc/manual/Makefile.am b/doc/manual/Makefile.am index 7c788727..67fa3da6 100644 --- a/doc/manual/Makefile.am +++ b/doc/manual/Makefile.am @@ -1,4 +1,4 @@ -DOCBOOK_FILES = installation.xml introduction.xml manual.xml projects.xml +DOCBOOK_FILES = installation.xml introduction.xml manual.xml projects.xml hacking.xml EXTRA_DIST = $(DOCBOOK_FILES) diff --git a/doc/manual/hacking.xml b/doc/manual/hacking.xml new file mode 100644 index 00000000..5a5ff9ca --- /dev/null +++ b/doc/manual/hacking.xml @@ -0,0 +1,39 @@ + + +Hacking + +This section provides some notes on how to hack on Hydra. 
To +get the latest version of Hydra from GitHub: + +$ git clone git://github.com/NixOS/hydra.git +$ cd hydra + + + +To build it and its dependencies: + +$ nix-build release.nix -A build.x86_64-linux + + + +To build all dependencies and start a shell in which all +environment variables (such as PERL5LIB) are set up so +that those dependencies can be found: + +$ ./dev-shell + +To build Hydra, you should then do: + +[nix-shell]$ ./bootstrap +[nix-shell]$ configurePhase +[nix-shell]$ make + +You can run the Hydra web server in your source tree as follows: + +$ ./src/script/hydra-server + + + + diff --git a/doc/manual/manual.xml b/doc/manual/manual.xml index 2d2b6500..c9273cde 100644 --- a/doc/manual/manual.xml +++ b/doc/manual/manual.xml @@ -52,8 +52,7 @@ - 2009 - 2010 + 2009-2013 Eelco Dolstra @@ -64,6 +63,7 @@ + diff --git a/hydra-module.nix b/hydra-module.nix index 16f95102..1b8d8663 100644 --- a/hydra-module.nix +++ b/hydra-module.nix @@ -7,13 +7,7 @@ let baseDir = "/var/lib/hydra"; - hydraConf = pkgs.writeScript "hydra.conf" - '' - using_frontend_proxy 1 - base_uri ${cfg.hydraURL} - notification_sender ${cfg.notificationSender} - max_servers 25 - ''; + hydraConf = pkgs.writeScript "hydra.conf" cfg.extraConfig; env = { NIX_REMOTE = "daemon"; @@ -28,7 +22,7 @@ let serverEnv = env // { HYDRA_LOGO = if cfg.logo != null then cfg.logo else ""; HYDRA_TRACKER = cfg.tracker; - }; + } // (optionalAttrs cfg.debugServer { DBIC_TRACE = 1; }); in { @@ -64,6 +58,15 @@ in ''; }; + listenHost = mkOption { + default = "*"; + example = "localhost"; + description = '' + The hostname or address to listen on or * to listen + on all interfaces. + ''; + }; + port = mkOption { default = 3000; description = '' @@ -112,6 +115,17 @@ in ''; }; + debugServer = mkOption { + default = false; + type = types.bool; + description = "Whether to run the server in debug mode"; + }; + + extraConfig = mkOption { + type = types.lines; + description = "Extra lines for the hydra config"; + }; + }; }; @@ -120,6 +134,14 @@ in ###### implementation config = mkIf cfg.enable { + services.hydra.extraConfig = + '' + using_frontend_proxy 1 + base_uri ${cfg.hydraURL} + notification_sender ${cfg.notificationSender} + max_servers 25 + ''; + environment.systemPackages = [ cfg.hydra ]; users.extraUsers.hydra = @@ -151,14 +173,36 @@ in systemd.services."hydra-init" = { wantedBy = [ "multi-user.target" ]; + requires = [ "postgresql.service" ]; + after = [ "postgresql.service" ]; environment = env; script = '' mkdir -p ${baseDir}/data chown hydra ${baseDir}/data ln -sf ${hydraConf} ${baseDir}/data/hydra.conf + pass=$(HOME=/root ${pkgs.openssl}/bin/openssl rand -base64 32) + if [ ! 
-f ${baseDir}/.pgpass ]; then + ${config.services.postgresql.package}/bin/psql postgres << EOF + CREATE USER hydra PASSWORD '$pass'; + EOF + ${config.services.postgresql.package}/bin/createdb -O hydra hydra + cat > ${baseDir}/.pgpass-tmp << EOF + localhost:*:hydra:hydra:$pass + EOF + chown hydra ${baseDir}/.pgpass-tmp + chmod 600 ${baseDir}/.pgpass-tmp + mv ${baseDir}/.pgpass-tmp ${baseDir}/.pgpass + fi ${pkgs.shadow}/bin/su hydra -c ${cfg.hydra}/bin/hydra-init + ${config.services.postgresql.package}/bin/psql hydra << EOF + BEGIN; + INSERT INTO Users(userName, emailAddress, password) VALUES ('admin', '${cfg.notificationSender}', '$(echo -n $pass | sha1sum | cut -c1-40)'); + INSERT INTO UserRoles(userName, role) values('admin', 'admin'); + COMMIT; + EOF ''; serviceConfig.Type = "oneshot"; + serviceConfig.RemainAfterExit = true; }; systemd.services."hydra-server" = @@ -167,7 +211,7 @@ in after = [ "hydra-init.service" ]; environment = serverEnv; serviceConfig = - { ExecStart = "@${cfg.hydra}/bin/hydra-server hydra-server -f -h \* --max_spare_servers 5 --max_servers 25 --max_requests 100"; + { ExecStart = "@${cfg.hydra}/bin/hydra-server hydra-server -f -h '${cfg.listenHost}' --max_spare_servers 5 --max_servers 25 --max_requests 100${optionalString cfg.debugServer " -d"}"; User = "hydra"; Restart = "always"; }; @@ -177,7 +221,7 @@ in { wantedBy = [ "multi-user.target" ]; wants = [ "hydra-init.service" ]; after = [ "hydra-init.service" "network.target" ]; - path = [ pkgs.nettools pkgs.ssmtp ]; + path = [ pkgs.nettools ]; environment = env; serviceConfig = { ExecStartPre = "${cfg.hydra}/bin/hydra-queue-runner --unlock"; @@ -191,7 +235,7 @@ in { wantedBy = [ "multi-user.target" ]; wants = [ "hydra-init.service" ]; after = [ "hydra-init.service" "network.target" ]; - path = [ pkgs.nettools pkgs.ssmtp ]; + path = [ pkgs.nettools ]; environment = env; serviceConfig = { ExecStart = "@${cfg.hydra}/bin/hydra-evaluator hydra-evaluator"; @@ -226,9 +270,10 @@ in ''; compressLogs = pkgs.writeScript "compress-logs" '' - #! ${pkgs.stdenv.shell} -e - touch -d 'last month' r - find /nix/var/log/nix/drvs -type f -a ! -newer r -name '*.drv' | xargs bzip2 -v + #! ${pkgs.stdenv.shell} -e + find /nix/var/log/nix/drvs \ + -type f -a ! -newermt 'last month' \ + -name '*.drv' -exec bzip2 -v {} + ''; in [ "*/5 * * * * root ${checkSpace} &> ${baseDir}/data/checkspace.log" diff --git a/release.nix b/release.nix index 94cf22e4..2be29519 100644 --- a/release.nix +++ b/release.nix @@ -6,7 +6,7 @@ let pkgs = import {}; - genAttrs' = pkgs.lib.genAttrs [ "x86_64-linux" "i686-linux" ]; + genAttrs' = pkgs.lib.genAttrs [ "x86_64-linux" ]; in rec { @@ -24,13 +24,17 @@ in rec { versionSuffix = if officialRelease then "" else "pre${toString hydraSrc.revCount}-${hydraSrc.gitTag}"; - preConfigure = '' + preHook = '' # TeX needs a writable font cache. 
export VARTEXFONTS=$TMPDIR/texfonts + + addToSearchPath PATH $(pwd)/src/script + addToSearchPath PATH $(pwd)/src/c + addToSearchPath PERL5LIB $(pwd)/src/lib ''; configureFlags = - [ "--with-nix=${nix}" + [ "--with-nix=${nixUnstable}" "--with-docbook-xsl=${docbook_xsl}/xml/xsl/docbook" ]; @@ -88,6 +92,7 @@ in rec { PadWalker CatalystDevel Readonly + SetScalar SQLSplitStatement Starman SysHostnameLong @@ -95,6 +100,7 @@ in rec { TextDiff TextTable XMLSimple + NetAmazonS3 nix git ]; }; @@ -108,17 +114,20 @@ in rec { buildInputs = [ makeWrapper libtool unzip nukeReferences pkgconfig boehmgc sqlite - gitAndTools.topGit mercurial subversion bazaar openssl bzip2 + gitAndTools.topGit mercurial darcs subversion bazaar openssl bzip2 guile # optional, for Guile + Guix support perlDeps perl ]; hydraPath = lib.makeSearchPath "bin" ( [ libxslt sqlite subversion openssh nix coreutils findutils - gzip bzip2 lzma gnutar unzip git gitAndTools.topGit mercurial gnused graphviz bazaar + gzip bzip2 lzma gnutar unzip git gitAndTools.topGit mercurial darcs gnused graphviz bazaar ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ] ); - preConfigure = "patchShebangs ."; + preCheck = '' + patchShebangs . + export LOGNAME=${LOGNAME:-foo} + ''; postInstall = '' mkdir -p $out/nix-support @@ -134,14 +143,13 @@ in rec { done ''; # */ - LOGNAME = "foo"; - meta.description = "Build of Hydra on ${system}"; + passthru.perlDeps = perlDeps; }); tests.install = genAttrs' (system: - with import { inherit system; }; + with import { inherit system; }; let hydra = builtins.getAttr system build; in # build.${system} simpleTest { machine = @@ -169,8 +177,8 @@ in rec { }); tests.api = genAttrs' (system: - with import { inherit system; }; - let hydra = builtins.getAttr system build; in # build.${system} + with import { inherit system; }; + let hydra = builtins.getAttr system build; in # build."${system}" simpleTest { machine = { config, pkgs, ... }: @@ -178,6 +186,7 @@ in rec { services.postgresql.package = pkgs.postgresql92; environment.systemPackages = [ hydra pkgs.perlPackages.LWP pkgs.perlPackages.JSON ]; virtualisation.memorySize = 2047; + boot.kernelPackages = pkgs.linuxPackages_3_10; }; testScript = @@ -204,4 +213,43 @@ in rec { $machine->mustSucceed("perl ${./tests/api-test.pl} >&2"); ''; }); + + tests.s3backup = genAttrs' (system: + with import { inherit system; }; + let hydra = builtins.getAttr system build; in # build."${system}" + simpleTest { + machine = + { config, pkgs, ... }: + { services.postgresql.enable = true; + services.postgresql.package = pkgs.postgresql92; + environment.systemPackages = [ hydra pkgs.rubyLibs.fakes3 ]; + virtualisation.memorySize = 2047; + boot.kernelPackages = pkgs.linuxPackages_3_10; + virtualisation.writableStore = true; + networking.extraHosts = '' + 127.0.0.1 hydra.s3.amazonaws.com + ''; + }; + + testScript = + '' + $machine->waitForJob("postgresql"); + + # Initialise the database and the state. 
+ $machine->mustSucceed + ( "createdb -O root hydra" + , "psql hydra -f ${hydra}/libexec/hydra/sql/hydra-postgresql.sql" + , "mkdir /var/lib/hydra" + , "mkdir /tmp/jobs" + , "cp ${./tests/s3-backup-test.pl} /tmp/s3-backup-test.pl" + , "cp ${./tests/api-test.nix} /tmp/jobs/default.nix" + ); + + # start fakes3 + $machine->mustSucceed("fakes3 --root /tmp/s3 --port 80 &>/dev/null &"); + $machine->waitForOpenPort("80"); + + $machine->mustSucceed("cd /tmp && LOGNAME=root AWS_ACCESS_KEY_ID=foo AWS_SECRET_ACCESS_KEY=bar HYDRA_DBI='dbi:Pg:dbname=hydra;user=root;' HYDRA_CONFIG=${./tests/s3-backup-test.config} perl -I ${hydra}/libexec/hydra/lib -I ${hydra.perlDeps}/lib/perl5/site_perl ./s3-backup-test.pl >&2"); + ''; + }); } diff --git a/src/c/hydra-eval-jobs.cc b/src/c/hydra-eval-jobs.cc index 4ff01057..616bf52e 100644 --- a/src/c/hydra-eval-jobs.cc +++ b/src/c/hydra-eval-jobs.cc @@ -109,6 +109,22 @@ static int queryMetaFieldInt(MetaInfo & meta, const string & name, int def) } +static string queryMetaField(MetaInfo & meta, const string & name) +{ + string res; + MetaValue value = meta[name]; + if (value.type == MetaValue::tpString) + res = value.stringValue; + else if (value.type == MetaValue::tpStrings) { + foreach (Strings::const_iterator, i, value.stringValues) { + if (res.size() != 0) res += ", "; + res += *i; + } + } + return res; +} + + static void findJobsWrapped(EvalState & state, XMLWriter & doc, const ArgsUsed & argsUsed, const AutoArgs & argsLeft, Value & v, const string & attrPath) @@ -136,8 +152,9 @@ static void findJobsWrapped(EvalState & state, XMLWriter & doc, MetaInfo meta = drv.queryMetaInfo(state); xmlAttrs["description"] = queryMetaFieldString(meta, "description"); xmlAttrs["longDescription"] = queryMetaFieldString(meta, "longDescription"); - xmlAttrs["license"] = queryMetaFieldString(meta, "license"); + xmlAttrs["license"] = queryMetaField(meta, "license"); xmlAttrs["homepage"] = queryMetaFieldString(meta, "homepage"); + xmlAttrs["maintainers"] = queryMetaField(meta, "maintainers"); int prio = queryMetaFieldInt(meta, "schedulingPriority", 100); xmlAttrs["schedulingPriority"] = int2String(prio); @@ -148,17 +165,22 @@ static void findJobsWrapped(EvalState & state, XMLWriter & doc, int maxsilent = queryMetaFieldInt(meta, "maxSilent", 3600); xmlAttrs["maxSilent"] = int2String(maxsilent); - string maintainers; - MetaValue value = meta["maintainers"]; - if (value.type == MetaValue::tpString) - maintainers = value.stringValue; - else if (value.type == MetaValue::tpStrings) { - foreach (Strings::const_iterator, i, value.stringValues) { - if (maintainers.size() != 0) maintainers += ", "; - maintainers += *i; - } + /* If this is an aggregate, then get its constituents. */ + Bindings::iterator a = v.attrs->find(state.symbols.create("_hydraAggregate")); + if (a != v.attrs->end() && state.forceBool(*a->value)) { + Bindings::iterator a = v.attrs->find(state.symbols.create("constituents")); + if (a == v.attrs->end()) + throw EvalError("derivation must have a ‘constituents’ attribute"); + PathSet context; + state.coerceToString(*a->value, context, true, false); + PathSet drvs; + foreach (PathSet::iterator, i, context) + if (i->at(0) == '!') { + size_t index = i->find("!", 1); + drvs.insert(string(*i, index + 1)); + } + xmlAttrs["constituents"] = concatStringsSep(" ", drvs); } - xmlAttrs["maintainers"] = maintainers; /* Register the derivation as a GC root. !!! 
This registers roots for jobs that we may have already @@ -267,9 +289,8 @@ void run(Strings args) store = openStore(); - Expr * e = state.parseExprFromFile(releaseExpr); Value v; - state.mkThunk_(v, e); + state.evalFile(releaseExpr, v); XMLWriter doc(true, std::cout); XMLOpenElement root(doc, "jobs"); diff --git a/src/lib/Hydra/Base/Controller/ListBuilds.pm b/src/lib/Hydra/Base/Controller/ListBuilds.pm index 6dce9c0d..b971d5e8 100644 --- a/src/lib/Hydra/Base/Controller/ListBuilds.pm +++ b/src/lib/Hydra/Base/Controller/ListBuilds.pm @@ -7,46 +7,6 @@ use Hydra::Helper::Nix; use Hydra::Helper::CatalystUtils; -sub getJobStatus { - my ($self, $c) = @_; - - my $maintainer = $c->request->params->{"maintainer"}; - - my $latest = $c->stash->{jobStatus}->search( - defined $maintainer ? { maintainers => { like => "%$maintainer%" } } : {}, - { '+select' => ["me.statusChangeId", "me.statusChangeTime"] - , '+as' => ["statusChangeId", "statusChangeTime"] - , order_by => "coalesce(statusChangeTime, 0) desc" - }); - - return $latest; -} - - -sub jobstatus : Chained('get_builds') PathPart Args(0) { - my ($self, $c) = @_; - $c->stash->{template} = 'jobstatus.tt'; - $c->stash->{latestBuilds} = [getJobStatus($self, $c)->all]; -} - - - -# A convenient way to see all the errors - i.e. things demanding -# attention - at a glance. -sub errors : Chained('get_builds') PathPart Args(0) { - my ($self, $c) = @_; - $c->stash->{template} = 'errors.tt'; - $c->stash->{brokenJobsets} = - [$c->stash->{allJobsets}->search({errormsg => {'!=' => ''}})] - if defined $c->stash->{allJobsets}; - $c->stash->{brokenJobs} = - [$c->stash->{allJobs}->search({errormsg => {'!=' => ''}})] - if defined $c->stash->{allJobs}; - $c->stash->{brokenBuilds} = - [getJobStatus($self, $c)->search({buildStatus => {'!=' => 0}})]; -} - - sub all : Chained('get_builds') PathPart { my ($self, $c) = @_; @@ -56,13 +16,12 @@ sub all : Chained('get_builds') PathPart { my $resultsPerPage = 20; - my $nrBuilds = $c->stash->{allBuilds}->search({finished => 1})->count; - $c->stash->{baseUri} = $c->uri_for($self->action_for("all"), $c->req->captures); $c->stash->{page} = $page; $c->stash->{resultsPerPage} = $resultsPerPage; - $c->stash->{total} = $nrBuilds; + $c->stash->{total} = $c->stash->{allBuilds}->search({finished => 1})->count + unless defined $c->stash->{total}; $c->stash->{builds} = [ $c->stash->{allBuilds}->search( { finished => 1 }, @@ -82,6 +41,7 @@ sub nix : Chained('get_builds') PathPart('channel') CaptureArgs(1) { ->search_literal("exists (select 1 from buildproducts where build = me.id and type = 'nix-build')") ->search({}, { columns => [@buildListColumns, 'drvpath', 'description', 'homepage'] , join => ["buildoutputs"] + , order_by => ["me.id", "buildoutputs.name"] , '+select' => ['buildoutputs.path', 'buildoutputs.name'], '+as' => ['outpath', 'outname'] }); } else { @@ -120,4 +80,22 @@ sub latest_for : Chained('get_builds') PathPart('latest-for') { } +# Redirect to the latest successful build in a finished evaluation +# (i.e. an evaluation that has no unfinished builds). 
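# Editor's note (illustrative usage, not part of this patch): the
# latest-finished action introduced just below redirects to the newest
# successful build that belongs to a fully finished evaluation.  A
# minimal sketch of following that redirect from a script; the
# hostname, project, jobset and job names are made up:
#
#   use strict;
#   use warnings;
#   use LWP::UserAgent;
#
#   my $ua  = LWP::UserAgent->new;   # follows redirects for GET by default
#   my $res = $ua->get("http://hydra.example.org/job/myproject/trunk/myjob/latest-finished");
#   print $res->request->uri, "\n";  # URI of the build page we ended up on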
+sub latest_finished : Chained('get_builds') PathPart('latest-finished') { + my ($self, $c, @rest) = @_; + + my $latest = $c->stash->{allBuilds}->find( + { finished => 1, buildstatus => 0 }, + { order_by => ["id DESC"], rows => 1, join => ["jobsetevalmembers"] + , where => \ + "not exists (select 1 from jobsetevalmembers m2 join builds b2 on jobsetevalmembers.eval = m2.eval and m2.build = b2.id and b2.finished = 0)" + }); + + notFound($c, "There is no successful build to redirect to.") unless defined $latest; + + $c->res->redirect($c->uri_for($c->controller('Build')->action_for("build"), [$latest->id], @rest)); +} + + 1; diff --git a/src/lib/Hydra/Base/Controller/NixChannel.pm b/src/lib/Hydra/Base/Controller/NixChannel.pm index 40aedaa0..6452f279 100644 --- a/src/lib/Hydra/Base/Controller/NixChannel.pm +++ b/src/lib/Hydra/Base/Controller/NixChannel.pm @@ -14,20 +14,36 @@ sub getChannelData { my @storePaths = (); $c->stash->{nixPkgs} = []; - foreach my $build ($c->stash->{channelBuilds}->all) { - my $outPath = $build->get_column("outpath"); - my $outName = $build->get_column("outname"); - next if $checkValidity && !isValidPath($outPath); - push @storePaths, $outPath; - my $pkgName = $build->nixname . "-" . $build->system . "-" . $build->id . ($outName ne "out" ? "-" . $outName : ""); - push @{$c->stash->{nixPkgs}}, { build => $build, name => $pkgName, outPath => $outPath, outName => $outName }; - # Put the system type in the manifest (for top-level paths) as - # a hint to the binary patch generator. (It shouldn't try to - # generate patches between builds for different systems.) It - # would be nice if Nix stored this info for every path but it - # doesn't. - $c->stash->{systemForPath}->{$outPath} = $build->system; - }; + + my @builds = $c->stash->{channelBuilds}->all; + + for (my $n = 0; $n < scalar @builds; ) { + # Since channelData is a join of Builds and BuildOutputs, we + # need to gather the rows that belong to a single build. + my $build = $builds[$n++]; + my @outputs = ($build); + push @outputs, $builds[$n++] while $n < scalar @builds && $builds[$n]->id == $build->id; + @outputs = grep { $_->get_column("outpath") } @outputs; + + my $outputs = {}; + foreach my $output (@outputs) { + my $outPath = $output->get_column("outpath"); + next if $checkValidity && !isValidPath($outPath); + $outputs->{$output->get_column("outname")} = $outPath; + push @storePaths, $outPath; + # Put the system type in the manifest (for top-level + # paths) as a hint to the binary patch generator. (It + # shouldn't try to generate patches between builds for + # different systems.) It would be nice if Nix stored this + # info for every path but it doesn't. + $c->stash->{systemForPath}->{$outPath} = $build->system; + } + + next if !%$outputs; + + my $pkgName = $build->nixname . "-" . $build->system . "-" . 
$build->id; + push @{$c->stash->{nixPkgs}}, { build => $build, name => $pkgName, outputs => $outputs }; + } $c->stash->{storePaths} = [@storePaths]; } diff --git a/src/lib/Hydra/Base/Controller/REST.pm b/src/lib/Hydra/Base/Controller/REST.pm index 606f0e09..7adb1cf4 100644 --- a/src/lib/Hydra/Base/Controller/REST.pm +++ b/src/lib/Hydra/Base/Controller/REST.pm @@ -4,8 +4,12 @@ use strict; use warnings; use base 'Catalyst::Controller::REST'; +# Hack: Erase the map set by C::C::REST +__PACKAGE__->config( map => undef ); __PACKAGE__->config( map => { + 'application/json' => 'JSON', + 'text/x-json' => 'JSON', 'text/html' => [ 'View', 'TT' ] }, default => 'text/html', diff --git a/src/lib/Hydra/Component/ToJSON.pm b/src/lib/Hydra/Component/ToJSON.pm index 6abc4877..ed8d59d9 100644 --- a/src/lib/Hydra/Component/ToJSON.pm +++ b/src/lib/Hydra/Component/ToJSON.pm @@ -8,36 +8,26 @@ use base 'DBIx::Class'; sub TO_JSON { my $self = shift; - my $json = { $self->get_columns }; - my $rs = $self->result_source; - my @relnames = $rs->relationships; - RELLOOP: foreach my $relname (@relnames) { - my $relinfo = $rs->relationship_info($relname); - next unless defined $relinfo->{attrs}->{accessor}; - my $accessor = $relinfo->{attrs}->{accessor}; - if ($accessor eq "single" and exists $self->{_relationship_data}{$relname}) { - $json->{$relname} = $self->$relname->TO_JSON; - } else { - unless (defined $self->{related_resultsets}{$relname}) { - my $cond = $relinfo->{cond}; - if (ref $cond eq 'HASH') { - foreach my $k (keys %{$cond}) { - my $v = $cond->{$k}; - $v =~ s/^self\.//; - next RELLOOP unless $self->has_column_loaded($v); - } - } #!!! TODO: Handle ARRAY conditions - } - if (defined $self->related_resultset($relname)->get_cache) { - if ($accessor eq "multi") { - $json->{$relname} = [ map { $_->TO_JSON } $self->$relname ]; - } else { - $json->{$relname} = $self->$relname->TO_JSON; - } - } - } + + my $hint = $self->json_hint; + + my %json = (); + + foreach my $column (@{$hint->{columns}}) { + $json{$column} = $self->get_column($column); } - return $json; + + foreach my $relname (keys %{$hint->{relations}}) { + my $key = $hint->{relations}->{$relname}; + $json{$relname} = [ map { $_->$key } $self->$relname ]; + } + + foreach my $relname (keys %{$hint->{eager_relations}}) { + my $key = $hint->{eager_relations}->{$relname}; + $json{$relname} = { map { $_->$key => $_ } $self->$relname }; + } + + return \%json; } 1; diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index 48b0939a..fbbb70be 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -15,8 +15,6 @@ use Digest::SHA qw(sha256_hex); use Text::Diff; use File::Slurp; -# !!! Rewrite this to use View::JSON. 
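# Editor's note (illustrative, not part of this patch): the rewritten
# TO_JSON in Hydra::Component::ToJSON above no longer walks the
# DBIx::Class relationship metadata; it expects each result class to
# provide a json_hint describing exactly what to serialise.  A minimal
# sketch of such a hint follows; the column and relation names are
# hypothetical, not taken from the Hydra schema:
#
#   sub json_hint {
#       return {
#           # plain columns, copied into the JSON object via get_column()
#           columns         => [ 'id', 'name' ],
#           # relname => accessor; serialised as a list of that accessor's values
#           relations       => { members => 'build' },
#           # relname => key; serialised as a hash of key => related row
#           eager_relations => { inputs => 'name' },
#       };
#   }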
- sub api : Chained('/') PathPart('api') CaptureArgs(0) { my ($self, $c) = @_; @@ -24,32 +22,6 @@ sub api : Chained('/') PathPart('api') CaptureArgs(0) { } -sub projectToHash { - my ($project) = @_; - return { - name => $project->name, - description => $project->description - }; -} - - -sub projects : Chained('api') PathPart('projects') Args(0) { - my ($self, $c) = @_; - - my @projects = $c->model('DB::Projects')->search({hidden => 0}, {order_by => 'name'}); - - my @list; - foreach my $p (@projects) { - push @list, projectToHash($p); - } - - $c->stash->{'plain'} = { - data => scalar (JSON::Any->objToJson(\@list)) - }; - $c->forward('Hydra::View::Plain'); -} - - sub buildToHash { my ($build) = @_; my $result = { diff --git a/src/lib/Hydra/Controller/Admin.pm b/src/lib/Hydra/Controller/Admin.pm index c9ef09fc..a73762dc 100644 --- a/src/lib/Hydra/Controller/Admin.pm +++ b/src/lib/Hydra/Controller/Admin.pm @@ -34,8 +34,12 @@ sub machines : Chained('admin') PathPart('machines') Args(0) { sub clear_queue_non_current : Chained('admin') PathPart('clear-queue-non-current') Args(0) { my ($self, $c) = @_; - my $time = time(); - $c->model('DB::Builds')->search({finished => 0, iscurrent => 0, busy => 0})->update({ finished => 1, buildstatus => 4, starttime => $time, stoptime => $time }); + my $builds = $c->model('DB::Builds')->search( + { finished => 0, busy => 0 + , id => { -not_in => \ "select build from JobsetEvalMembers where eval in (select max(id) from JobsetEvals where hasNewBuilds = 1 group by project, jobset)" } + }); + my $n = cancelBuilds($c->model('DB')->schema, $builds); + $c->flash->{successMsg} = "$n builds have been cancelled."; $c->res->redirect($c->request->referer // "/admin"); } @@ -49,19 +53,11 @@ sub clearfailedcache : Chained('admin') PathPart('clear-failed-cache') Args(0) { sub clearvcscache : Chained('admin') PathPart('clear-vcs-cache') Args(0) { my ($self, $c) = @_; - - print STDERR "Clearing path cache\n"; - $c->model('DB::CachedPathInputs')->delete_all; - - print STDERR "Clearing git cache\n"; - $c->model('DB::CachedGitInputs')->delete_all; - - print STDERR "Clearing subversion cache\n"; - $c->model('DB::CachedSubversionInputs')->delete_all; - - print STDERR "Clearing bazaar cache\n"; - $c->model('DB::CachedBazaarInputs')->delete_all; - + $c->model('DB::CachedPathInputs')->delete; + $c->model('DB::CachedGitInputs')->delete; + $c->model('DB::CachedSubversionInputs')->delete; + $c->model('DB::CachedBazaarInputs')->delete; + $c->flash->{successMsg} = "VCS caches have been cleared."; $c->res->redirect($c->request->referer // "/admin"); } diff --git a/src/lib/Hydra/Controller/Build.pm b/src/lib/Hydra/Controller/Build.pm index 18ca624d..cba1f93d 100644 --- a/src/lib/Hydra/Controller/Build.pm +++ b/src/lib/Hydra/Controller/Build.pm @@ -35,18 +35,18 @@ sub buildChain :Chained('/') :PathPart('build') :CaptureArgs(1) { sub findBuildStepByOutPath { - my ($self, $c, $path, $status) = @_; + my ($self, $c, $path) = @_; return $c->model('DB::BuildSteps')->search( - { path => $path, busy => 0, status => $status }, - { join => ["buildstepoutputs"], order_by => ["stopTime"], limit => 1 })->single; + { path => $path, busy => 0 }, + { join => ["buildstepoutputs"], order_by => ["status", "stopTime"], rows => 1 })->single; } sub findBuildStepByDrvPath { - my ($self, $c, $drvPath, $status) = @_; + my ($self, $c, $drvPath) = @_; return $c->model('DB::BuildSteps')->search( - { drvpath => $drvPath, busy => 0, status => $status }, - { order_by => ["stopTime"], limit => 1 })->single; + { drvpath => 
$drvPath, busy => 0 }, + { order_by => ["status", "stopTime"], rows => 1 })->single; } @@ -60,7 +60,6 @@ sub build_GET { $c->stash->{template} = 'build.tt'; $c->stash->{available} = all { isValidPath($_->path) } $build->buildoutputs->all; $c->stash->{drvAvailable} = isValidPath $build->drvpath; - $c->stash->{flashMsg} = $c->flash->{buildMsg}; if (!$build->finished && $build->busy) { $c->stash->{logtext} = read_file($build->logfile, err_mode => 'quiet') // ""; @@ -68,8 +67,7 @@ sub build_GET { if ($build->finished && $build->iscachedbuild) { my $path = ($build->buildoutputs)[0]->path or die; - my $cachedBuildStep = findBuildStepByOutPath($self, $c, $path, - $build->buildstatus == 0 || $build->buildstatus == 6 ? 0 : 1); + my $cachedBuildStep = findBuildStepByOutPath($self, $c, $path); $c->stash->{cachedBuild} = $cachedBuildStep->build if defined $cachedBuildStep; } @@ -95,25 +93,16 @@ sub build_GET { # Get the first eval of which this build was a part. ($c->stash->{nrEvals}) = $c->stash->{build}->jobsetevals->search({ hasnewbuilds => 1 })->count; - ($c->stash->{eval}) = $c->stash->{build}->jobsetevals->search( + $c->stash->{eval} = $c->stash->{build}->jobsetevals->search( { hasnewbuilds => 1}, - { limit => 1, order_by => ["id"] }); + { rows => 1, order_by => ["id"] })->single; $self->status_ok( $c, - entity => $c->model('DB::Builds')->find($build->id,{ - columns => [ - 'id', - 'finished', - 'timestamp', - 'buildstatus', - 'job', - 'project', - 'jobset', - 'starttime', - 'stoptime', - ] - }) + entity => $build ); + + # If this is an aggregate build, get its constituents. + $c->stash->{constituents} = [$c->stash->{build}->constituents_->search({}, {order_by => ["job"]})]; } @@ -125,35 +114,43 @@ sub view_nixlog : Chained('buildChain') PathPart('nixlog') { $c->stash->{step} = $step; - showLog($c, $step->drvpath, $mode); + showLog($c, $mode, $step->drvpath, map { $_->path } $step->buildstepoutputs->all); } sub view_log : Chained('buildChain') PathPart('log') { my ($self, $c, $mode) = @_; - showLog($c, $c->stash->{build}->drvpath, $mode); + showLog($c, $mode, $c->stash->{build}->drvpath, map { $_->path } $c->stash->{build}->buildoutputs->all); } sub showLog { - my ($c, $drvPath, $mode) = @_; + my ($c, $mode, $drvPath, @outPaths) = @_; - my $logPath = getDrvLogPath($drvPath); + my $logPath = findLog($c, $drvPath, @outPaths); notFound($c, "The build log of derivation ‘$drvPath’ is not available.") unless defined $logPath; + my $size = stat($logPath)->size; + error($c, "This build log is too big to display ($size bytes).") + if $size >= 64 * 1024 * 1024; + if (!$mode) { # !!! quick hack - my $pipeline = "nix-store -l $drvPath" + my $pipeline = ($logPath =~ /.bz2$/ ? "bzip2 -d < $logPath" : "cat $logPath") . " | nix-log2xml | xsltproc " . $c->path_to("xsl/mark-errors.xsl") . " -" . " | xsltproc " . $c->path_to("xsl/log2html.xsl") . 
" - | tail -n +2"; $c->stash->{template} = 'log.tt'; - $c->stash->{logtext} = `$pipeline`; + $c->stash->{logtext} = `ulimit -t 5 ; $pipeline`; } elsif ($mode eq "raw") { - $c->stash->{'plain'} = { data => (scalar logContents($drvPath)) || " " }; - $c->forward('Hydra::View::Plain'); + if ($logPath !~ /.bz2$/) { + $c->serve_static_file($logPath); + } else { + $c->stash->{'plain'} = { data => (scalar logContents($logPath)) || " " }; + $c->forward('Hydra::View::Plain'); + } } elsif ($mode eq "tail-reload") { @@ -162,12 +159,12 @@ sub showLog { $c->stash->{url} = $url; $c->stash->{reload} = !$c->stash->{build}->finished && $c->stash->{build}->busy; $c->stash->{title} = ""; - $c->stash->{contents} = (scalar logContents($drvPath, 50)) || " "; + $c->stash->{contents} = (scalar logContents($logPath, 50)) || " "; $c->stash->{template} = 'plain-reload.tt'; } elsif ($mode eq "tail") { - $c->stash->{'plain'} = { data => (scalar logContents($drvPath, 50)) || " " }; + $c->stash->{'plain'} = { data => (scalar logContents($logPath, 50)) || " " }; $c->forward('Hydra::View::Plain'); } @@ -238,6 +235,21 @@ sub download : Chained('buildChain') PathPart { } +sub output : Chained('buildChain') PathPart Args(1) { + my ($self, $c, $outputName) = @_; + my $build = $c->stash->{build}; + + error($c, "This build is not finished yet.") unless $build->finished; + my $output = $build->buildoutputs->find({name => $outputName}); + notFound($c, "This build has no output named ‘$outputName’") unless defined $output; + error($c, "Output is not available.") unless isValidPath $output->path; + + $c->response->header('Content-Disposition', "attachment; filename=\"build-${\$build->id}-${\$outputName}.nar.bz2\""); + $c->stash->{current_view} = 'NixNAR'; + $c->stash->{storePath} = $output->path; +} + + # Redirect to a download with the given type. Useful when you want to # link to some build product of the latest build (i.e. in conjunction # with the .../latest redirect). @@ -269,7 +281,7 @@ sub contents : Chained('buildChain') PathPart Args(1) { notFound($c, "Product $path has disappeared.") unless -e $path; # Sanitize $path to prevent shell injection attacks. - $path =~ /^\/[\/[A-Za-z0-9_\-\.=]+$/ or die "Filename contains illegal characters.\n"; + $path =~ /^\/[\/[A-Za-z0-9_\-\.=+:]+$/ or die "Filename contains illegal characters.\n"; # FIXME: don't use shell invocations below. @@ -339,8 +351,8 @@ sub getDependencyGraph { { path => $path , name => $name , buildStep => $runtime - ? findBuildStepByOutPath($self, $c, $path, 0) - : findBuildStepByDrvPath($self, $c, $path, 0) + ? 
findBuildStepByOutPath($self, $c, $path) + : findBuildStepByDrvPath($self, $c, $path) }; $$done{$path} = $node; my @refs; @@ -409,49 +421,22 @@ sub nix : Chained('buildChain') PathPart('nix') CaptureArgs(0) { sub restart : Chained('buildChain') PathPart Args(0) { my ($self, $c) = @_; - my $build = $c->stash->{build}; - requireProjectOwner($c, $build->project); - - my $drvpath = $build->drvpath; - error($c, "This build cannot be restarted.") - unless $build->finished && -f $drvpath; - - restartBuild($c->model('DB')->schema, $build); - - $c->flash->{buildMsg} = "Build has been restarted."; - + my $n = restartBuilds($c->model('DB')->schema, $c->model('DB::Builds')->search({ id => $build->id })); + error($c, "This build cannot be restarted.") if $n != 1; + $c->flash->{successMsg} = "Build has been restarted."; $c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures)); } sub cancel : Chained('buildChain') PathPart Args(0) { my ($self, $c) = @_; - my $build = $c->stash->{build}; - requireProjectOwner($c, $build->project); - - txn_do($c->model('DB')->schema, sub { - error($c, "This build cannot be cancelled.") - if $build->finished || $build->busy; - - # !!! Actually, it would be nice to be able to cancel busy - # builds as well, but we would have to send a signal or - # something to the build process. - - my $time = time(); - $build->update( - { finished => 1, busy => 0 - , iscachedbuild => 0, buildstatus => 4 # = cancelled - , starttime => $time - , stoptime => $time - }); - }); - - $c->flash->{buildMsg} = "Build has been cancelled."; - + my $n = cancelBuilds($c->model('DB')->schema, $c->model('DB::Builds')->search({ id => $build->id })); + error($c, "This build cannot be cancelled.") if $n != 1; + $c->flash->{successMsg} = "Build has been cancelled."; $c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures)); } @@ -472,7 +457,7 @@ sub keep : Chained('buildChain') PathPart Args(1) { $build->update({keep => $keep}); }); - $c->flash->{buildMsg} = + $c->flash->{successMsg} = $keep ? "Build will be kept." : "Build will not be kept."; $c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures)); @@ -502,89 +487,12 @@ sub add_to_release : Chained('buildChain') PathPart('add-to-release') Args(0) { $release->releasemembers->create({build => $build->id, description => $build->description}); - $c->flash->{buildMsg} = "Build added to project $releaseName."; + $c->flash->{successMsg} = "Build added to project $releaseName."; $c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures)); } -sub clone : Chained('buildChain') PathPart('clone') Args(0) { - my ($self, $c) = @_; - - my $build = $c->stash->{build}; - - requireProjectOwner($c, $build->project); - - $c->stash->{template} = 'clone-build.tt'; -} - - -sub clone_submit : Chained('buildChain') PathPart('clone/submit') Args(0) { - my ($self, $c) = @_; - - my $build = $c->stash->{build}; - - requireProjectOwner($c, $build->project); - - my ($nixExprPath, $nixExprInputName) = Hydra::Controller::Jobset::nixExprPathFromParams $c; - - # When the expression is in a .scm file, assume it's a Guile + Guix - # build expression. - my $exprType = - $c->request->params->{"nixexprpath"} =~ /.scm$/ ? 
"guile" : "nix"; - - my $jobName = trim $c->request->params->{"jobname"}; - error($c, "Invalid job name: $jobName") if $jobName !~ /^$jobNameRE$/; - - my $inputInfo = {}; - - foreach my $param (keys %{$c->request->params}) { - next unless $param =~ /^input-(\w+)-name$/; - my $baseName = $1; - my ($inputName, $inputType) = - Hydra::Controller::Jobset::checkInput($c, $baseName); - my $inputValue = Hydra::Controller::Jobset::checkInputValue( - $c, $inputType, $c->request->params->{"input-$baseName-value"}); - eval { - # !!! fetchInput can take a long time, which might cause - # the current HTTP request to time out. So maybe this - # should be done asynchronously. But then error reporting - # becomes harder. - my $info = fetchInput( - $c->hydra_plugins, $c->model('DB'), $build->project, $build->jobset, - $inputName, $inputType, $inputValue); - push @{$$inputInfo{$inputName}}, $info if defined $info; - }; - error($c, $@) if $@; - } - - my ($jobs, $nixExprInput) = evalJobs($inputInfo, $exprType, $nixExprInputName, $nixExprPath); - - my $job; - foreach my $j (@{$jobs->{job}}) { - print STDERR $j->{jobName}, "\n"; - if ($j->{jobName} eq $jobName) { - error($c, "Nix expression returned multiple builds for job $jobName.") - if $job; - $job = $j; - } - } - - error($c, "Nix expression did not return a job named $jobName.") unless $job; - - my %currentBuilds; - my $newBuild = checkBuild( - $c->model('DB'), $build->project, $build->jobset, - $inputInfo, $nixExprInput, $job, \%currentBuilds, undef, {}); - - error($c, "This build has already been performed.") unless $newBuild; - - $c->flash->{buildMsg} = "Build " . $newBuild->id . " added to the queue."; - - $c->res->redirect($c->uri_for($c->controller('Root')->action_for('queue'))); -} - - sub get_info : Chained('buildChain') PathPart('api/get-info') Args(0) { my ($self, $c) = @_; my $build = $c->stash->{build}; @@ -614,6 +522,22 @@ sub evals : Chained('buildChain') PathPart('evals') Args(0) { } +# Redirect to the latest finished evaluation that contains this build. 
+sub eval : Chained('buildChain') PathPart('eval') { + my ($self, $c, @rest) = @_; + + my $eval = $c->stash->{build}->jobsetevals->find( + { hasnewbuilds => 1 }, + { order_by => "id DESC", rows => 1 + , "not exists (select 1 from jobsetevalmembers m2 join builds b2 on me.eval = m2.eval and m2.build = b2.id and b2.finished = 0)" + }); + + notFound($c, "There is no finished evaluation containing this build.") unless defined $eval; + + $c->res->redirect($c->uri_for($c->controller('JobsetEval')->action_for("view"), [$eval->id], @rest, $c->req->params)); +} + + sub reproduce : Chained('buildChain') PathPart('reproduce') Args(0) { my ($self, $c) = @_; $c->response->content_type('text/x-shellscript'); diff --git a/src/lib/Hydra/Controller/Job.pm b/src/lib/Hydra/Controller/Job.pm index e062e14a..2758345c 100644 --- a/src/lib/Hydra/Controller/Job.pm +++ b/src/lib/Hydra/Controller/Job.pm @@ -20,24 +20,52 @@ sub job : Chained('/') PathPart('job') CaptureArgs(3) { sub overview : Chained('job') PathPart('') Args(0) { my ($self, $c) = @_; + my $job = $c->stash->{job}; $c->stash->{template} = 'job.tt'; $c->stash->{lastBuilds} = - [ $c->stash->{job}->builds->search({ finished => 1 }, + [ $job->builds->search({ finished => 1 }, { order_by => 'id DESC', rows => 10, columns => [@buildListColumns] }) ]; $c->stash->{queuedBuilds} = [ - $c->stash->{job}->builds->search( + $job->builds->search( { finished => 0 }, - { join => ['project'] - , order_by => ["priority DESC", "id"] - , '+select' => ['project.enabled'] - , '+as' => ['enabled'] - } + { order_by => ["priority DESC", "id"] } ) ]; - $c->stash->{systems} = [$c->stash->{job}->builds->search({iscurrent => 1}, {select => ["system"], distinct => 1})]; + # If this is an aggregate job, then get its constituents. + my @constituents = $c->model('DB::Builds')->search( + { aggregate => { -in => $job->builds->search({}, { columns => ["id"], order_by => "id desc", rows => 15 })->as_query } }, + { join => 'aggregateconstituents_constituents', + columns => ['id', 'job', 'finished', 'buildstatus'], + +select => ['aggregateconstituents_constituents.aggregate'], + +as => ['aggregate'] + }); + + my $aggregates = {}; + my %constituentJobs; + foreach my $b (@constituents) { + my $jobName = $b->get_column('job'); + $aggregates->{$b->get_column('aggregate')}->{constituents}->{$jobName} = + { id => $b->id, finished => $b->finished, buildstatus => $b->buildstatus }; + $constituentJobs{$jobName} = 1; + } + + foreach my $agg (keys %$aggregates) { + # FIXME: could be done in one query. 
+ $aggregates->{$agg}->{build} = + $c->model('DB::Builds')->find({id => $agg}, {columns => [@buildListColumns]}) or die; + } + + $c->stash->{aggregates} = $aggregates; + $c->stash->{constituentJobs} = [sort (keys %constituentJobs)]; + + $c->stash->{starred} = $c->user->starredjobs( + { project => $c->stash->{project}->name + , jobset => $c->stash->{jobset}->name + , job => $c->stash->{job}->name + })->count == 1 if $c->user_exists; } @@ -45,9 +73,6 @@ sub overview : Chained('job') PathPart('') Args(0) { sub get_builds : Chained('job') PathPart('') CaptureArgs(0) { my ($self, $c) = @_; $c->stash->{allBuilds} = $c->stash->{job}->builds; - $c->stash->{jobStatus} = $c->model('DB')->resultset('JobStatusForJob') - ->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name, $c->stash->{job}->name]}); - $c->stash->{allJobs} = $c->stash->{job_}; $c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForJob') ->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name, $c->stash->{job}->name]}); $c->stash->{channelBaseName} = @@ -55,4 +80,22 @@ sub get_builds : Chained('job') PathPart('') CaptureArgs(0) { } +sub star : Chained('job') PathPart('star') Args(0) { + my ($self, $c) = @_; + requirePost($c); + requireUser($c); + my $args = + { project => $c->stash->{project}->name + , jobset => $c->stash->{jobset}->name + , job => $c->stash->{job}->name + }; + if ($c->request->params->{star} eq "1") { + $c->user->starredjobs->update_or_create($args); + } else { + $c->user->starredjobs->find($args)->delete; + } + $c->stash->{resource}->{success} = 1; +} + + 1; diff --git a/src/lib/Hydra/Controller/Jobset.pm b/src/lib/Hydra/Controller/Jobset.pm index cb52f68d..35b29d86 100644 --- a/src/lib/Hydra/Controller/Jobset.pm +++ b/src/lib/Hydra/Controller/Jobset.pm @@ -1,5 +1,6 @@ package Hydra::Controller::Jobset; +use utf8; use strict; use warnings; use base 'Hydra::Base::Controller::ListBuilds'; @@ -9,35 +10,18 @@ use Hydra::Helper::CatalystUtils; sub jobsetChain :Chained('/') :PathPart('jobset') :CaptureArgs(2) { my ($self, $c, $projectName, $jobsetName) = @_; + $c->stash->{params}->{name} //= $jobsetName; my $project = $c->model('DB::Projects')->find($projectName); - if ($project) { - $c->stash->{project} = $project; + notFound($c, "Project ‘$projectName’ doesn't exist.") if !$project; - $c->stash->{jobset_} = $project->jobsets->search({'me.name' => $jobsetName}); - my $jobset = $c->stash->{jobset_}->single; + $c->stash->{project} = $project; - if ($jobset) { - $c->stash->{jobset} = $jobset; - } else { - if ($c->action->name eq "jobset" and $c->request->method eq "PUT") { - $c->stash->{jobsetName} = $jobsetName; - } else { - $self->status_not_found( - $c, - message => "Jobset $jobsetName doesn't exist." - ); - $c->detach; - } - } - } else { - $self->status_not_found( - $c, - message => "Project $projectName doesn't exist." 
- ); - $c->detach; - } + $c->stash->{jobset} = $project->jobsets->find({ name => $jobsetName }); + + notFound($c, "Jobset ‘$jobsetName’ doesn't exist.") + if !$c->stash->{jobset} && !($c->action->name eq "jobset" and $c->request->method eq "PUT"); } @@ -50,26 +34,11 @@ sub jobset_GET { $c->stash->{evals} = getEvals($self, $c, scalar $c->stash->{jobset}->jobsetevals, 0, 10); - ($c->stash->{latestEval}) = $c->stash->{jobset}->jobsetevals->search({}, { limit => 1, order_by => ["id desc"] }); + $c->stash->{latestEval} = $c->stash->{jobset}->jobsetevals->search({}, { rows => 1, order_by => ["id desc"] })->single; - $self->status_ok( - $c, - entity => $c->stash->{jobset_}->find({}, { - columns => [ - 'me.name', - 'me.project', - 'me.errormsg', - 'jobsetinputs.name', - { - 'jobsetinputs.jobsetinputalts.altnr' => 'jobsetinputalts.altnr', - 'jobsetinputs.jobsetinputalts.value' => 'jobsetinputalts.value' - } - ], - join => { 'jobsetinputs' => 'jobsetinputalts' }, - collapse => 1, - order_by => "me.name" - }) - ); + $c->stash->{totalShares} = getTotalShares($c->model('DB')->schema); + + $self->status_ok($c, entity => $c->stash->{jobset}); } sub jobset_PUT { @@ -78,133 +47,91 @@ sub jobset_PUT { requireProjectOwner($c, $c->stash->{project}); if (defined $c->stash->{jobset}) { - error($c, "Cannot rename jobset `$c->stash->{params}->{oldName}' over existing jobset `$c->stash->{jobset}->name") if defined $c->stash->{params}->{oldName} and $c->stash->{params}->{oldName} ne $c->stash->{jobset}->name; txn_do($c->model('DB')->schema, sub { updateJobset($c, $c->stash->{jobset}); }); - if ($c->req->looks_like_browser) { - $c->res->redirect($c->uri_for($self->action_for("jobset"), - [$c->stash->{project}->name, $c->stash->{jobset}->name]) . "#tabs-configuration"); - } else { - $self->status_no_content($c); - } - } elsif (defined $c->stash->{params}->{oldName}) { - my $jobset = $c->stash->{project}->jobsets->find({'me.name' => $c->stash->{params}->{oldName}}); + my $uri = $c->uri_for($self->action_for("jobset"), [$c->stash->{project}->name, $c->stash->{jobset}->name]) . "#tabs-configuration"; + $self->status_ok($c, entity => { redirect => "$uri" }); - if (defined $jobset) { - txn_do($c->model('DB')->schema, sub { - updateJobset($c, $jobset); - }); - - my $uri = $c->uri_for($self->action_for("jobset"), [$c->stash->{project}->name, $jobset->name]); - - if ($c->req->looks_like_browser) { - $c->res->redirect($uri . "#tabs-configuration"); - } else { - $self->status_created( - $c, - location => "$uri", - entity => { name => $jobset->name, uri => "$uri", type => "jobset" } - ); - } - } else { - $self->status_not_found( - $c, - message => "Jobset $c->stash->{params}->{oldName} doesn't exist." - ); - } - } else { - my $exprType = - $c->stash->{params}->{"nixexprpath"} =~ /.scm$/ ? "guile" : "nix"; - - error($c, "Invalid jobset name: ‘$c->stash->{jobsetName}’") if $c->stash->{jobsetName} !~ /^$jobsetNameRE$/; + $c->flash->{successMsg} = "The jobset configuration has been updated."; + } + else { my $jobset; txn_do($c->model('DB')->schema, sub { # Note: $jobsetName is validated in updateProject, which will # abort the transaction if the name isn't valid. 
$jobset = $c->stash->{project}->jobsets->create( - {name => $c->stash->{jobsetName}, nixexprinput => "", nixexprpath => "", emailoverride => ""}); + {name => ".tmp", nixexprinput => "", nixexprpath => "", emailoverride => ""}); updateJobset($c, $jobset); }); my $uri = $c->uri_for($self->action_for("jobset"), [$c->stash->{project}->name, $jobset->name]); - if ($c->req->looks_like_browser) { - $c->res->redirect($uri . "#tabs-configuration"); - } else { - $self->status_created( - $c, - location => "$uri", - entity => { name => $jobset->name, uri => "$uri", type => "jobset" } - ); - } + $self->status_created($c, + location => "$uri", + entity => { name => $jobset->name, uri => "$uri", redirect => "$uri", type => "jobset" }); } } +sub jobset_DELETE { + my ($self, $c) = @_; + + requireProjectOwner($c, $c->stash->{project}); + + txn_do($c->model('DB')->schema, sub { + $c->stash->{jobset}->jobsetevals->delete; + $c->stash->{jobset}->builds->delete; + $c->stash->{jobset}->delete; + }); + + my $uri = $c->uri_for($c->controller('Project')->action_for("project"), [$c->stash->{project}->name]); + $self->status_ok($c, entity => { redirect => "$uri" }); + + $c->flash->{successMsg} = "The jobset has been deleted."; +} + sub jobs_tab : Chained('jobsetChain') PathPart('jobs-tab') Args(0) { my ($self, $c) = @_; $c->stash->{template} = 'jobset-jobs-tab.tt'; - $c->stash->{activeJobs} = []; - $c->stash->{inactiveJobs} = []; + $c->stash->{filter} = $c->request->params->{filter} // ""; + my $filter = "%" . $c->stash->{filter} . "%"; - (my $latestEval) = $c->stash->{jobset}->jobsetevals->search( - { hasnewbuilds => 1}, { limit => 1, order_by => ["id desc"] }); + my @evals = $c->stash->{jobset}->jobsetevals->search({ hasnewbuilds => 1}, { order_by => "id desc", rows => 20 }); - my %activeJobs; - if (defined $latestEval) { - foreach my $build ($latestEval->builds->search({}, { order_by => ["job"], select => ["job"] })) { - my $job = $build->get_column("job"); - if (!defined $activeJobs{$job}) { - $activeJobs{$job} = 1; - push @{$c->stash->{activeJobs}}, $job; - } + my $evals = {}; + my %jobs; + my $nrBuilds = 0; + + foreach my $eval (@evals) { + my @builds = $eval->builds->search( + { job => { ilike => $filter } }, + { columns => ['id', 'job', 'finished', 'buildstatus'] }); + foreach my $b (@builds) { + my $jobName = $b->get_column('job'); + $evals->{$eval->id}->{$jobName} = + { id => $b->id, finished => $b->finished, buildstatus => $b->buildstatus }; + $jobs{$jobName} = 1; + $nrBuilds++; + } + last if $nrBuilds >= 10000; + } + + if ($c->request->params->{showInactive}) { + $c->stash->{showInactive} = 1; + foreach my $job ($c->stash->{jobset}->jobs->search({ name => { ilike => $filter } })) { + next if defined $jobs{$job->name}; + $c->stash->{inactiveJobs}->{$job->name} = $jobs{$job->name} = 1; } } - foreach my $job ($c->stash->{jobset}->jobs->search({}, { order_by => ["name"] })) { - if (!defined $activeJobs{$job->name}) { - push @{$c->stash->{inactiveJobs}}, $job->name; - } - } -} - - -sub status_tab : Chained('jobsetChain') PathPart('status-tab') Args(0) { - my ($self, $c) = @_; - $c->stash->{template} = 'jobset-status-tab.tt'; - - # FIXME: use latest eval instead of iscurrent. 
- - $c->stash->{systems} = - [ $c->stash->{jobset}->builds->search({ iscurrent => 1 }, { select => ["system"], distinct => 1, order_by => "system" }) ]; - - # status per system - my @systems = (); - foreach my $system (@{$c->stash->{systems}}) { - push(@systems, $system->system); - } - - my @select = (); - my @as = (); - push(@select, "job"); push(@as, "job"); - foreach my $system (@systems) { - push(@select, "(select buildstatus from Builds b where b.id = (select max(id) from Builds t where t.project = me.project and t.jobset = me.jobset and t.job = me.job and t.system = '$system' and t.iscurrent = 1 ))"); - push(@as, $system); - push(@select, "(select b.id from Builds b where b.id = (select max(id) from Builds t where t.project = me.project and t.jobset = me.jobset and t.job = me.job and t.system = '$system' and t.iscurrent = 1 ))"); - push(@as, "$system-build"); - } - - $c->stash->{activeJobsStatus} = [ - $c->model('DB')->resultset('ActiveJobsForJobset')->search( - {}, - { bind => [$c->stash->{project}->name, $c->stash->{jobset}->name] - , select => \@select - , as => \@as - , order_by => ["job"] - }) ]; + $c->stash->{evals} = $evals; + my @jobs = sort (keys %jobs); + $c->stash->{nrJobs} = scalar @jobs; + splice @jobs, 250 if $c->stash->{filter} eq ""; + $c->stash->{jobs} = [@jobs]; } @@ -212,10 +139,6 @@ sub status_tab : Chained('jobsetChain') PathPart('status-tab') Args(0) { sub get_builds : Chained('jobsetChain') PathPart('') CaptureArgs(0) { my ($self, $c) = @_; $c->stash->{allBuilds} = $c->stash->{jobset}->builds; - $c->stash->{jobStatus} = $c->model('DB')->resultset('JobStatusForJobset') - ->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name]}); - $c->stash->{allJobsets} = $c->stash->{jobset_}; - $c->stash->{allJobs} = $c->stash->{jobset}->jobs; $c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForJobset') ->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name]}); $c->stash->{channelBaseName} = @@ -230,31 +153,8 @@ sub edit : Chained('jobsetChain') PathPart Args(0) { $c->stash->{template} = 'edit-jobset.tt'; $c->stash->{edit} = 1; -} - - -sub submit : Chained('jobsetChain') PathPart Args(0) { - my ($self, $c) = @_; - - requirePost($c); - - if (($c->request->params->{submit} // "") eq "delete") { - txn_do($c->model('DB')->schema, sub { - $c->stash->{jobset}->jobsetevals->delete_all; - $c->stash->{jobset}->builds->delete_all; - $c->stash->{jobset}->delete; - }); - return $c->res->redirect($c->uri_for($c->controller('Project')->action_for("project"), [$c->stash->{project}->name])); - } - - my $newName = trim $c->stash->{params}->{name}; - my $oldName = trim $c->stash->{jobset}->name; - unless ($oldName eq $newName) { - $c->stash->{params}->{oldName} = $oldName; - $c->stash->{jobsetName} = $newName; - undef $c->stash->{jobset}; - } - jobset_PUT($self, $c); + $c->stash->{clone} = defined $c->stash->{params}->{clone}; + $c->stash->{totalShares} = getTotalShares($c->model('DB')->schema); } @@ -263,10 +163,10 @@ sub nixExprPathFromParams { # The Nix expression path must be relative and can't contain ".." elements. 
my $nixExprPath = trim $c->stash->{params}->{"nixexprpath"}; - error($c, "Invalid Nix expression path: $nixExprPath") if $nixExprPath !~ /^$relPathRE$/; + error($c, "Invalid Nix expression path ‘$nixExprPath’.") if $nixExprPath !~ /^$relPathRE$/; my $nixExprInput = trim $c->stash->{params}->{"nixexprinput"}; - error($c, "Invalid Nix expression input name: $nixExprInput") unless $nixExprInput =~ /^\w+$/; + error($c, "Invalid Nix expression input name ‘$nixExprInput’.") unless $nixExprInput =~ /^[[:alpha:]][\w-]*$/; return ($nixExprPath, $nixExprInput); } @@ -275,7 +175,7 @@ sub nixExprPathFromParams { sub checkInputValue { my ($c, $type, $value) = @_; $value = trim $value; - error($c, "Invalid Boolean value: $value") if + error($c, "Invalid Boolean value ‘$value’.") if $type eq "boolean" && !($value eq "true" || $value eq "false"); return $value; } @@ -284,8 +184,11 @@ sub checkInputValue { sub updateJobset { my ($c, $jobset) = @_; - my $jobsetName = $c->stash->{jobsetName} // $jobset->name; - error($c, "Invalid jobset name: ‘$jobsetName’") if $jobsetName !~ /^$jobsetNameRE$/; + my $jobsetName = $c->stash->{params}->{name}; + error($c, "Invalid jobset identifier ‘$jobsetName’.") if $jobsetName !~ /^$jobsetNameRE$/; + + error($c, "Cannot rename jobset to ‘$jobsetName’ since that identifier is already taken.") + if $jobsetName ne $jobset->name && defined $c->stash->{project}->jobsets->find({ name => $jobsetName }); # When the expression is in a .scm file, assume it's a Guile + Guix # build expression. @@ -294,118 +197,61 @@ sub updateJobset { my ($nixExprPath, $nixExprInput) = nixExprPathFromParams $c; + my $enabled = int($c->stash->{params}->{enabled}); + die if $enabled < 0 || $enabled > 2; + $jobset->update( { name => $jobsetName , description => trim($c->stash->{params}->{"description"}) , nixexprpath => $nixExprPath , nixexprinput => $nixExprInput - , enabled => defined $c->stash->{params}->{enabled} ? 1 : 0 + , enabled => $enabled , enableemail => defined $c->stash->{params}->{enableemail} ? 1 : 0 , emailoverride => trim($c->stash->{params}->{emailoverride}) || "" , hidden => defined $c->stash->{params}->{visible} ? 0 : 1 - , keepnr => int(trim($c->stash->{params}->{keepnr})) || 3 + , keepnr => int(trim($c->stash->{params}->{keepnr})) , checkinterval => int(trim($c->stash->{params}->{checkinterval})) - , triggertime => $jobset->triggertime // time() + , triggertime => $enabled ? $jobset->triggertime // time() : undef + , schedulingshares => int($c->stash->{params}->{schedulingshares}) }); - # Process the inputs of this jobset. - unless (defined $c->stash->{params}->{inputs}) { - $c->stash->{params}->{inputs} = {}; - foreach my $param (keys %{$c->stash->{params}}) { - next unless $param =~ /^input-(\w+)-name$/; - my $baseName = $1; - next if $baseName eq "template"; - $c->stash->{params}->{inputs}->{$c->stash->{params}->{$param}} = { type => $c->stash->{params}->{"input-$baseName-type"}, values => $c->stash->{params}->{"input-$baseName-values"} }; - unless ($baseName =~ /^\d+$/) { # non-numeric base name is an existing entry - $c->stash->{params}->{inputs}->{$c->stash->{params}->{$param}}->{oldName} = $baseName; - } - } - } + # Set the inputs of this jobset. 
+ $jobset->jobsetinputs->delete; - foreach my $inputName (keys %{$c->stash->{params}->{inputs}}) { - my $inputData = $c->stash->{params}->{inputs}->{$inputName}; - error($c, "Invalid input name: $inputName") unless $inputName =~ /^[[:alpha:]]\w*$/; - - my $inputType = $inputData->{type}; - error($c, "Invalid input type: $inputType") unless - $inputType eq "svn" || $inputType eq "svn-checkout" || $inputType eq "hg" || $inputType eq "tarball" || - $inputType eq "string" || $inputType eq "path" || $inputType eq "boolean" || $inputType eq "bzr" || $inputType eq "bzr-checkout" || - $inputType eq "git" || $inputType eq "build" || $inputType eq "sysbuild" ; - - my $input; - unless (defined $inputData->{oldName}) { - $input = $jobset->jobsetinputs->update_or_create( - { name => $inputName - , type => $inputType - }); - } else { # it's an existing input - $input = ($jobset->jobsetinputs->search({name => $inputData->{oldName}}))[0]; - die unless defined $input; - $input->update({name => $inputName, type => $inputType}); - } - - # Update the values for this input. Just delete all the - # current ones, then create the new values. - $input->jobsetinputalts->delete_all; + foreach my $name (keys %{$c->stash->{params}->{inputs}}) { + my $inputData = $c->stash->{params}->{inputs}->{$name}; + my $type = $inputData->{type}; my $values = $inputData->{values}; - $values = [] unless defined $values; - $values = [$values] unless ref($values) eq 'ARRAY'; + my $emailresponsible = defined $inputData->{emailresponsible} ? 1 : 0; + + error($c, "Invalid input name ‘$name’.") unless $name =~ /^[[:alpha:]][\w-]*$/; + error($c, "Invalid input type ‘$type’.") unless defined $c->stash->{inputTypes}->{$type}; + + my $input = $jobset->jobsetinputs->create({ + name => $name, + type => $type, + emailresponsible => $emailresponsible + }); + + # Set the values for this input. + my @values = ref($values) eq 'ARRAY' ? @{$values} : ($values); my $altnr = 0; - foreach my $value (@{$values}) { - $value = checkInputValue($c, $inputType, $value); + foreach my $value (@values) { + $value = checkInputValue($c, $type, $value); $input->jobsetinputalts->create({altnr => $altnr++, value => $value}); } } - - # Get rid of deleted inputs. 
- my @inputs = $jobset->jobsetinputs->all; - foreach my $input (@inputs) { - $input->delete unless defined $c->stash->{params}->{inputs}->{$input->name}; - } } sub clone : Chained('jobsetChain') PathPart('clone') Args(0) { my ($self, $c) = @_; - my $jobset = $c->stash->{jobset}; - requireProjectOwner($c, $jobset->project); + requireProjectOwner($c, $c->stash->{project}); - $c->stash->{template} = 'clone-jobset.tt'; -} - - -sub clone_submit : Chained('jobsetChain') PathPart('clone/submit') Args(0) { - my ($self, $c) = @_; - - my $jobset = $c->stash->{jobset}; - requireProjectOwner($c, $jobset->project); - requirePost($c); - - my $newJobsetName = trim $c->stash->{params}->{"newjobset"}; - error($c, "Invalid jobset name: $newJobsetName") unless $newJobsetName =~ /^[[:alpha:]][\w\-]*$/; - - my $newJobset; - txn_do($c->model('DB')->schema, sub { - $newJobset = $jobset->project->jobsets->create( - { name => $newJobsetName - , description => $jobset->description - , nixexprpath => $jobset->nixexprpath - , nixexprinput => $jobset->nixexprinput - , enabled => 0 - , enableemail => $jobset->enableemail - , emailoverride => $jobset->emailoverride || "" - }); - - foreach my $input ($jobset->jobsetinputs) { - my $newinput = $newJobset->jobsetinputs->create({name => $input->name, type => $input->type}); - foreach my $inputalt ($input->jobsetinputalts) { - $newinput->jobsetinputalts->create({altnr => $inputalt->altnr, value => $inputalt->value}); - } - } - }); - - $c->res->redirect($c->uri_for($c->controller('Jobset')->action_for("edit"), [$jobset->project->name, $newJobsetName])); + $c->stash->{template} = 'edit-jobset.tt'; + $c->stash->{clone} = 1; + $c->stash->{totalShares} = getTotalShares($c->model('DB')->schema); } @@ -428,24 +274,7 @@ sub evals_GET { my $offset = ($page - 1) * $resultsPerPage; $c->stash->{evals} = getEvals($self, $c, $evals, $offset, $resultsPerPage); my %entity = ( - evals => [ $evals->search({ 'me.hasnewbuilds' => 1 }, { - columns => [ - 'me.hasnewbuilds', - 'me.id', - 'jobsetevalinputs.name', - 'jobsetevalinputs.altnr', - 'jobsetevalinputs.revision', - 'jobsetevalinputs.type', - 'jobsetevalinputs.uri', - 'jobsetevalinputs.dependency', - 'jobsetevalmembers.build', - ], - join => [ 'jobsetevalinputs', 'jobsetevalmembers' ], - collapse => 1, - rows => $resultsPerPage, - offset => $offset, - order_by => "me.id DESC", - }) ], + evals => [ map { $_->{eval} } @{$c->stash->{evals}} ], first => "?page=1", last => "?page=" . POSIX::ceil($c->stash->{total}/$resultsPerPage) ); diff --git a/src/lib/Hydra/Controller/JobsetEval.pm b/src/lib/Hydra/Controller/JobsetEval.pm index 663c4b12..74ab3bce 100644 --- a/src/lib/Hydra/Controller/JobsetEval.pm +++ b/src/lib/Hydra/Controller/JobsetEval.pm @@ -26,6 +26,9 @@ sub view : Chained('eval') PathPart('') Args(0) { my $eval = $c->stash->{eval}; + $c->stash->{filter} = $c->request->params->{filter} // ""; + my $filter = $c->stash->{filter} eq "" ? {} : { job => { ilike => "%" . $c->stash->{filter} . 
"%" } }; + my $compare = $c->req->params->{compare}; my $eval2; @@ -36,6 +39,11 @@ sub view : Chained('eval') PathPart('') Args(0) { if ($compare =~ /^\d+$/) { $eval2 = $c->model('DB::JobsetEvals')->find($compare) or notFound($c, "Evaluation $compare doesn't exist."); + } elsif ($compare =~ /^-(\d+)$/) { + my $t = int($1); + $eval2 = $c->stash->{jobset}->jobsetevals->find( + { hasnewbuilds => 1, timestamp => {'<=', $eval->timestamp - $t} }, + { order_by => "timestamp desc", rows => 1}); } elsif (defined $compare && $compare =~ /^($jobsetNameRE)$/) { my $j = $c->stash->{project}->jobsets->find({name => $compare}) or notFound($c, "Jobset $compare doesn't exist."); @@ -51,10 +59,17 @@ sub view : Chained('eval') PathPart('') Args(0) { $c->stash->{otherEval} = $eval2 if defined $eval2; - my @builds = $eval->builds->search({}, { order_by => ["job", "system", "id"], columns => [@buildListColumns] }); - my @builds2 = defined $eval2 - ? $eval2->builds->search({}, { order_by => ["job", "system", "id"], columns => [@buildListColumns] }) - : (); + sub cmpBuilds { + my ($a, $b) = @_; + return $a->get_column('job') cmp $b->get_column('job') + || $a->get_column('system') cmp $b->get_column('system') + } + + my @builds = $eval->builds->search($filter, { columns => [@buildListColumns] }); + my @builds2 = defined $eval2 ? $eval2->builds->search($filter, { columns => [@buildListColumns] }) : (); + + @builds = sort { cmpBuilds($a, $b) } @builds; + @builds2 = sort { cmpBuilds($a, $b) } @builds2; $c->stash->{stillSucceed} = []; $c->stash->{stillFail} = []; @@ -63,15 +78,19 @@ sub view : Chained('eval') PathPart('') Args(0) { $c->stash->{new} = []; $c->stash->{removed} = []; $c->stash->{unfinished} = []; + $c->stash->{aborted} = []; my $n = 0; foreach my $build (@builds) { + if ($build->finished != 0 && ($build->buildstatus == 3 || $build->buildstatus == 4)) { + push @{$c->stash->{aborted}}, $build; + next; + } my $d; my $found = 0; while ($n < scalar(@builds2)) { my $build2 = $builds2[$n]; - my $d = $build->get_column('job') cmp $build2->get_column('job') - || $build->get_column('system') cmp $build2->get_column('system'); + my $d = cmpBuilds($build, $build2); last if $d == -1; if ($d == 0) { $n++; @@ -135,6 +154,25 @@ sub release : Chained('eval') PathPart('release') Args(0) { } +sub cancel : Chained('eval') PathPart('cancel') Args(0) { + my ($self, $c) = @_; + requireProjectOwner($c, $c->stash->{eval}->project); + my $n = cancelBuilds($c->model('DB')->schema, $c->stash->{eval}->builds); + $c->flash->{successMsg} = "$n builds have been cancelled."; + $c->res->redirect($c->uri_for($c->controller('JobsetEval')->action_for('view'), $c->req->captures)); +} + + +sub restart_aborted : Chained('eval') PathPart('restart-aborted') Args(0) { + my ($self, $c) = @_; + requireProjectOwner($c, $c->stash->{eval}->project); + my $builds = $c->stash->{eval}->builds->search({ finished => 1, buildstatus => { -in => [3, 4] } }); + my $n = restartBuilds($c->model('DB')->schema, $builds); + $c->flash->{successMsg} = "$n builds have been restarted."; + $c->res->redirect($c->uri_for($c->controller('JobsetEval')->action_for('view'), $c->req->captures)); +} + + # Hydra::Base::Controller::NixChannel needs this. 
sub nix : Chained('eval') PathPart('channel') CaptureArgs(0) { my ($self, $c) = @_; @@ -144,8 +182,20 @@ sub nix : Chained('eval') PathPart('channel') CaptureArgs(0) { ->search({ finished => 1, buildstatus => 0 }, { columns => [@buildListColumns, 'drvpath', 'description', 'homepage'] , join => ["buildoutputs"] + , order_by => ["build.id", "buildoutputs.name"] , '+select' => ['buildoutputs.path', 'buildoutputs.name'], '+as' => ['outpath', 'outname'] }); } +sub job : Chained('eval') PathPart('job') { + my ($self, $c, $job, @rest) = @_; + + my $build = $c->stash->{eval}->builds->find({job => $job}); + + notFound($c, "This evaluation has no job with the specified name.") unless defined $build; + + $c->res->redirect($c->uri_for($c->controller('Build')->action_for("build"), [$build->id], @rest)); +} + + 1; diff --git a/src/lib/Hydra/Controller/Project.pm b/src/lib/Hydra/Controller/Project.pm index c0c9145f..582c5e88 100644 --- a/src/lib/Hydra/Controller/Project.pm +++ b/src/lib/Hydra/Controller/Project.pm @@ -1,5 +1,6 @@ package Hydra::Controller::Project; +use utf8; use strict; use warnings; use base 'Hydra::Base::Controller::ListBuilds'; @@ -9,35 +10,15 @@ use Hydra::Helper::CatalystUtils; sub projectChain :Chained('/') :PathPart('project') :CaptureArgs(1) { my ($self, $c, $projectName) = @_; + $c->stash->{params}->{name} //= $projectName; - my $project = $c->model('DB::Projects')->find($projectName, { columns => [ - "me.name", - "me.displayName", - "me.description", - "me.enabled", - "me.hidden", - "me.homepage", - "owner.username", - "owner.fullname", - "views.name", - "releases.name", - "releases.timestamp", - "jobsets.name", - ], join => [ 'owner', 'views', 'releases', 'jobsets' ], order_by => { -desc => "releases.timestamp" }, collapse => 1 }); + $c->stash->{project} = $c->model('DB::Projects')->find($projectName, { + join => [ 'releases' ], + order_by => { -desc => "releases.timestamp" }, + }); - if ($project) { - $c->stash->{project} = $project; - } else { - if ($c->action->name eq "project" and $c->request->method eq "PUT") { - $c->stash->{projectName} = $projectName; - } else { - $self->status_not_found( - $c, - message => "Project $projectName doesn't exist." - ); - $c->detach; - } - } + notFound($c, "Project ‘$projectName’ doesn't exist.") + if !$c->stash->{project} && !($c->action->name eq "project" and $c->request->method eq "PUT"); } @@ -53,55 +34,27 @@ sub project_GET { $c->stash->{releases} = [$c->stash->{project}->releases->search({}, {order_by => ["timestamp DESC"]})]; - $self->status_ok( - $c, - entity => $c->stash->{project} - ); + $self->status_ok($c, entity => $c->stash->{project}); } sub project_PUT { my ($self, $c) = @_; if (defined $c->stash->{project}) { - error($c, "Cannot rename project `$c->stash->{params}->{oldName}' over existing project `$c->stash->{project}->name") if defined $c->stash->{params}->{oldName}; requireProjectOwner($c, $c->stash->{project}); + txn_do($c->model('DB')->schema, sub { updateProject($c, $c->stash->{project}); }); - if ($c->req->looks_like_browser) { - $c->res->redirect($c->uri_for($self->action_for("project"), [$c->stash->{project}->name]) . 
"#tabs-configuration"); - } else { - $self->status_no_content($c); - } - } elsif (defined $c->stash->{params}->{oldName}) { - my $project = $c->model('DB::Projects')->find($c->stash->{params}->{oldName}); - if (defined $project) { - requireProjectOwner($c, $project); - txn_do($c->model('DB')->schema, sub { - updateProject($c, $project); - }); + my $uri = $c->uri_for($self->action_for("project"), [$c->stash->{project}->name]) . "#tabs-configuration"; + $self->status_ok($c, entity => { redirect => "$uri" }); - my $uri = $c->uri_for($self->action_for("project"), [$project->name]); + $c->flash->{successMsg} = "The project configuration has been updated."; + } - if ($c->req->looks_like_browser) { - $c->res->redirect($uri . "#tabs-configuration"); - } else { - $self->status_created( - $c, - location => "$uri", - entity => { name => $project->name, uri => "$uri", type => "project" } - ); - } - } else { - $self->status_not_found( - $c, - message => "Project $c->stash->{params}->{oldName} doesn't exist." - ); - } - } else { + else { requireMayCreateProjects($c); - error($c, "Invalid project name: ‘$c->stash->{projectName}’") if $c->stash->{projectName} !~ /^$projectNameRE$/; my $project; txn_do($c->model('DB')->schema, sub { @@ -110,23 +63,34 @@ sub project_PUT { # valid. Idem for the owner. my $owner = $c->user->username; $project = $c->model('DB::Projects')->create( - {name => $c->stash->{projectName}, displayname => "", owner => $owner}); + { name => ".tmp", displayname => "", owner => $owner }); updateProject($c, $project); }); my $uri = $c->uri_for($self->action_for("project"), [$project->name]); - if ($c->req->looks_like_browser) { - $c->res->redirect($uri . "#tabs-configuration"); - } else { - $self->status_created( - $c, - location => "$uri", - entity => { name => $project->name, uri => "$uri", type => "project" } - ); - } + $self->status_created($c, + location => "$uri", + entity => { name => $project->name, uri => "$uri", redirect => "$uri", type => "project" }); } } +sub project_DELETE { + my ($self, $c) = @_; + + requireProjectOwner($c, $c->stash->{project}); + + txn_do($c->model('DB')->schema, sub { + $c->stash->{project}->jobsetevals->delete; + $c->stash->{project}->builds->delete; + $c->stash->{project}->delete; + }); + + my $uri = $c->res->redirect($c->uri_for("/")); + $self->status_ok($c, entity => { redirect => "$uri" }); + + $c->flash->{successMsg} = "The project has been deleted."; +} + sub edit : Chained('projectChain') PathPart Args(0) { my ($self, $c) = @_; @@ -138,36 +102,10 @@ sub edit : Chained('projectChain') PathPart Args(0) { } -sub submit : Chained('projectChain') PathPart Args(0) { - my ($self, $c) = @_; - - requirePost($c); - if (($c->request->params->{submit} // "") eq "delete") { - txn_do($c->model('DB')->schema, sub { - $c->stash->{project}->jobsetevals->delete_all; - $c->stash->{project}->builds->delete_all; - $c->stash->{project}->delete; - }); - return $c->res->redirect($c->uri_for("/")); - } - - my $newName = trim $c->stash->{params}->{name}; - my $oldName = trim $c->stash->{project}->name; - unless ($oldName eq $newName) { - $c->stash->{params}->{oldName} = $oldName; - $c->stash->{projectName} = $newName; - undef $c->stash->{project}; - } - project_PUT($self, $c); -} - - sub requireMayCreateProjects { my ($c) = @_; - - requireLogin($c) if !$c->user_exists; - - error($c, "Only administrators or authorised users can perform this operation.") + requireUser($c); + accessDenied($c, "Only administrators or authorised users can perform this operation.") unless 
$c->check_user_roles('admin') || $c->check_user_roles('create-projects'); } @@ -183,15 +121,6 @@ sub create : Path('/create-project') { } -sub create_submit : Path('/create-project/submit') { - my ($self, $c) = @_; - - $c->stash->{projectName} = trim $c->stash->{params}->{name}; - - project_PUT($self, $c); -} - - sub create_jobset : Chained('projectChain') PathPart('create-jobset') Args(0) { my ($self, $c) = @_; @@ -200,15 +129,7 @@ sub create_jobset : Chained('projectChain') PathPart('create-jobset') Args(0) { $c->stash->{template} = 'edit-jobset.tt'; $c->stash->{create} = 1; $c->stash->{edit} = 1; -} - - -sub create_jobset_submit : Chained('projectChain') PathPart('create-jobset/submit') Args(0) { - my ($self, $c) = @_; - - $c->stash->{jobsetName} = trim $c->stash->{params}->{name}; - - Hydra::Controller::Jobset::jobset_PUT($self, $c); + $c->stash->{totalShares} = getTotalShares($c->model('DB')->schema); } @@ -218,15 +139,18 @@ sub updateProject { my $owner = $project->owner; if ($c->check_user_roles('admin') and defined $c->stash->{params}->{owner}) { $owner = trim $c->stash->{params}->{owner}; - error($c, "Invalid owner: $owner") - unless defined $c->model('DB::Users')->find({username => $owner}); + error($c, "The user name ‘$owner’ does not exist.") + unless defined $c->model('DB::Users')->find($owner); } - my $projectName = $c->stash->{projectName} or $project->name; - error($c, "Invalid project name: ‘$projectName’") if $projectName !~ /^$projectNameRE$/; + my $projectName = $c->stash->{params}->{name}; + error($c, "Invalid project identifier ‘$projectName’.") if $projectName !~ /^$projectNameRE$/; + + error($c, "Cannot rename project to ‘$projectName’ since that identifier is already taken.") + if $projectName ne $project->name && defined $c->model('DB::Projects')->find($projectName); my $displayName = trim $c->stash->{params}->{displayname}; - error($c, "Invalid display name: $displayName") if $displayName eq ""; + error($c, "You must specify a display name.") if $displayName eq ""; $project->update( { name => $projectName @@ -244,10 +168,6 @@ sub updateProject { sub get_builds : Chained('projectChain') PathPart('') CaptureArgs(0) { my ($self, $c) = @_; $c->stash->{allBuilds} = $c->stash->{project}->builds; - $c->stash->{jobStatus} = $c->model('DB')->resultset('JobStatusForProject') - ->search({}, {bind => [$c->stash->{project}->name]}); - $c->stash->{allJobsets} = $c->stash->{project}->jobsets; - $c->stash->{allJobs} = $c->stash->{project}->jobs; $c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForProject') ->search({}, {bind => [$c->stash->{project}->name]}); $c->stash->{channelBaseName} = $c->stash->{project}->name; diff --git a/src/lib/Hydra/Controller/Release.pm b/src/lib/Hydra/Controller/Release.pm index 126cad6f..a4fb61c9 100644 --- a/src/lib/Hydra/Controller/Release.pm +++ b/src/lib/Hydra/Controller/Release.pm @@ -38,7 +38,7 @@ sub updateRelease { , description => trim $c->request->params->{description} }); - $release->releasemembers->delete_all; + $release->releasemembers->delete; foreach my $param (keys %{$c->request->params}) { next unless $param =~ /^member-(\d+)-description$/; my $buildId = $1; @@ -72,7 +72,7 @@ sub submit : Chained('release') PathPart('submit') Args(0) { txn_do($c->model('DB')->schema, sub { updateRelease($c, $c->stash->{release}); }); - $c->res->redirect($c->uri_for($self->action_for("project"), + $c->res->redirect($c->uri_for($self->action_for("view"), [$c->stash->{project}->name, $c->stash->{release}->name])); } } diff 
--git a/src/lib/Hydra/Controller/Root.pm b/src/lib/Hydra/Controller/Root.pm index 7f1a28ae..cbdba121 100644 --- a/src/lib/Hydra/Controller/Root.pm +++ b/src/lib/Hydra/Controller/Root.pm @@ -8,6 +8,7 @@ use Hydra::Helper::CatalystUtils; use Digest::SHA1 qw(sha1_hex); use Nix::Store; use Nix::Config; +use Encode; # Put this controller at top-level. __PACKAGE__->config->{namespace} = ''; @@ -33,6 +34,7 @@ sub begin :Private { $c->stash->{inputTypes} = { 'string' => 'String value', 'boolean' => 'Boolean', + 'nix' => 'Nix expression', 'build' => 'Build output', 'sysbuild' => 'Build output (same system)' }; @@ -54,12 +56,8 @@ sub index :Path :Args(0) { $c->stash->{template} = 'overview.tt'; $c->stash->{projects} = [$c->model('DB::Projects')->search(isAdmin($c) ? {} : {hidden => 0}, {order_by => 'name'})]; $c->stash->{newsItems} = [$c->model('DB::NewsItems')->search({}, { order_by => ['createtime DESC'], rows => 5 })]; - $self->status_ok( - $c, - entity => [$c->model('DB::Projects')->search(isAdmin($c) ? {} : {hidden => 0}, { - order_by => 'name', - columns => [ 'name', 'displayname' ] - })] + $self->status_ok($c, + entity => $c->stash->{projects} ); } @@ -72,8 +70,7 @@ sub queue_GET { $c->stash->{flashMsg} //= $c->flash->{buildMsg}; $self->status_ok( $c, - entity => [$c->model('DB::Builds')->search( - {finished => 0}, { join => ['project'], order_by => ["priority DESC", "id"], columns => [@buildListColumns], '+select' => ['project.enabled'], '+as' => ['enabled'] })] + entity => [$c->model('DB::Builds')->search({finished => 0}, { order_by => ["priority DESC", "id"]})] ); } @@ -100,22 +97,7 @@ sub status_GET { $c, entity => [ $c->model('DB::BuildSteps')->search( { 'me.busy' => 1, 'build.finished' => 0, 'build.busy' => 1 }, - { join => { build => [ 'project', 'job', 'jobset' ] }, - columns => [ - 'me.machine', - 'me.system', - 'me.stepnr', - 'me.drvpath', - 'me.starttime', - 'build.id', - { - 'build.project.name' => 'project.name', - 'build.jobset.name' => 'jobset.name', - 'build.job.name' => 'job.name' - } - ], - order_by => [ 'machine' ] - } + { order_by => [ 'machine' ], join => [ 'build' ] } ) ] ); } @@ -150,11 +132,9 @@ sub machines :Local Args(0) { sub get_builds : Chained('/') PathPart('') CaptureArgs(0) { my ($self, $c) = @_; $c->stash->{allBuilds} = $c->model('DB::Builds'); - $c->stash->{jobStatus} = $c->model('DB')->resultset('JobStatus'); - $c->stash->{allJobsets} = $c->model('DB::Jobsets'); - $c->stash->{allJobs} = $c->model('DB::Jobs'); $c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceeded'); $c->stash->{channelBaseName} = "everything"; + $c->stash->{total} = $c->model('DB::NrBuilds')->find('finished')->count; } @@ -213,35 +193,32 @@ sub default :Path { sub end : ActionClass('RenderView') { my ($self, $c) = @_; + my @errors = map { encode_utf8($_); } @{$c->error}; + if (defined $c->stash->{json}) { - if (scalar @{$c->error}) { - $c->stash->{json}->{error} = join "\n", @{$c->error}; + if (scalar @errors) { + $c->stash->{json}->{error} = join "\n", @errors; $c->clear_errors; } $c->forward('View::JSON'); } - if (scalar @{$c->error}) { - $c->stash->{resource} = { errors => "$c->error" }; + elsif (scalar @{$c->error}) { + $c->stash->{resource} = { error => join "\n", @{$c->error} }; $c->stash->{template} = 'error.tt'; - $c->stash->{errors} = $c->error; + $c->stash->{errors} = [@errors]; $c->response->status(500) if $c->response->status == 200; if ($c->response->status >= 300) { $c->stash->{httpStatus} = $c->response->status . " " . 
HTTP::Status::status_message($c->response->status); } $c->clear_errors; - } elsif (defined $c->stash->{resource} and - (ref $c->stash->{resource} eq ref {}) and - defined $c->stash->{resource}->{error}) { - $c->stash->{template} = 'error.tt'; - $c->stash->{httpStatus} = - $c->response->status . " " . HTTP::Status::status_message($c->response->status); } - $c->forward('serialize'); + $c->forward('serialize') if defined $c->stash->{resource}; } + sub serialize : ActionClass('Serialize') { } @@ -282,6 +259,7 @@ sub narinfo :LocalRegex('^([a-z0-9]+).narinfo$') :Args(0) { my $path = queryPathFromHashPart($hash); if (!$path) { + $c->response->status(404); $c->response->content_type('text/plain'); $c->stash->{plain}->{data} = "does not exist\n"; $c->forward('Hydra::View::Plain'); diff --git a/src/lib/Hydra/Controller/User.pm b/src/lib/Hydra/Controller/User.pm index cb852830..c6bc3768 100644 --- a/src/lib/Hydra/Controller/User.pm +++ b/src/lib/Hydra/Controller/User.pm @@ -182,15 +182,11 @@ sub currentUser :Path('/current-user') :ActionClass('REST') { } sub currentUser_GET { my ($self, $c) = @_; - requireLogin($c) if !$c->user_exists; + requireUser($c); $self->status_ok( $c, - entity => $c->model('DB::Users')->find({ 'me.username' => $c->user->username}, { - columns => [ "me.fullname", "me.emailaddress", "me.username", "userroles.role" ] - , join => [ "userroles" ] - , collapse => 1 - }) + entity => $c->model("DB::Users")->find($c->user->username) ); } @@ -198,9 +194,9 @@ sub currentUser_GET { sub user :Chained('/') PathPart('user') CaptureArgs(1) { my ($self, $c, $userName) = @_; - requireLogin($c) if !$c->user_exists; + requireUser($c); - error($c, "You do not have permission to edit other users.") + accessDenied($c, "You do not have permission to edit other users.") if $userName ne $c->user->username && !isAdmin($c); $c->stash->{user} = $c->model('DB::Users')->find($userName) @@ -287,7 +283,7 @@ sub edit_POST { } if (isAdmin($c)) { - $user->userroles->delete_all; + $user->userroles->delete; $user->userroles->create({ role => $_}) foreach paramToList($c, "roles"); } @@ -303,4 +299,19 @@ sub edit_POST { } +sub dashboard :Chained('user') :Args(0) { + my ($self, $c) = @_; + $c->stash->{template} = 'dashboard.tt'; + + # Get the N most recent builds for each starred job. 
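+    # (N is currently fixed at 20 via the ‘rows’ attribute below.)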
+ $c->stash->{starredJobs} = []; + foreach my $j ($c->stash->{user}->starredjobs->search({}, { order_by => ['project', 'jobset', 'job'] })) { + my @builds = $j->job->builds->search( + { }, + { rows => 20, order_by => "id desc" }); + push $c->stash->{starredJobs}, { job => $j->job, builds => [@builds] }; + } +} + + 1; diff --git a/src/lib/Hydra/Controller/View.pm b/src/lib/Hydra/Controller/View.pm index 1f8f7847..88e78cb5 100644 --- a/src/lib/Hydra/Controller/View.pm +++ b/src/lib/Hydra/Controller/View.pm @@ -41,7 +41,7 @@ sub updateView { { name => $viewName , description => trim $c->request->params->{description} }); - $view->viewjobs->delete_all; + $view->viewjobs->delete; foreach my $param (keys %{$c->request->params}) { next unless $param =~ /^job-(\d+)-name$/; diff --git a/src/lib/Hydra/Helper/AddBuilds.pm b/src/lib/Hydra/Helper/AddBuilds.pm index b2106ffa..a7e5c9c8 100644 --- a/src/lib/Hydra/Helper/AddBuilds.pm +++ b/src/lib/Hydra/Helper/AddBuilds.pm @@ -2,6 +2,7 @@ package Hydra::Helper::AddBuilds; use strict; use feature 'switch'; +use utf8; use XML::Simple; use IPC::Run; use Nix::Store; @@ -15,6 +16,7 @@ use File::Path; use File::Temp; use File::Spec; use File::Slurp; +use Hydra::Helper::PluginHooks; our @ISA = qw(Exporter); our @EXPORT = qw( @@ -86,10 +88,7 @@ sub fetchInputBuild { { order_by => "me.id DESC", rows => 1 , where => \ attrsToSQL($attrs, "me.id") }); - if (!defined $prevBuild || !isValidPath(getMainOutput($prevBuild)->path)) { - print STDERR "input `", $name, "': no previous build available\n"; - return undef; - } + return () if !defined $prevBuild || !isValidPath(getMainOutput($prevBuild)->path); #print STDERR "input `", $name, "': using build ", $prevBuild->id, "\n"; @@ -148,9 +147,8 @@ sub fetchInputSystemBuild { return @inputs; } - sub fetchInput { - my ($plugins, $db, $project, $jobset, $name, $type, $value) = @_; + my ($plugins, $db, $project, $jobset, $name, $type, $value, $emailresponsible) = @_; my @inputs; if ($type eq "build") { @@ -159,7 +157,7 @@ sub fetchInput { elsif ($type eq "sysbuild") { @inputs = fetchInputSystemBuild($db, $project, $jobset, $name, $value); } - elsif ($type eq "string") { + elsif ($type eq "string" || $type eq "nix") { die unless defined $value; @inputs = { value => $value }; } @@ -170,7 +168,7 @@ sub fetchInput { else { my $found = 0; foreach my $plugin (@{$plugins}) { - @inputs = $plugin->fetchInput($type, $name, $value); + @inputs = $plugin->fetchInput($type, $name, $value, $project, $jobset); if (defined $inputs[0]) { $found = 1; last; @@ -179,7 +177,10 @@ sub fetchInput { die "input `$name' has unknown type `$type'." 
unless $found; } - $_->{type} = $type foreach @inputs; + foreach my $input (@inputs) { + $input->{type} = $type; + $input->{emailresponsible} = $emailresponsible; + } return @inputs; } @@ -243,6 +244,9 @@ sub inputsToArgs { when ("boolean") { push @res, "--arg", $input, booleanToString($exprType, $alt->{value}); } + when ("nix") { + push @res, "--arg", $input, $alt->{value}; + } default { push @res, "--arg", $input, buildInputToString($exprType, $alt); } @@ -287,17 +291,25 @@ sub evalJobs { my $validJob = 1; foreach my $arg (@{$job->{arg}}) { my $input = $inputInfo->{$arg->{name}}->[$arg->{altnr}]; - if ($input->{type} eq "sysbuild" && $input->{system} ne $job->{system}) { - $validJob = 0; - } - } - if ($validJob) { - push(@filteredJobs, $job); + $validJob = 0 if $input->{type} eq "sysbuild" && $input->{system} ne $job->{system}; } + push(@filteredJobs, $job) if $validJob; } $jobs->{job} = \@filteredJobs; - return ($jobs, $nixExprInput); + my %jobNames; + my $errors; + foreach my $job (@{$jobs->{job}}) { + $jobNames{$job->{jobName}}++; + if ($jobNames{$job->{jobName}} == 2) { + $errors .= "warning: there are multiple jobs named ‘$job->{jobName}’; support for this will go away soon!\n\n"; + } + } + + # Handle utf-8 characters in error messages. No idea why this works. + utf8::decode($_->{msg}) foreach @{$jobs->{error}}; + + return ($jobs, $nixExprInput, $errors); } @@ -389,7 +401,7 @@ sub getPrevJobsetEval { # Check whether to add the build described by $buildInfo. sub checkBuild { - my ($db, $project, $jobset, $inputInfo, $nixExprInput, $buildInfo, $buildIds, $prevEval, $jobOutPathMap) = @_; + my ($db, $jobset, $inputInfo, $nixExprInput, $buildInfo, $buildMap, $prevEval, $jobOutPathMap, $plugins) = @_; my @outputNames = sort keys %{$buildInfo->{output}}; die unless scalar @outputNames; @@ -410,9 +422,7 @@ sub checkBuild { my $build; txn_do($db, sub { - my $job = $jobset->jobs->update_or_create( - { name => $jobName - }); + my $job = $jobset->jobs->update_or_create({ name => $jobName }); # Don't add a build that has already been scheduled for this # job, or has been built but is still a "current" build for @@ -433,19 +443,19 @@ sub checkBuild { # semantically unnecessary (because they're implied by # the eval), but they give a factor 1000 speedup on # the Nixpkgs jobset with PostgreSQL. - { project => $project->name, jobset => $jobset->name, job => $job->name, + { project => $jobset->project->name, jobset => $jobset->name, job => $jobName, name => $firstOutputName, path => $firstOutputPath }, { rows => 1, columns => ['id'], join => ['buildoutputs'] }); if (defined $prevBuild) { print STDERR " already scheduled/built as build ", $prevBuild->id, "\n"; - $buildIds->{$prevBuild->id} = 0; + $buildMap->{$prevBuild->id} = { id => $prevBuild->id, jobName => $jobName, new => 0, drvPath => $drvPath }; return; } } # Prevent multiple builds with the same (job, outPath) from # being added. - my $prev = $$jobOutPathMap{$job->name . "\t" . $firstOutputPath}; + my $prev = $$jobOutPathMap{$jobName . "\t" . $firstOutputPath}; if (defined $prev) { print STDERR " already scheduled as build ", $prev, "\n"; return; @@ -511,12 +521,13 @@ sub checkBuild { $build->buildoutputs->create({ name => $_, path => $buildInfo->{output}->{$_}->{path} }) foreach @outputNames; - $buildIds->{$build->id} = 1; - $$jobOutPathMap{$job->name . "\t" . $firstOutputPath} = $build->id; + $buildMap->{$build->id} = { id => $build->id, jobName => $jobName, new => 1, drvPath => $drvPath }; + $$jobOutPathMap{$jobName . "\t" . 
$firstOutputPath} = $build->id; if ($build->iscachedbuild) { print STDERR " marked as cached build ", $build->id, "\n"; addBuildProducts($db, $build); + notifyBuildFinished($plugins, $build, []); } else { print STDERR " added to queue as build ", $build->id, "\n"; } @@ -545,6 +556,7 @@ sub checkBuild { , uri => $input->{uri} , revision => $input->{revision} , value => $input->{value} + , emailresponsible => $input->{emailresponsible} , dependency => $input->{id} , path => $input->{storePath} || "" # !!! temporary hack , sha256hash => $input->{sha256hash} @@ -556,29 +568,4 @@ sub checkBuild { }; -sub restartBuild { - my ($db, $build) = @_; - - txn_do($db, sub { - my @paths; - push @paths, $build->drvpath; - push @paths, $_->drvpath foreach $build->buildsteps; - - my $r = `nix-store --clear-failed-paths @paths`; - - $build->update( - { finished => 0 - , busy => 0 - , locker => "" - , iscachedbuild => 0 - }); - - $build->buildproducts->delete_all; - - # Reset the stats for the evals to which this build belongs. - # !!! Should do this in a trigger. - foreach my $m ($build->jobsetevalmembers->all) { - $m->eval->update({nrsucceeded => undef}); - } - }); -} +1; diff --git a/src/lib/Hydra/Helper/CatalystUtils.pm b/src/lib/Hydra/Helper/CatalystUtils.pm index 2cbb601a..e83bd0ea 100644 --- a/src/lib/Hydra/Helper/CatalystUtils.pm +++ b/src/lib/Hydra/Helper/CatalystUtils.pm @@ -15,8 +15,8 @@ use feature qw/switch/; our @ISA = qw(Exporter); our @EXPORT = qw( getBuild getPreviousBuild getNextBuild getPreviousSuccessfulBuild - error notFound - requireLogin requireProjectOwner requireAdmin requirePost isAdmin isProjectOwner + error notFound accessDenied + forceLogin requireUser requireProjectOwner requireAdmin requirePost isAdmin isProjectOwner trim getLatestFinishedEval sendEmail @@ -27,6 +27,7 @@ our @EXPORT = qw( parseJobsetName showJobName showStatus + getResponsibleAuthors ); @@ -102,6 +103,12 @@ sub notFound { } +sub accessDenied { + my ($c, $msg) = @_; + error($c, $msg, 403); +} + + sub backToReferer { my ($c) = @_; $c->response->redirect($c->session->{referer} || $c->uri_for('/')); @@ -110,26 +117,33 @@ sub backToReferer { } -sub requireLogin { +sub forceLogin { my ($c) = @_; $c->session->{referer} = $c->request->uri; - error($c, "This page requires you to sign in.", 403); + accessDenied($c, "This page requires you to sign in."); +} + + +sub requireUser { + my ($c) = @_; + forceLogin($c) if !$c->user_exists; } sub isProjectOwner { my ($c, $project) = @_; - - return $c->user_exists && ($c->check_user_roles('admin') || $c->user->username eq $project->owner->username || defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username })); + return + $c->user_exists && + (isAdmin($c) || + $c->user->username eq $project->owner->username || + defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username })); } sub requireProjectOwner { my ($c, $project) = @_; - - requireLogin($c) if !$c->user_exists; - - error($c, "Only the project members or administrators can perform this operation.", 403) + requireUser($c); + accessDenied($c, "Only the project members or administrators can perform this operation.") unless isProjectOwner($c, $project); } @@ -142,8 +156,8 @@ sub isAdmin { sub requireAdmin { my ($c) = @_; - requireLogin($c) if !$c->user_exists; - error($c, "Only administrators can perform this operation.", 403) + requireUser($c); + accessDenied($c, "Only administrators can perform this operation.") unless isAdmin($c); } @@ -206,12 
+220,12 @@ sub paramToList { # Security checking of filenames. -Readonly our $pathCompRE => "(?:[A-Za-z0-9-\+\._\$][A-Za-z0-9-\+\._\$]*)"; +Readonly our $pathCompRE => "(?:[A-Za-z0-9-\+\._\$][A-Za-z0-9-\+\._\$:]*)"; Readonly our $relPathRE => "(?:$pathCompRE(?:/$pathCompRE)*)"; Readonly our $relNameRE => "(?:[A-Za-z0-9-_][A-Za-z0-9-\._]*)"; Readonly our $attrNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)"; Readonly our $projectNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)"; -Readonly our $jobsetNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)"; +Readonly our $jobsetNameRE => "(?:[A-Za-z_][A-Za-z0-9-_\.]*)"; Readonly our $jobNameRE => "(?:$attrNameRE(?:\\.$attrNameRE)*)"; Readonly our $systemRE => "(?:[a-z0-9_]+-[a-z0-9_]+)"; Readonly our $userNameRE => "(?:[a-z][a-z0-9_\.]*)"; @@ -246,4 +260,42 @@ sub showStatus { } +# Determine who broke/fixed the build. +sub getResponsibleAuthors { + my ($build, $plugins) = @_; + + my $prevBuild = getPreviousBuild($build); + + my $nrCommits = 0; + my %authors; + my @emailable_authors; + + if ($prevBuild) { + foreach my $curInput ($build->buildinputs_builds) { + next unless ($curInput->type eq "git" || $curInput->type eq "hg"); + my $prevInput = $prevBuild->buildinputs_builds->find({ name => $curInput->name }); + next unless defined $prevInput; + + next if $curInput->type ne $prevInput->type; + next if $curInput->uri ne $prevInput->uri; + next if $curInput->revision eq $prevInput->revision; + + my @commits; + foreach my $plugin (@{$plugins}) { + push @commits, @{$plugin->getCommits($curInput->type, $curInput->uri, $prevInput->revision, $curInput->revision)}; + } + + foreach my $commit (@commits) { + #print STDERR "$commit->{revision} by $commit->{author}\n"; + $authors{$commit->{author}} = $commit->{email}; + push @emailable_authors, $commit->{email} if $curInput->emailresponsible; + $nrCommits++; + } + } + } + + return (\%authors, $nrCommits, \@emailable_authors); +} + + 1; diff --git a/src/lib/Hydra/Helper/Nix.pm b/src/lib/Hydra/Helper/Nix.pm index 571788b4..a7fa06ed 100644 --- a/src/lib/Hydra/Helper/Nix.pm +++ b/src/lib/Hydra/Helper/Nix.pm @@ -7,6 +7,7 @@ use File::Basename; use Config::General; use Hydra::Helper::CatalystUtils; use Hydra::Model::DB; +use Nix::Store; our @ISA = qw(Exporter); our @EXPORT = qw( @@ -16,11 +17,13 @@ our @EXPORT = qw( getPrimaryBuildsForView getPrimaryBuildTotal getViewResult getLatestSuccessfulViewResult - jobsetOverview removeAsciiEscapes getDrvLogPath logContents + jobsetOverview removeAsciiEscapes getDrvLogPath findLog logContents getMainOutput getEvals getMachines pathIsInsidePrefix - captureStdoutStderr); + captureStdoutStderr run grab + getTotalShares + cancelBuilds restartBuilds); sub getHydraHome { @@ -42,11 +45,12 @@ sub getHydraConfig { # doesn't work. sub txn_do { my ($db, $coderef) = @_; + my $res; while (1) { eval { - $db->txn_do($coderef); + $res = $db->txn_do($coderef); }; - last if !$@; + return $res if !$@; die $@ unless $@ =~ "database is locked"; } } @@ -253,21 +257,46 @@ sub getLatestSuccessfulViewResult { sub getDrvLogPath { my ($drvPath) = @_; my $base = basename $drvPath; - my $fn = - ($ENV{NIX_LOG_DIR} || "/nix/var/log/nix") . "/drvs/" - . substr($base, 0, 2) . "/" - . substr($base, 2); - return $fn if -f $fn; - $fn .= ".bz2"; - return $fn if -f $fn; + my $bucketed = substr($base, 0, 2) . "/" . substr($base, 2); + my $fn = ($ENV{NIX_LOG_DIR} || "/nix/var/log/nix") . "/drvs/"; + for ($fn . $bucketed . ".bz2", $fn . $bucketed, $fn . $base . ".bz2", $fn . 
$base) { + return $_ if (-f $_); + } + return undef; +} + + +# Find the log of the derivation denoted by $drvPath. It it doesn't +# exist, try other derivations that produced its outputs (@outPaths). +sub findLog { + my ($c, $drvPath, @outPaths) = @_; + + if (defined $drvPath) { + my $logPath = getDrvLogPath($drvPath); + return $logPath if defined $logPath; + } + + return undef if scalar @outPaths == 0; + + my @steps = $c->model('DB::BuildSteps')->search( + { path => { -in => [@outPaths] } }, + { select => ["drvpath"] + , distinct => 1 + , join => "buildstepoutputs" + }); + + foreach my $step (@steps) { + next unless defined $step->drvpath; + my $logPath = getDrvLogPath($step->drvpath); + return $logPath if defined $logPath; + } + return undef; } sub logContents { - my ($drvPath, $tail) = @_; - my $logPath = getDrvLogPath($drvPath); - die unless defined $logPath; + my ($logPath, $tail) = @_; my $cmd; if ($logPath =~ /.bz2$/) { $cmd = "bzip2 -d < $logPath"; @@ -381,7 +410,7 @@ sub getEvals { } sub getMachines { - my $machinesConf = $ENV{"NIX_REMOTE_SYSTEMS"} || "/etc/nix.machines"; + my $machinesConf = $ENV{"NIX_REMOTE_SYSTEMS"} || "/etc/nix/machines"; # Read the list of machines. my %machines = (); @@ -472,4 +501,102 @@ sub captureStdoutStderr { } +sub run { + my (%args) = @_; + my $res = { stdout => "", stderr => "" }; + my $stdin = ""; + + eval { + local $SIG{ALRM} = sub { die "timeout\n" }; # NB: \n required + alarm $args{timeout} if defined $args{timeout}; + my @x = ($args{cmd}, \$stdin, \$res->{stdout}); + push @x, \$res->{stderr} if $args{grabStderr} // 1; + IPC::Run::run(@x, + init => sub { chdir $args{dir} or die "changing to $args{dir}" if defined $args{dir}; }); + alarm 0; + }; + + if ($@) { + die unless $@ eq "timeout\n"; # propagate unexpected errors + $res->{status} = -1; + $res->{stderr} = "timeout\n"; + } else { + $res->{status} = $?; + chomp $res->{stdout} if $args{chomp} // 0; + } + + return $res; +} + + +sub grab { + my (%args) = @_; + my $res = run(%args, grabStderr => 0); + die "command `@{$args{cmd}}' failed with exit status $res->{status}" if $res->{status}; + return $res->{stdout}; +} + + +sub getTotalShares { + my ($db) = @_; + return $db->resultset('Jobsets')->search( + { 'project.enabled' => 1, 'me.enabled' => { '!=' => 0 } }, + { join => 'project', select => { sum => 'schedulingshares' }, as => 'sum' })->single->get_column('sum'); +} + + +sub cancelBuilds($$) { + my ($db, $builds) = @_; + return txn_do($db, sub { + $builds = $builds->search({ finished => 0, busy => 0 }); + my $n = $builds->count; + my $time = time(); + $builds->update( + { finished => 1, + , iscachedbuild => 0, buildstatus => 4 # = cancelled + , starttime => $time + , stoptime => $time + }); + return $n; + }); +} + + +sub restartBuilds($$) { + my ($db, $builds) = @_; + my $n = 0; + + txn_do($db, sub { + my @paths; + + $builds = $builds->search({ finished => 1 }); + foreach my $build ($builds->all) { + next if !isValidPath($build->drvpath); + push @paths, $build->drvpath; + push @paths, $_->drvpath foreach $build->buildsteps; + + registerRoot $build->drvpath; + + $build->update( + { finished => 0 + , busy => 0 + , locker => "" + , iscachedbuild => 0 + }); + $n++; + + # Reset the stats for the evals to which this build belongs. + # !!! Should do this in a trigger. + $build->jobsetevals->update({nrsucceeded => undef}); + } + + # Clear Nix's negative failure cache. + # FIXME: Add this to the API. 
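+        # With build-failure caching enabled, Nix refuses to retry
+        # derivations it has recorded as failed; clearing those entries lets
+        # the restarted builds run again.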
+ system("nix-store", "--clear-failed-paths", @paths); + }); + + return $n; +} + + 1; diff --git a/src/lib/Hydra/Helper/PluginHooks.pm b/src/lib/Hydra/Helper/PluginHooks.pm new file mode 100644 index 00000000..4000045b --- /dev/null +++ b/src/lib/Hydra/Helper/PluginHooks.pm @@ -0,0 +1,22 @@ +package Hydra::Helper::PluginHooks; + +use strict; +use Exporter; + +our @ISA = qw(Exporter); +our @EXPORT = qw( + notifyBuildFinished); + +sub notifyBuildFinished { + my ($plugins, $build, $dependents) = @_; + foreach my $plugin (@{$plugins}) { + eval { + $plugin->buildFinished($build, $dependents); + }; + if ($@) { + print STDERR "$plugin->buildFinished: $@\n"; + } + } +} + +1; diff --git a/src/lib/Hydra/Plugin.pm b/src/lib/Hydra/Plugin.pm index 8b3df782..4a8ef69e 100644 --- a/src/lib/Hydra/Plugin.pm +++ b/src/lib/Hydra/Plugin.pm @@ -38,7 +38,7 @@ sub supportedInputTypes { # Called to fetch an input of type ‘$type’. ‘$value’ is the input # location, typically the repository URL. sub fetchInput { - my ($self, $type, $name, $value) = @_; + my ($self, $type, $name, $value, $project, $jobset) = @_; return undef; } diff --git a/src/lib/Hydra/Plugin/BazaarInput.pm b/src/lib/Hydra/Plugin/BazaarInput.pm index 5aa4b7a8..26d7217b 100644 --- a/src/lib/Hydra/Plugin/BazaarInput.pm +++ b/src/lib/Hydra/Plugin/BazaarInput.pm @@ -25,21 +25,8 @@ sub fetchInput { my $stdout; my $stderr; - my $cacheDir = getSCMCacheDir . "/bzr"; - mkpath($cacheDir); - my $clonePath = $cacheDir . "/" . sha256_hex($uri); - - if (! -d $clonePath) { - (my $res, $stdout, $stderr) = captureStdoutStderr(600, "bzr", "branch", $uri, $clonePath); - die "error cloning bazaar branch at `$uri':\n$stderr" if $res; - } - - chdir $clonePath or die $!; - (my $res, $stdout, $stderr) = captureStdoutStderr(600, "bzr", "pull"); - die "error pulling latest change bazaar branch at `$uri':\n$stderr" if $res; - # First figure out the last-modified revision of the URI. - my @cmd = (["bzr", "revno"], "|", ["sed", 's/^ *\([0-9]*\).*/\1/']); + my @cmd = (["bzr", "revno", $uri], "|", ["sed", 's/^ *\([0-9]*\).*/\1/']); IPC::Run::run(@cmd, \$stdout, \$stderr); die "cannot get head revision of Bazaar branch at `$uri':\n$stderr" if $?; @@ -61,7 +48,7 @@ sub fetchInput { $ENV{"NIX_PREFETCH_BZR_LEAVE_DOT_BZR"} = $type eq "bzr-checkout" ? "1" : "0"; (my $res, $stdout, $stderr) = captureStdoutStderr(600, - "nix-prefetch-bzr", $clonePath, $revision); + "nix-prefetch-bzr", $uri, $revision); die "cannot check out Bazaar branch `$uri':\n$stderr" if $res; ($sha256, $storePath) = split ' ', $stdout; diff --git a/src/lib/Hydra/Plugin/DarcsInput.pm b/src/lib/Hydra/Plugin/DarcsInput.pm new file mode 100644 index 00000000..c6227123 --- /dev/null +++ b/src/lib/Hydra/Plugin/DarcsInput.pm @@ -0,0 +1,104 @@ +package Hydra::Plugin::DarcsInput; + +use strict; +use parent 'Hydra::Plugin'; +use Digest::SHA qw(sha256_hex); +use File::Path; +use Hydra::Helper::Nix; +use Nix::Store; + +sub supportedInputTypes { + my ($self, $inputTypes) = @_; + $inputTypes->{'darcs'} = 'Darcs checkout'; +} + +sub fetchInput { + my ($self, $type, $name, $uri) = @_; + + return undef if $type ne "darcs"; + + my $timestamp = time; + my $sha256; + my $storePath; + my $revCount; + + my $cacheDir = getSCMCacheDir . "/git"; + mkpath($cacheDir); + my $clonePath = $cacheDir . "/" . sha256_hex($uri); + $uri =~ s|^file://||; # darcs wants paths, not file:// uris + + my $stdout = ""; my $stderr = ""; my $res; + if (! -d $clonePath) { + # Clone the repository. 
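+        # run() (from Hydra::Helper::Nix) executes the command with the
+        # given timeout and working directory and returns a hash with
+        # ‘status’, ‘stdout’ and ‘stderr’ fields.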
+ $res = run(timeout => 600, + cmd => ["darcs", "get", "--lazy", $uri, $clonePath], + dir => $ENV{"TMPDIR"}); + die "Error getting darcs repo at `$uri':\n$stderr" if $res->{status}; + } + + # Update the repository to match $uri. + ($res, $stdout, $stderr) = captureStdoutStderr(600, + ("darcs", "pull", "-a", "--repodir", $clonePath, "$uri")); + die "Error fetching latest change from darcs repo at `$uri':\n$stderr" if $res; + + ($res, $stdout, $stderr) = captureStdoutStderr(600, + ("darcs", "changes", "--last", "1", "--xml", "--repodir", $clonePath)); + die "Error getting revision ID of darcs repo at `$uri':\n$stderr" if $res; + + $stdout =~ /^{db}->resultset('CachedDarcsInputs')->search( + {uri => $uri, revision => $revision}, + {rows => 1}); + + if (defined $cachedInput && isValidPath($cachedInput->storepath)) { + $storePath = $cachedInput->storepath; + $sha256 = $cachedInput->sha256hash; + $revision = $cachedInput->revision; + $revCount = $cachedInput->revcount; + } else { + # Then download this revision into the store. + print STDERR "checking out darcs repo $uri\n"; + + my $tmpDir = File::Temp->newdir("hydra-darcs-export.XXXXXX", CLEANUP => 1, TMPDIR => 1) or die; + (system "darcs", "get", "--lazy", $clonePath, "$tmpDir/export", "--quiet", + "--to-match", "hash $revision") == 0 + or die "darcs export failed"; + $revCount = `darcs changes --count --repodir $tmpDir/export`; chomp $revCount; + die "darcs changes --count failed" if $? != 0; + + system "rm", "-rf", "$tmpDir/export/_darcs"; + $storePath = addToStore("$tmpDir/export", 1, "sha256"); + $sha256 = queryPathHash($storePath); + $sha256 =~ s/sha256://; + + txn_do($self->{db}, sub { + $self->{db}->resultset('CachedDarcsInputs')->update_or_create( + { uri => $uri + , revision => $revision + , revcount => $revCount + , sha256hash => $sha256 + , storepath => $storePath + }); + }); + } + + $revision =~ /^([0-9]+)/; + my $shortRev = $1; + + return + { uri => $uri + , storePath => $storePath + , sha256hash => $sha256 + , revision => $revision + , revCount => int($revCount) + , shortRev => $shortRev + }; +} + +1; diff --git a/src/lib/Hydra/Plugin/EmailNotification.pm b/src/lib/Hydra/Plugin/EmailNotification.pm index b373175f..3a478a60 100644 --- a/src/lib/Hydra/Plugin/EmailNotification.pm +++ b/src/lib/Hydra/Plugin/EmailNotification.pm @@ -28,6 +28,10 @@ The following dependent jobs also failed: * [% showJobName(b) %] ([% baseurl %]/build/[% b.id %]) [% END -%] +[% END -%] +[% IF nrCommits > 0 -%] +This is likely due to [% IF nrCommits > 1 -%][% nrCommits %] commits by [% END -%][% authorList %]. + [% END -%] [% IF build.buildstatus == 0 -%] Yay! @@ -66,7 +70,7 @@ sub buildFinished { my $to = $b->jobset->emailoverride ne "" ? $b->jobset->emailoverride : $b->maintainers; - foreach my $address (split ",", $to) { + foreach my $address (split ",", ($to // "")) { $address = trim $address; $addresses{$address} //= { builds => [] }; @@ -74,6 +78,14 @@ sub buildFinished { } } + my ($authors, $nrCommits, $emailable_authors) = getResponsibleAuthors($build, $self->{plugins}); + my $authorList; + if (scalar keys %{$authors} > 0) { + my @x = map { "$_ <$authors->{$_}>" } (sort keys %{$authors}); + $authorList = join(" or ", scalar @x > 1 ? join(", ", @x[0..scalar @x - 2]): (), $x[-1]); + $addresses{$_} = { builds => [ $build ] } foreach (@{$emailable_authors}); + } + # Send an email to each interested address. # !!! should use the Template Toolkit here. 
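+    # With the template change above, a failure notification can now include
+    # a line such as “This is likely due to 3 commits by Alice <alice@example.org>,
+    # Bob <bob@example.org> or Carol <carol@example.org>.” (names and addresses
+    # here are purely illustrative).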
@@ -89,6 +101,8 @@ sub buildFinished { , baseurl => $self->{config}->{'base_uri'} || "http://localhost:3000" , showJobName => \&showJobName, showStatus => \&showStatus , showSystem => index($build->job->name, $build->system) == -1 + , nrCommits => $nrCommits + , authorList => $authorList }; my $body; diff --git a/src/lib/Hydra/Plugin/GitInput.pm b/src/lib/Hydra/Plugin/GitInput.pm index fc267e70..53f373d0 100644 --- a/src/lib/Hydra/Plugin/GitInput.pm +++ b/src/lib/Hydra/Plugin/GitInput.pm @@ -20,39 +20,34 @@ sub _cloneRepo { mkpath($cacheDir); my $clonePath = $cacheDir . "/" . sha256_hex($uri); - my $stdout = ""; my $stderr = ""; my $res; + my $res; if (! -d $clonePath) { # Clone everything and fetch the branch. # TODO: Optimize the first clone by using "git init $clonePath" and "git remote add origin $uri". - ($res, $stdout, $stderr) = captureStdoutStderr(600, "git", "clone", "--branch", $branch, $uri, $clonePath); - die "error cloning git repo at `$uri':\n$stderr" if $res; + $res = run(cmd => ["git", "clone", "--branch", $branch, $uri, $clonePath], timeout => 600); + die "error cloning git repo at `$uri':\n$res->{stderr}" if $res->{status}; } - chdir $clonePath or die $!; # !!! urgh, shouldn't do a chdir - # This command forces the update of the local branch to be in the same as # the remote branch for whatever the repository state is. This command mirrors # only one branch of the remote repository. - ($res, $stdout, $stderr) = captureStdoutStderr(600, - "git", "fetch", "-fu", "origin", "+$branch:$branch"); - ($res, $stdout, $stderr) = captureStdoutStderr(600, - "git", "fetch", "-fu", "origin") if $res; - die "error fetching latest change from git repo at `$uri':\n$stderr" if $res; + $res = run(cmd => ["git", "fetch", "-fu", "origin", "+$branch:$branch"], dir => $clonePath, timeout => 600); + $res = run(cmd => ["git", "fetch", "-fu", "origin"], dir => $clonePath, timeout => 600) if $res->{status}; + die "error fetching latest change from git repo at `$uri':\n$res->{stderr}" if $res->{status}; # If deepClone is defined, then we look at the content of the repository # to determine if this is a top-git branch. if (defined $deepClone) { # Checkout the branch to look at its content. - ($res, $stdout, $stderr) = captureStdoutStderr(600, "git", "checkout", "$branch"); - die "error checking out Git branch '$branch' at `$uri':\n$stderr" if $res; + $res = run(cmd => ["git", "checkout", "$branch"], dir => $clonePath); + die "error checking out Git branch '$branch' at `$uri':\n$res->{stderr}" if $res->{status}; if (-f ".topdeps") { # This is a TopGit branch. Fetch all the topic branches so # that builders can run "tg patch" and similar. - ($res, $stdout, $stderr) = captureStdoutStderr(600, - "tg", "remote", "--populate", "origin"); - print STDERR "warning: `tg remote --populate origin' failed:\n$stderr" if $res; + $res = run(cmd => ["tg", "remote", "--populate", "origin"], dir => $clonePath, timeout => 600); + print STDERR "warning: `tg remote --populate origin' failed:\n$res->{stderr}" if $res->{status}; } } @@ -64,7 +59,6 @@ sub _parseValue { (my $uri, my $branch, my $deepClone) = split ' ', $value; $branch = defined $branch ? 
$branch : "master"; return ($uri, $branch, $deepClone); - } sub fetchInput { @@ -80,19 +74,13 @@ sub fetchInput { my $sha256; my $storePath; - my ($res, $stdout, $stderr) = captureStdoutStderr(600, - ("git", "rev-parse", "$branch")); - die "error getting revision number of Git branch '$branch' at `$uri':\n$stderr" if $res; - - my ($revision) = split /\n/, $stdout; - die "error getting a well-formated revision number of Git branch '$branch' at `$uri':\n$stdout" + my $revision = grab(cmd => ["git", "rev-parse", "$branch"], dir => $clonePath, chomp => 1); + die "did not get a well-formated revision number of Git branch '$branch' at `$uri'" unless $revision =~ /^[0-9a-fA-F]+$/; - my $ref = "refs/heads/$branch"; - # Some simple caching: don't check a uri/branch/revision more than once. # TODO: Fix case where the branch is reset to a previous commit. - my $cachedInput ; + my $cachedInput; ($cachedInput) = $self->{db}->resultset('CachedGitInputs')->search( {uri => $uri, branch => $branch, revision => $revision}, {rows => 1}); @@ -123,10 +111,7 @@ sub fetchInput { $ENV{"NIX_PREFETCH_GIT_DEEP_CLONE"} = "1"; } - ($res, $stdout, $stderr) = captureStdoutStderr(600, "nix-prefetch-git", $clonePath, $revision); - die "cannot check out Git repository branch '$branch' at `$uri':\n$stderr" if $res; - - ($sha256, $storePath) = split ' ', $stdout; + ($sha256, $storePath) = split ' ', grab(cmd => ["nix-prefetch-git", $clonePath, $revision], chomp => 1); txn_do($self->{db}, sub { $self->{db}->resultset('CachedGitInputs')->update_or_create( @@ -143,12 +128,9 @@ sub fetchInput { # number of commits in the history of this revision (‘revCount’) # the output of git-describe (‘gitTag’), and the abbreviated # revision (‘shortRev’). - my $revCount = `git rev-list $revision | wc -l`; chomp $revCount; - die "git rev-list failed" if $? != 0; - my $gitTag = `git describe --always $revision`; chomp $gitTag; - die "git describe failed" if $? != 0; - my $shortRev = `git rev-parse --short $revision`; chomp $shortRev; - die "git rev-parse failed" if $? != 0; + my $revCount = scalar(split '\n', grab(cmd => ["git", "rev-list", "$revision"], dir => $clonePath)); + my $gitTag = grab(cmd => ["git", "describe", "--always", "$revision"], dir => $clonePath, chomp => 1); + my $shortRev = grab(cmd => ["git", "rev-parse", "--short", "$revision"], dir => $clonePath, chomp => 1); return { uri => $uri @@ -172,9 +154,7 @@ sub getCommits { my $clonePath = $self->_cloneRepo($uri, $branch, $deepClone); - my $out; - IPC::Run::run(["git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], \undef, \$out) - or die "cannot get git logs: $?"; + my $out = grab(cmd => ["git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], dir => $clonePath); my $res = []; foreach my $line (split /\n/, $out) { diff --git a/src/lib/Hydra/Plugin/HipChatNotification.pm b/src/lib/Hydra/Plugin/HipChatNotification.pm index c7b11352..702d6bfb 100644 --- a/src/lib/Hydra/Plugin/HipChatNotification.pm +++ b/src/lib/Hydra/Plugin/HipChatNotification.pm @@ -9,7 +9,7 @@ sub buildFinished { my ($self, $build, $dependents) = @_; my $cfg = $self->{config}->{hipchat}; - my @config = ref $cfg eq "ARRAY" ? @$cfg : ($cfg); + my @config = defined $cfg ? ref $cfg eq "ARRAY" ? @$cfg : ($cfg) : (); my $baseurl = $self->{config}->{'base_uri'} || "http://localhost:3000"; @@ -37,33 +37,7 @@ sub buildFinished { return if scalar keys %rooms == 0; - # Determine who broke/fixed the build. 
- my $prevBuild = getPreviousBuild($build); - - my $nrCommits = 0; - my %authors; - - if ($prevBuild) { - foreach my $curInput ($build->buildinputs_builds) { - next unless $curInput->type eq "git"; - my $prevInput = $prevBuild->buildinputs_builds->find({ name => $curInput->name }); - next unless defined $prevInput; - - next if $curInput->type ne $prevInput->type; - next if $curInput->uri ne $prevInput->uri; - - my @commits; - foreach my $plugin (@{$self->{plugins}}) { - push @commits, @{$plugin->getCommits($curInput->type, $curInput->uri, $prevInput->revision, $curInput->revision)}; - } - - foreach my $commit (@commits) { - print STDERR "$commit->{revision} by $commit->{author}\n"; - $authors{$commit->{author}} = $commit->{email}; - $nrCommits++; - } - } - } + my ($authors, $nrCommits) = getResponsibleAuthors($build, $self->{plugins}); # Send a message to each room. foreach my $roomId (keys %rooms) { @@ -83,16 +57,15 @@ sub buildFinished { $msg .= " (and ${\scalar @deps} others)" if scalar @deps > 0; $msg .= ": " . showStatus($build) . ""; - if (scalar keys %authors > 0) { + if (scalar keys %{$authors} > 0) { # FIXME: HTML escaping - my @x = map { "$_" } (sort keys %authors); + my @x = map { "$_" } (sort keys %{$authors}); $msg .= ", likely due to "; $msg .= "$nrCommits commits by " if $nrCommits > 1; $msg .= join(" or ", scalar @x > 1 ? join(", ", @x[0..scalar @x - 2]) : (), $x[-1]); } print STDERR "sending hipchat notification to room $roomId: $msg\n"; - next; my $ua = LWP::UserAgent->new(); my $resp = $ua->post('https://api.hipchat.com/v1/rooms/message?format=json&auth_token=' . $room->{room}->{token}, { diff --git a/src/lib/Hydra/Plugin/MercurialInput.pm b/src/lib/Hydra/Plugin/MercurialInput.pm index 02ea0ccc..160be7b4 100644 --- a/src/lib/Hydra/Plugin/MercurialInput.pm +++ b/src/lib/Hydra/Plugin/MercurialInput.pm @@ -12,21 +12,33 @@ sub supportedInputTypes { $inputTypes->{'hg'} = 'Mercurial checkout'; } +sub _parseValue { + my ($value) = @_; + (my $uri, my $id) = split ' ', $value; + $id = defined $id ? $id : "default"; + return ($uri, $id); +} + +sub _clonePath { + my ($uri) = @_; + my $cacheDir = getSCMCacheDir . "/hg"; + mkpath($cacheDir); + return $cacheDir . "/" . sha256_hex($uri); +} + sub fetchInput { my ($self, $type, $name, $value) = @_; return undef if $type ne "hg"; - (my $uri, my $id) = split ' ', $value; + (my $uri, my $id) = _parseValue($value); $id = defined $id ? $id : "default"; # init local hg clone my $stdout = ""; my $stderr = ""; - my $cacheDir = getSCMCacheDir . "/hg"; - mkpath($cacheDir); - my $clonePath = $cacheDir . "/" . sha256_hex($uri); + my $clonePath = _clonePath($uri); if (! 
-d $clonePath) { (my $res, $stdout, $stderr) = captureStdoutStderr(600, @@ -85,4 +97,32 @@ sub fetchInput { }; } +sub getCommits { + my ($self, $type, $value, $rev1, $rev2) = @_; + return [] if $type ne "hg"; + + return [] unless $rev1 =~ /^[0-9a-f]+$/; + return [] unless $rev2 =~ /^[0-9a-f]+$/; + + my ($uri, $id) = _parseValue($value); + + my $clonePath = _clonePath($uri); + chdir $clonePath or die $!; + + my $out; + IPC::Run::run(["hg", "log", "--template", "{node|short}\t{author|person}\t{author|email}\n", "-r", "$rev1:$rev2", $clonePath], \undef, \$out) + or die "cannot get mercurial logs: $?"; + + my $res = []; + foreach my $line (split /\n/, $out) { + if ($line ne "") { + my ($revision, $author, $email) = split "\t", $line; + push @$res, { revision => $revision, author => $author, email => $email }; + } + } + + return $res; +} + + 1; diff --git a/src/lib/Hydra/Plugin/PathInput.pm b/src/lib/Hydra/Plugin/PathInput.pm index d913782d..551fc94a 100644 --- a/src/lib/Hydra/Plugin/PathInput.pm +++ b/src/lib/Hydra/Plugin/PathInput.pm @@ -34,8 +34,13 @@ sub fetchInput { } else { print STDERR "copying input ", $name, " from $uri\n"; - $storePath = `nix-store --add "$uri"` - or die "cannot copy path $uri to the Nix store.\n"; + if ( $uri =~ /^\// ) { + $storePath = `nix-store --add "$uri"` + or die "cannot copy path $uri to the Nix store.\n"; + } else { + $storePath = `PRINT_PATH=1 nix-prefetch-url "$uri" | tail -n 1` + or die "cannot fetch $uri to the Nix store.\n"; + } chomp $storePath; $sha256 = (queryPathInfo($storePath, 0))[1] or die; diff --git a/src/lib/Hydra/Plugin/S3Backup.pm b/src/lib/Hydra/Plugin/S3Backup.pm new file mode 100644 index 00000000..4b3af23f --- /dev/null +++ b/src/lib/Hydra/Plugin/S3Backup.pm @@ -0,0 +1,149 @@ +package Hydra::Plugin::S3Backup; + +use strict; +use parent 'Hydra::Plugin'; +use File::Temp; +use File::Basename; +use Fcntl; +use IO::File; +use Net::Amazon::S3; +use Net::Amazon::S3::Client; +use Digest::SHA; +use Nix::Config; +use Nix::Store; +use Hydra::Model::DB; +use Hydra::Helper::CatalystUtils; + +my $client; +my %compressors = ( + xz => "| $Nix::Config::xz", + bzip2 => "| $Nix::Config::bzip2", + none => "" +); +my $lockfile = Hydra::Model::DB::getHydraPath . "/.hydra-s3backup.lock"; + +sub buildFinished { + my ($self, $build, $dependents) = @_; + + return unless $build->buildstatus == 0 or $build->buildstatus == 6; + + my $jobName = showJobName $build; + my $job = $build->job; + + my $cfg = $self->{config}->{s3backup}; + my @config = defined $cfg ? ref $cfg eq "ARRAY" ? @$cfg : ($cfg) : (); + + my @matching_configs = (); + foreach my $bucket_config (@config) { + push @matching_configs, $bucket_config if $jobName =~ /^$bucket_config->{jobs}$/; + } + + return unless @matching_configs; + unless (defined $client) { + $client = Net::Amazon::S3::Client->new( s3 => Net::Amazon::S3->new( retry => 1 ) ); + } + + # !!! Maybe should do per-bucket locking? + my $lockhandle = IO::File->new; + open($lockhandle, "+>", $lockfile) or die "Opening $lockfile: $!"; + flock($lockhandle, Fcntl::LOCK_SH) or die "Read-locking $lockfile: $!"; + + my @needed_paths = (); + foreach my $output ($build->buildoutputs) { + push @needed_paths, $output->path; + } + + my %narinfos = (); + my %compression_types = (); + foreach my $bucket_config (@matching_configs) { + my $compression_type = + exists $bucket_config->{compression_type} ? 
$bucket_config->{compression_type} : "bzip2"; + die "Unsupported compression type $compression_type" unless exists $compressors{$compression_type}; + if (exists $compression_types{$compression_type}) { + push @{$compression_types{$compression_type}}, $bucket_config; + } else { + $compression_types{$compression_type} = [ $bucket_config ]; + $narinfos{$compression_type} = []; + } + } + + my $build_id = $build->id; + my $tempdir = File::Temp->newdir("s3-backup-nars-$build_id" . "XXXXX"); + + my %seen = (); + # Upload nars and build narinfos + while (@needed_paths) { + my $path = shift @needed_paths; + next if exists $seen{$path}; + $seen{$path} = undef; + my $hash = substr basename($path), 0, 32; + my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($path, 0); + my $system; + if (defined $deriver and isValidPath($deriver)) { + $system = derivationFromPath($deriver)->{platform}; + } + foreach my $reference (@{$refs}) { + push @needed_paths, $reference; + } + while (my ($compression_type, $configs) = each %compression_types) { + my @incomplete_buckets = (); + # Don't do any work if all the buckets have this path + foreach my $bucket_config (@{$configs}) { + my $bucket = $client->bucket( name => $bucket_config->{name} ); + my $prefix = exists $bucket_config->{prefix} ? $bucket_config->{prefix} : ""; + push @incomplete_buckets, $bucket_config + unless $bucket->object( key => $prefix . "$hash.narinfo" )->exists; + } + next unless @incomplete_buckets; + my $compressor = $compressors{$compression_type}; + system("$Nix::Config::binDir/nix-store --export $path $compressor > $tempdir/nar") == 0 or die; + my $digest = Digest::SHA->new(256); + $digest->addfile("$tempdir/nar"); + my $file_hash = $digest->hexdigest; + my @stats = stat "$tempdir/nar" or die "Couldn't stat $tempdir/nar"; + my $file_size = $stats[7]; + my $narinfo = ""; + $narinfo .= "StorePath: $path\n"; + $narinfo .= "URL: $hash.nar\n"; + $narinfo .= "Compression: $compression_type\n"; + $narinfo .= "FileHash: sha256:$file_hash\n"; + $narinfo .= "FileSize: $file_size\n"; + $narinfo .= "NarHash: $narHash\n"; + $narinfo .= "NarSize: $narSize\n"; + $narinfo .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n"; + if (defined $deriver) { + $narinfo .= "Deriver: " . basename $deriver . "\n"; + if (defined $system) { + $narinfo .= "System: $system\n"; + } + } + push @{$narinfos{$compression_type}}, { hash => $hash, info => $narinfo }; + foreach my $bucket_config (@incomplete_buckets) { + my $bucket = $client->bucket( name => $bucket_config->{name} ); + my $prefix = exists $bucket_config->{prefix} ? $bucket_config->{prefix} : ""; + my $nar_object = $bucket->object( + key => $prefix . "$hash.nar", + content_type => "application/x-nix-archive" + ); + $nar_object->put_filename("$tempdir/nar"); + } + } + } + + # Upload narinfos + while (my ($compression_type, $infos) = each %narinfos) { + foreach my $bucket_config (@{$compression_types{$compression_type}}) { + foreach my $info (@{$infos}) { + my $bucket = $client->bucket( name => $bucket_config->{name} ); + my $prefix = exists $bucket_config->{prefix} ? $bucket_config->{prefix} : ""; + my $narinfo_object = $bucket->object( + key => $prefix . $info->{hash} . 
".narinfo", + content_type => "text/x-nix-narinfo" + ); + $narinfo_object->put($info->{info}) unless $narinfo_object->exists; + } + } + } +} + +1; diff --git a/src/lib/Hydra/Schema/AggregateConstituents.pm b/src/lib/Hydra/Schema/AggregateConstituents.pm new file mode 100644 index 00000000..8112a49c --- /dev/null +++ b/src/lib/Hydra/Schema/AggregateConstituents.pm @@ -0,0 +1,111 @@ +use utf8; +package Hydra::Schema::AggregateConstituents; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Hydra::Schema::AggregateConstituents + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 COMPONENTS LOADED + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->load_components("+Hydra::Component::ToJSON"); + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("AggregateConstituents"); + +=head1 ACCESSORS + +=head2 aggregate + + data_type: 'integer' + is_foreign_key: 1 + is_nullable: 0 + +=head2 constituent + + data_type: 'integer' + is_foreign_key: 1 + is_nullable: 0 + +=cut + +__PACKAGE__->add_columns( + "aggregate", + { data_type => "integer", is_foreign_key => 1, is_nullable => 0 }, + "constituent", + { data_type => "integer", is_foreign_key => 1, is_nullable => 0 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("aggregate", "constituent"); + +=head1 RELATIONS + +=head2 aggregate + +Type: belongs_to + +Related object: L + +=cut + +__PACKAGE__->belongs_to( + "aggregate", + "Hydra::Schema::Builds", + { id => "aggregate" }, + { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, +); + +=head2 constituent + +Type: belongs_to + +Related object: L + +=cut + +__PACKAGE__->belongs_to( + "constituent", + "Hydra::Schema::Builds", + { id => "constituent" }, + { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, +); + + +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-08-15 00:20:01 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:TLNenyPLIWw2gWsOVhplZw + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/src/lib/Hydra/Schema/BuildInputs.pm b/src/lib/Hydra/Schema/BuildInputs.pm index d450fbe1..dafae860 100644 --- a/src/lib/Hydra/Schema/BuildInputs.pm +++ b/src/lib/Hydra/Schema/BuildInputs.pm @@ -72,6 +72,12 @@ __PACKAGE__->table("BuildInputs"); data_type: 'text' is_nullable: 1 +=head2 emailresponsible + + data_type: 'integer' + default_value: 0 + is_nullable: 0 + =head2 dependency data_type: 'integer' @@ -105,6 +111,8 @@ __PACKAGE__->add_columns( { data_type => "text", is_nullable => 1 }, "value", { data_type => "text", is_nullable => 1 }, + "emailresponsible", + { data_type => "integer", default_value => 0, is_nullable => 0 }, "dependency", { data_type => "integer", is_foreign_key => 1, is_nullable => 1 }, "path", @@ -168,7 +176,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 -# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:tKZAybbNaRIMs9n5tHkqPw +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-08 13:08:15 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:OaJPzRM+8XGsu3eIkqeYEw 1; diff --git a/src/lib/Hydra/Schema/BuildOutputs.pm b/src/lib/Hydra/Schema/BuildOutputs.pm index 9bd656da..751eac4b 100644 --- a/src/lib/Hydra/Schema/BuildOutputs.pm +++ b/src/lib/Hydra/Schema/BuildOutputs.pm @@ -97,6 +97,14 @@ __PACKAGE__->belongs_to( # Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:V8MbzKvZNEaeHBJV67+ZMQ +my %hint = ( + columns => [ + 'path' + ], +); + +sub json_hint { + return \%hint; +} -# You can replace this text with custom code or comments, and it will be preserved on regeneration 1; diff --git a/src/lib/Hydra/Schema/BuildSteps.pm b/src/lib/Hydra/Schema/BuildSteps.pm index 742abe35..64bddd07 100644 --- a/src/lib/Hydra/Schema/BuildSteps.pm +++ b/src/lib/Hydra/Schema/BuildSteps.pm @@ -169,4 +169,21 @@ __PACKAGE__->has_many( # Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:OZsXJniZ/7EB2iSz7p5y4A +my %hint = ( + columns => [ + "machine", + "system", + "stepnr", + "drvpath", + "starttime", + ], + eager_relations => { + build => 'id' + } +); + +sub json_hint { + return \%hint; +} + 1; diff --git a/src/lib/Hydra/Schema/Builds.pm b/src/lib/Hydra/Schema/Builds.pm index ea27d579..2355e4ee 100644 --- a/src/lib/Hydra/Schema/Builds.pm +++ b/src/lib/Hydra/Schema/Builds.pm @@ -288,6 +288,36 @@ __PACKAGE__->set_primary_key("id"); =head1 RELATIONS +=head2 aggregateconstituents_aggregates + +Type: has_many + +Related object: L + +=cut + +__PACKAGE__->has_many( + "aggregateconstituents_aggregates", + "Hydra::Schema::AggregateConstituents", + { "foreign.aggregate" => "self.id" }, + undef, +); + +=head2 aggregateconstituents_constituents + +Type: has_many + +Related object: L + +=cut + +__PACKAGE__->has_many( + "aggregateconstituents_constituents", + "Hydra::Schema::AggregateConstituents", + { "foreign.constituent" => "self.id" }, + undef, +); + =head2 buildinputs_builds Type: has_many @@ -468,9 +498,37 @@ __PACKAGE__->has_many( undef, ); +=head2 aggregates -# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 -# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:isCEXACY/PwkvgKHcXvAIg +Type: many_to_many + +Composing rels: L -> aggregate + +=cut + +__PACKAGE__->many_to_many( + "aggregates", + "aggregateconstituents_constituents", + "aggregate", +); + +=head2 constituents + +Type: many_to_many + +Composing rels: L -> constituent + +=cut + +__PACKAGE__->many_to_many( + "constituents", + "aggregateconstituents_constituents", + "constituent", +); + + +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-08-15 00:20:01 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:U1j/qm0vslb6Jvgu5mGMtw __PACKAGE__->has_many( "dependents", @@ -502,6 +560,8 @@ __PACKAGE__->has_many( __PACKAGE__->many_to_many("jobsetevals", "jobsetevalmembers", "eval"); +__PACKAGE__->many_to_many("constituents_", "aggregateconstituents_aggregates", "constituent"); + sub makeSource { my ($name, $query) = @_; my $source = __PACKAGE__->result_source_instance(); @@ -516,36 +576,6 @@ sub makeQueries { my $activeJobs = "(select distinct project, jobset, job, system from Builds where isCurrent = 1 $constraint)"; - makeSource( - "JobStatus$name", - # Urgh, can't use "*" in the "select" here because of the status change join. 
- < c.id and - ((x.buildStatus = 0 and c.buildStatus != 0) or - (x.buildStatus != 0 and c.buildStatus = 0))) -QUERY - ); - - makeSource("ActiveJobs$name", "select distinct project, jobset, job from Builds where isCurrent = 1 $constraint"); - makeSource( "LatestSucceeded$name", < [ + 'id', + 'finished', + 'timestamp', + 'starttime', + 'stoptime', + 'project', + 'jobset', + 'job', + 'nixname', + 'system', + 'priority', + 'busy', + 'buildstatus', + 'releasename' + ], + eager_relations => { + buildoutputs => 'name' + } +); + +sub json_hint { + return \%hint; +} + 1; diff --git a/src/lib/Hydra/Schema/CachedDarcsInputs.pm b/src/lib/Hydra/Schema/CachedDarcsInputs.pm new file mode 100644 index 00000000..59488060 --- /dev/null +++ b/src/lib/Hydra/Schema/CachedDarcsInputs.pm @@ -0,0 +1,98 @@ +use utf8; +package Hydra::Schema::CachedDarcsInputs; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Hydra::Schema::CachedDarcsInputs + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 COMPONENTS LOADED + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->load_components("+Hydra::Component::ToJSON"); + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("CachedDarcsInputs"); + +=head1 ACCESSORS + +=head2 uri + + data_type: 'text' + is_nullable: 0 + +=head2 revision + + data_type: 'text' + is_nullable: 0 + +=head2 sha256hash + + data_type: 'text' + is_nullable: 0 + +=head2 storepath + + data_type: 'text' + is_nullable: 0 + +=head2 revcount + + data_type: 'integer' + is_nullable: 0 + +=cut + +__PACKAGE__->add_columns( + "uri", + { data_type => "text", is_nullable => 0 }, + "revision", + { data_type => "text", is_nullable => 0 }, + "sha256hash", + { data_type => "text", is_nullable => 0 }, + "storepath", + { data_type => "text", is_nullable => 0 }, + "revcount", + { data_type => "integer", is_nullable => 0 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("uri", "revision"); + + +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-09-20 11:08:50 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Yl1slt3SAizijgu0KUTn0A + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/src/lib/Hydra/Schema/Jobs.pm b/src/lib/Hydra/Schema/Jobs.pm index 6a703588..dcfa557e 100644 --- a/src/lib/Hydra/Schema/Jobs.pm +++ b/src/lib/Hydra/Schema/Jobs.pm @@ -137,8 +137,27 @@ __PACKAGE__->belongs_to( { is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" }, ); +=head2 starredjobs -# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 -# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:t2CCfUjFEz/lO4szROz1AQ +Type: has_many + +Related object: L + +=cut + +__PACKAGE__->has_many( + "starredjobs", + "Hydra::Schema::StarredJobs", + { + "foreign.job" => "self.name", + "foreign.jobset" => "self.jobset", + "foreign.project" => "self.project", + }, + undef, +); + + +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:uYKWjewvKBEAuK53u7vKuw 1; diff --git a/src/lib/Hydra/Schema/JobsetEvalInputs.pm b/src/lib/Hydra/Schema/JobsetEvalInputs.pm index fa68fc40..f79c873b 100644 --- a/src/lib/Hydra/Schema/JobsetEvalInputs.pm +++ b/src/lib/Hydra/Schema/JobsetEvalInputs.pm @@ -169,6 +169,16 @@ __PACKAGE__->belongs_to( # Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 # DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:1Dp8B58leBLh4GK0GPw2zg +my %hint = ( + columns => [ + "revision", + "type", + "uri" + ], +); + +sub json_hint { + return \%hint; +} -# You can replace this text with custom code or comments, and it will be preserved on regeneration 1; diff --git a/src/lib/Hydra/Schema/JobsetEvals.pm b/src/lib/Hydra/Schema/JobsetEvals.pm index 5fcd5249..0bd21da2 100644 --- a/src/lib/Hydra/Schema/JobsetEvals.pm +++ b/src/lib/Hydra/Schema/JobsetEvals.pm @@ -199,4 +199,22 @@ __PACKAGE__->has_many( __PACKAGE__->many_to_many(builds => 'buildIds', 'build'); +my %hint = ( + columns => [ + "hasnewbuilds", + "id" + ], + relations => { + "builds" => "id" + }, + eager_relations => { + # altnr? Does anyone care? + jobsetevalinputs => "name" + } +); + +sub json_hint { + return \%hint; +} + 1; diff --git a/src/lib/Hydra/Schema/JobsetInputs.pm b/src/lib/Hydra/Schema/JobsetInputs.pm index 48464bbf..b67a3519 100644 --- a/src/lib/Hydra/Schema/JobsetInputs.pm +++ b/src/lib/Hydra/Schema/JobsetInputs.pm @@ -57,6 +57,12 @@ __PACKAGE__->table("JobsetInputs"); data_type: 'text' is_nullable: 0 +=head2 emailresponsible + + data_type: 'integer' + default_value: 0 + is_nullable: 0 + =cut __PACKAGE__->add_columns( @@ -68,6 +74,8 @@ __PACKAGE__->add_columns( { data_type => "text", is_nullable => 0 }, "type", { data_type => "text", is_nullable => 0 }, + "emailresponsible", + { data_type => "integer", default_value => 0, is_nullable => 0 }, ); =head1 PRIMARY KEY @@ -142,7 +150,17 @@ __PACKAGE__->has_many( ); -# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 -# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:UXBzqO0vHPql4LYyXpgEQg +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-08 13:06:15 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:+mZZqLjQNwblb/EWW1alLQ + +my %hint = ( + relations => { + "jobsetinputalts" => "value" + } +); + +sub json_hint { + return \%hint; +} 1; diff --git a/src/lib/Hydra/Schema/Jobsets.pm b/src/lib/Hydra/Schema/Jobsets.pm index e4b4c6ad..7946b33b 100644 --- a/src/lib/Hydra/Schema/Jobsets.pm +++ b/src/lib/Hydra/Schema/Jobsets.pm @@ -118,6 +118,17 @@ __PACKAGE__->table("Jobsets"); default_value: 300 is_nullable: 0 +=head2 schedulingshares + + data_type: 'integer' + default_value: 100 + is_nullable: 0 + +=head2 fetcherrormsg + + data_type: 'text' + is_nullable: 1 + =cut __PACKAGE__->add_columns( @@ -151,6 +162,10 @@ __PACKAGE__->add_columns( { data_type => "integer", default_value => 3, is_nullable => 0 }, "checkinterval", { data_type => "integer", default_value => 300, is_nullable => 0 }, + "schedulingshares", + { data_type => "integer", default_value => 100, is_nullable => 0 }, + "fetcherrormsg", + { data_type => "text", is_nullable => 1 }, ); =head1 PRIMARY KEY @@ -271,8 +286,42 @@ __PACKAGE__->belongs_to( { is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" }, ); +=head2 starredjobs -# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 -# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:tsGR8MhZRIUeNwpcVczMUw +Type: has_many + +Related object: L + +=cut + +__PACKAGE__->has_many( + "starredjobs", + "Hydra::Schema::StarredJobs", + { + "foreign.jobset" => "self.name", + "foreign.project" => "self.project", + }, + undef, +); + + +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:DTAGxP5RFvcNxP/ciJGo4Q + +my %hint = ( + columns => [ + "enabled", + "errormsg", + "fetcherrormsg", + "emailoverride" + ], + eager_relations => { + jobsetinputs => "name" + } +); + +sub json_hint { + return \%hint; +} 1; diff --git a/src/lib/Hydra/Schema/NrBuilds.pm b/src/lib/Hydra/Schema/NrBuilds.pm new file mode 100644 index 00000000..27ae2e83 --- /dev/null +++ b/src/lib/Hydra/Schema/NrBuilds.pm @@ -0,0 +1,75 @@ +use utf8; +package Hydra::Schema::NrBuilds; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Hydra::Schema::NrBuilds + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 COMPONENTS LOADED + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->load_components("+Hydra::Component::ToJSON"); + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("NrBuilds"); + +=head1 ACCESSORS + +=head2 what + + data_type: 'text' + is_nullable: 0 + +=head2 count + + data_type: 'integer' + is_nullable: 0 + +=cut + +__PACKAGE__->add_columns( + "what", + { data_type => "text", is_nullable => 0 }, + "count", + { data_type => "integer", is_nullable => 0 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("what"); + + +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-08-12 17:59:18 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:CK8eJGC803nGj0wnete9xg + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/src/lib/Hydra/Schema/Projects.pm b/src/lib/Hydra/Schema/Projects.pm index fcc87a30..75f40b6f 100644 --- a/src/lib/Hydra/Schema/Projects.pm +++ b/src/lib/Hydra/Schema/Projects.pm @@ -226,6 +226,21 @@ __PACKAGE__->has_many( undef, ); +=head2 starredjobs + +Type: has_many + +Related object: L + +=cut + +__PACKAGE__->has_many( + "starredjobs", + "Hydra::Schema::StarredJobs", + { "foreign.project" => "self.name" }, + undef, +); + =head2 viewjobs Type: has_many @@ -267,15 +282,26 @@ Composing rels: L -> username __PACKAGE__->many_to_many("usernames", "projectmembers", "username"); -# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 -# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:RffghAo9jAaqYk41y1Sdqw -# These lines were loaded from '/home/rbvermaa/src/hydra/src/lib/Hydra/Schema/Projects.pm' found in @INC. -# They are now part of the custom portion of this file -# for you to hand-edit. If you do not either delete -# this section or remove that file from @INC, this section -# will be repeated redundantly when you re-create this -# file again via Loader! See skip_load_external to disable -# this feature. +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:PdNQ2mf5azBB6nI+iAm8fQ + +my %hint = ( + columns => [ + "name", + "displayname", + "description", + "enabled", + "hidden", + "owner" + ], + relations => { + releases => "name", + jobsets => "name" + } +); + +sub json_hint { + return \%hint; +} -# You can replace this text with custom content, and it will be preserved on regeneration 1; diff --git a/src/lib/Hydra/Schema/StarredJobs.pm b/src/lib/Hydra/Schema/StarredJobs.pm new file mode 100644 index 00000000..51bde91f --- /dev/null +++ b/src/lib/Hydra/Schema/StarredJobs.pm @@ -0,0 +1,161 @@ +use utf8; +package Hydra::Schema::StarredJobs; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Hydra::Schema::StarredJobs + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 COMPONENTS LOADED + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->load_components("+Hydra::Component::ToJSON"); + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("StarredJobs"); + +=head1 ACCESSORS + +=head2 username + + data_type: 'text' + is_foreign_key: 1 + is_nullable: 0 + +=head2 project + + data_type: 'text' + is_foreign_key: 1 + is_nullable: 0 + +=head2 jobset + + data_type: 'text' + is_foreign_key: 1 + is_nullable: 0 + +=head2 job + + data_type: 'text' + is_foreign_key: 1 + is_nullable: 0 + +=cut + +__PACKAGE__->add_columns( + "username", + { data_type => "text", is_foreign_key => 1, is_nullable => 0 }, + "project", + { data_type => "text", is_foreign_key => 1, is_nullable => 0 }, + "jobset", + { data_type => "text", is_foreign_key => 1, is_nullable => 0 }, + "job", + { data_type => "text", is_foreign_key => 1, is_nullable => 0 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=item * L + +=item * L + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("username", "project", "jobset", "job"); + +=head1 RELATIONS + +=head2 job + +Type: belongs_to + +Related object: L + +=cut + +__PACKAGE__->belongs_to( + "job", + "Hydra::Schema::Jobs", + { jobset => "jobset", name => "job", project => "project" }, + { is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" }, +); + +=head2 jobset + +Type: belongs_to + +Related object: L + +=cut + +__PACKAGE__->belongs_to( + "jobset", + "Hydra::Schema::Jobsets", + { name => "jobset", project => "project" }, + { is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" }, +); + +=head2 project + +Type: belongs_to + +Related object: L + +=cut + +__PACKAGE__->belongs_to( + "project", + "Hydra::Schema::Projects", + { name => "project" }, + { is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" }, +); + +=head2 username + +Type: belongs_to + +Related object: L + +=cut + +__PACKAGE__->belongs_to( + "username", + "Hydra::Schema::Users", + { username => "username" }, + { is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" }, +); + + +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:naj5aKWuw8hLE6klmvW9Eg + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/src/lib/Hydra/Schema/Users.pm b/src/lib/Hydra/Schema/Users.pm index 2fac38ed..245f44ee 100644 --- a/src/lib/Hydra/Schema/Users.pm +++ b/src/lib/Hydra/Schema/Users.pm @@ -135,6 +135,21 @@ __PACKAGE__->has_many( undef, ); +=head2 starredjobs + +Type: has_many + +Related object: L + +=cut + +__PACKAGE__->has_many( + "starredjobs", + "Hydra::Schema::StarredJobs", + { "foreign.username" => "self.username" }, + undef, +); + =head2 userroles Type: has_many @@ -161,14 +176,22 @@ Composing rels: L -> project __PACKAGE__->many_to_many("projects", "projectmembers", "project"); -# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50 -# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:hy3MKvFxfL+1bTc7Hcb1zA -# These lines were loaded from '/home/rbvermaa/src/hydra/src/lib/Hydra/Schema/Users.pm' found in @INC. -# They are now part of the custom portion of this file -# for you to hand-edit. If you do not either delete -# this section or remove that file from @INC, this section -# will be repeated redundantly when you re-create this -# file again via Loader! See skip_load_external to disable -# this feature. +# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Hv9Ukqud0d3uIUot0ErKeg + +my %hint = ( + columns => [ + "fullname", + "emailaddress", + "username" + ], + relations => { + userroles => "role" + } +); + +sub json_hint { + return \%hint; +} 1; diff --git a/src/lib/Hydra/View/NixExprs.pm b/src/lib/Hydra/View/NixExprs.pm index a88a0e0f..25c06b2e 100644 --- a/src/lib/Hydra/View/NixExprs.pm +++ b/src/lib/Hydra/View/NixExprs.pm @@ -19,31 +19,83 @@ sub escape { sub process { my ($self, $c) = @_; - my $res = "[\n"; + my %perSystem; foreach my $pkg (@{$c->stash->{nixPkgs}}) { my $build = $pkg->{build}; - $res .= " # $pkg->{name}\n"; - $res .= " { type = \"derivation\";\n"; - $res .= " name = " . escape ($build->get_column("releasename") or $build->nixname) . ";\n"; - $res .= " system = " . (escape $build->system) . ";\n"; - $res .= " outPath = " . (escape $pkg->{outPath}) . ";\n"; - $res .= " meta = {\n"; - $res .= " description = " . (escape $build->description) . ";\n" - if $build->description; - $res .= " longDescription = " . (escape $build->longdescription) . ";\n" - if $build->longdescription; - $res .= " license = " . (escape $build->license) . 
";\n" - if $build->license; - $res .= " };\n"; - $res .= " }\n"; + $perSystem{$build->system}->{$build->get_column('job')} = $pkg; } - $res .= "]\n"; + my $res = <{$job}; + my $build = $pkg->{build}; + $res .= " # Hydra build ${\$build->id}\n"; + my $attr = $build->get_column('job'); + $attr =~ s/\./-/g; + $res .= " ${\escape $attr} = (mkFakeDerivation {\n"; + $res .= " type = \"derivation\";\n"; + $res .= " name = ${\escape ($build->get_column('releasename') or $build->nixname)};\n"; + $res .= " system = ${\escape $build->system};\n"; + $res .= " meta = {\n"; + $res .= " description = ${\escape $build->description};\n" + if $build->description; + $res .= " longDescription = ${\escape $build->longdescription};\n" + if $build->longdescription; + $res .= " license = ${\escape $build->license};\n" + if $build->license; + $res .= " maintainers = ${\escape $build->maintainers};\n" + if $build->maintainers; + $res .= " };\n"; + $res .= " } {\n"; + my @outputNames = sort (keys $pkg->{outputs}); + $res .= " ${\escape $_} = ${\escape $pkg->{outputs}->{$_}};\n" foreach @outputNames; + my $out = defined $pkg->{outputs}->{"out"} ? "out" : $outputNames[0]; + $res .= " }).$out;\n\n"; + } + + $res .= "}\n\n"; + $first = 0; + } + + $res .= "else " if !$first; + $res .= "{}\n"; my $tar = Archive::Tar->new; - $tar->add_data("channel/channel-name", ($c->stash->{channelName} or "unnamed-channel"), {mtime => 0}); - $tar->add_data("channel/default.nix", $res, {mtime => 0}); + $tar->add_data("channel/channel-name", ($c->stash->{channelName} or "unnamed-channel"), {mtime => 1}); + $tar->add_data("channel/default.nix", $res, {mtime => 1}); my $tardata = $tar->write; my $bzip2data; diff --git a/src/lib/Hydra/View/Plain.pm b/src/lib/Hydra/View/Plain.pm index 764770a0..d34cc44c 100644 --- a/src/lib/Hydra/View/Plain.pm +++ b/src/lib/Hydra/View/Plain.pm @@ -8,7 +8,7 @@ sub process { my ($self, $c) = @_; $c->response->content_encoding("utf-8"); $c->response->content_type('text/plain') unless $c->response->content_type() ne ""; - $self->SUPER::process($c); + $c->response->body($c->stash->{plain}->{data}); } 1; diff --git a/src/lib/Hydra/View/TT.pm b/src/lib/Hydra/View/TT.pm index ece42f65..d24ca086 100644 --- a/src/lib/Hydra/View/TT.pm +++ b/src/lib/Hydra/View/TT.pm @@ -8,17 +8,18 @@ __PACKAGE__->config( TEMPLATE_EXTENSION => '.tt', PRE_CHOMP => 1, POST_CHOMP => 1, - expose_methods => [qw/log_exists ellipsize/]); + expose_methods => [qw/buildLogExists buildStepLogExists/]); -sub log_exists { - my ($self, $c, $drvPath) = @_; - my $x = getDrvLogPath($drvPath); - return defined $x; +sub buildLogExists { + my ($self, $c, $build) = @_; + my @outPaths = map { $_->path } $build->buildoutputs->all; + return defined findLog($c, $build->drvpath, @outPaths); } -sub ellipsize { - my ($self, $c, $s, $n) = @_; - return length $s <= $n ? $s : substr($s, 0, $n - 3) . 
"..."; +sub buildStepLogExists { + my ($self, $c, $step) = @_; + my @outPaths = map { $_->path } $step->buildstepoutputs->all; + return defined findLog($c, $step->drvpath, @outPaths); } 1; diff --git a/src/root/build.tt b/src/root/build.tt index 6dfc209c..dde0e631 100644 --- a/src/root/build.tt +++ b/src/root/build.tt @@ -7,6 +7,7 @@ [% project = build.project %] [% jobset = build.jobset %] [% job = build.job %] +[% isAggregate = constituents.size > 0 %] [% BLOCK renderOutputs %] [% start=1; FOREACH output IN outputs %] @@ -22,7 +23,7 @@ [% FOREACH step IN build.buildsteps %] [% IF ( type == "All" ) || ( type == "Failed" && step.status != 0 ) || ( type == "Running" && step.busy == 1 ) %] - [% has_log = log_exists(step.drvpath); + [% has_log = buildStepLogExists(step); log = c.uri_for('/build' build.id 'nixlog' step.stepnr); %] [% step.stepnr %] @@ -67,7 +68,40 @@ [% END %]