Merge branch 'master' into libpqxx_undeprecate

commit a055796ef5

.gitignore (vendored)
@@ -15,7 +15,6 @@ Makefile.in
 /aclocal.m4
 /missing
 /install-sh
-/src/script/hydra-eval-guile-jobs
 /src/sql/hydra-postgresql.sql
 /src/sql/hydra-sqlite.sql
 /src/sql/tmp.sqlite
README.md
@@ -1,11 +1,11 @@
 # Hydra

-[Hydra](https://nixos.org/hydra/) is a continuous integration system based
-on the Nix package manager. For more information, see the
-[manual](http://nixos.org/hydra/manual/).
+Hydra is a continuous integration system based on the Nix package
+manager. For more information, see the
+[manual](https://hydra.nixos.org/job/hydra/master/manual/latest/download-by-type/doc/manual).

 For development see
-[hacking instructions](http://nixos.org/hydra/manual/#chap-hacking).
+[hacking instructions](https://hydra.nixos.org/job/hydra/master/manual/latest/download-by-type/doc/manual#chap-hacking).

 ---

configure.ac
@@ -1,5 +1,4 @@
-AC_INIT([Hydra], [m4_esyscmd([echo -n $(cat ./version)$VERSION_SUFFIX])],
-  [nix-dev@cs.uu.nl], [hydra], [http://nixos.org/hydra/])
+AC_INIT([Hydra], [m4_esyscmd([echo -n $(cat ./version)$VERSION_SUFFIX])])
 AC_CONFIG_AUX_DIR(config)
 AM_INIT_AUTOMAKE([foreign serial-tests])

@@ -53,15 +52,6 @@ fi

 PKG_CHECK_MODULES([NIX], [nix-main nix-expr nix-store])

-PKG_CHECK_MODULES([GUILE], [guile-2.0], [HAVE_GUILE=yes], [HAVE_GUILE=no])
-
-if test "x$HAVE_GUILE" = xyes; then
-  AC_PATH_PROG([GUILE], [guile])
-else
-  GUILE="guile"
-fi
-AC_SUBST([GUILE])
-
 testPath="$(dirname $(type -p expr))"
 AC_SUBST(testPath)

@@ -80,13 +70,11 @@ AC_CONFIG_FILES([
   src/lib/Makefile
   src/root/Makefile
   src/script/Makefile
-  src/script/hydra-eval-guile-jobs
   tests/Makefile
   tests/jobs/config.nix
 ])

-AC_CONFIG_COMMANDS([executable-scripts],
-  [chmod +x src/script/hydra-eval-guile-jobs])
+AC_CONFIG_COMMANDS([executable-scripts], [])

 AC_CONFIG_HEADER([hydra-config.h])

doc/manual/Makefile.am
@@ -3,14 +3,13 @@ DOCBOOK_FILES = installation.xml introduction.xml manual.xml projects.xml hackin
 EXTRA_DIST = $(DOCBOOK_FILES)

 xsltproc_opts = \
-  --param html.stylesheet \'style.css\' \
   --param callout.graphics.extension \'.gif\' \
   --param section.autolabel 1 \
   --param section.label.includes.component.label 1


 # Include the manual in the tarball.
-dist_html_DATA = manual.html style.css
+dist_html_DATA = manual.html

 # Embed Docbook's callout images in the distribution.
 EXTRA_DIST += images

doc/manual/style.css (deleted)
@@ -1,256 +0,0 @@
-/* Copied from http://bakefile.sourceforge.net/, which appears
-   licensed under the GNU GPL. */
-
-
-/***************************************************************************
-                         Basic headers and text:
- ***************************************************************************/
-
-body
-{
-    font-family: "Nimbus Sans L", sans-serif;
-    background: white;
-    margin: 2em 1em 2em 1em;
-}
-
-h1, h2, h3, h4
-{
-    color: #005aa0;
-}
-
-h1 /* title */
-{
-    font-size: 200%;
-}
-
-h2 /* chapters, appendices, subtitle */
-{
-    font-size: 180%;
-}
-
-/* Extra space between chapters, appendices. */
-div.chapter > div.titlepage h2, div.appendix > div.titlepage h2
-{
-    margin-top: 1.5em;
-}
-
-div.section > div.titlepage h2 /* sections */
-{
-    font-size: 150%;
-    margin-top: 1.5em;
-}
-
-h3 /* subsections */
-{
-    font-size: 125%;
-}
-
-div.simplesect h2
-{
-    font-size: 110%;
-}
-
-div.appendix h3
-{
-    font-size: 150%;
-    margin-top: 1.5em;
-}
-
-div.refnamediv h2, div.refsynopsisdiv h2, div.refsection h2 /* refentry parts */
-{
-    margin-top: 1.4em;
-    font-size: 125%;
-}
-
-div.refsection h3
-{
-    font-size: 110%;
-}
-
-
-/***************************************************************************
-                               Examples:
- ***************************************************************************/
-
-div.example
-{
-    border: 1px solid #b0b0b0;
-    padding: 6px 6px;
-    margin-left: 1.5em;
-    margin-right: 1.5em;
-    background: #f4f4f8;
-    border-radius: 0.4em;
-    box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
-}
-
-div.example p.title
-{
-    margin-top: 0em;
-}
-
-div.example pre
-{
-    box-shadow: none;
-}
-
-
-/***************************************************************************
-                             Screen dumps:
- ***************************************************************************/
-
-pre.screen, pre.programlisting
-{
-    border: 1px solid #b0b0b0;
-    padding: 3px 3px;
-    margin-left: 1.5em;
-    margin-right: 1.5em;
-    color: #600000;
-    background: #f4f4f8;
-    font-family: monospace;
-    border-radius: 0.4em;
-    box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
-}
-
-div.example pre.programlisting
-{
-    border: 0px;
-    padding: 0 0;
-    margin: 0 0 0 0;
-}
-
-
-/***************************************************************************
-                          Notes, warnings etc:
- ***************************************************************************/
-
-.note, .warning
-{
-    border: 1px solid #b0b0b0;
-    padding: 3px 3px;
-    margin-left: 1.5em;
-    margin-right: 1.5em;
-    margin-bottom: 1em;
-    padding: 0.3em 0.3em 0.3em 0.3em;
-    background: #fffff5;
-    border-radius: 0.4em;
-    box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
-}
-
-div.note, div.warning
-{
-    font-style: italic;
-}
-
-div.note h3, div.warning h3
-{
-    color: red;
-    font-size: 100%;
-    padding-right: 0.5em;
-    display: inline;
-}
-
-div.note p, div.warning p
-{
-    margin-bottom: 0em;
-}
-
-div.note h3 + p, div.warning h3 + p
-{
-    display: inline;
-}
-
-div.note h3
-{
-    color: blue;
-    font-size: 100%;
-}
-
-div.navfooter *
-{
-    font-size: 90%;
-}
-
-
-/***************************************************************************
-                      Links colors and highlighting:
- ***************************************************************************/
-
-a { text-decoration: none; }
-a:hover { text-decoration: underline; }
-a:link { color: #0048b3; }
-a:visited { color: #002a6a; }
-
-
-/***************************************************************************
-                           Table of contents:
- ***************************************************************************/
-
-div.toc
-{
-    font-size: 90%;
-}
-
-div.toc dl
-{
-    margin-top: 0em;
-    margin-bottom: 0em;
-}
-
-
-/***************************************************************************
-                            Special elements:
- ***************************************************************************/
-
-tt, code
-{
-    color: #400000;
-}
-
-.term
-{
-    font-weight: bold;
-
-}
-
-div.variablelist dd p, div.glosslist dd p
-{
-    margin-top: 0em;
-}
-
-div.variablelist dd, div.glosslist dd
-{
-    margin-left: 1.5em;
-}
-
-div.glosslist dt
-{
-    font-style: italic;
-}
-
-.varname
-{
-    color: #400000;
-}
-
-span.command strong
-{
-    font-weight: normal;
-    color: #400000;
-}
-
-div.calloutlist table
-{
-    box-shadow: none;
-}
-
-table
-{
-    border-collapse: collapse;
-    box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
-}
-
-div.affiliation
-{
-    font-style: italic;
-}
release.nix
@@ -1,5 +1,5 @@
 { hydraSrc ? builtins.fetchGit ./.
-, nixpkgs ? builtins.fetchGit { url = https://github.com/NixOS/nixpkgs-channels.git; ref = "nixos-19.09-small"; }
+, nixpkgs ? builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/release-19.09.tar.gz
 , officialRelease ? false
 , shell ? false
 }:
@@ -129,11 +129,10 @@ rec {
       buildInputs =
         [ makeWrapper autoconf automake libtool unzip nukeReferences pkgconfig sqlite libpqxx
           gitAndTools.topGit mercurial darcs subversion bazaar openssl bzip2 libxslt
-          guile # optional, for Guile + Guix support
          perlDeps perl nix
           postgresql95 # for running the tests
           boost
-          nlohmann_json
+          (nlohmann_json.override { multipleHeaders = true; })
         ];

       hydraPath = lib.makeBinPath (
@@ -155,9 +154,7 @@ rec {

       preConfigure = "autoreconf -vfi";

-      NIX_LDFLAGS = [
-        "-lpthread"
-      ];
+      NIX_LDFLAGS = [ "-lpthread" ];

       enableParallelBuilding = true;

src/hydra-eval-jobs/Makefile.am
@@ -1,5 +1,5 @@
 bin_PROGRAMS = hydra-eval-jobs

 hydra_eval_jobs_SOURCES = hydra-eval-jobs.cc
-hydra_eval_jobs_LDADD = $(NIX_LIBS)
+hydra_eval_jobs_LDADD = $(NIX_LIBS) -lnixrust
 hydra_eval_jobs_CXXFLAGS = $(NIX_CFLAGS) -I ../libhydra

src/hydra-eval-jobs/hydra-eval-jobs.cc
@@ -1,35 +1,63 @@
 #include <map>
 #include <iostream>

-#define GC_LINUX_THREADS 1
-#include <gc/gc_allocator.h>
-
 #include "shared.hh"
 #include "store-api.hh"
 #include "eval.hh"
 #include "eval-inline.hh"
 #include "util.hh"
-#include "json.hh"
 #include "get-drvs.hh"
 #include "globals.hh"
 #include "common-eval-args.hh"
+#include "attr-path.hh"
+#include "derivations.hh"

 #include "hydra-config.hh"

 #include <sys/types.h>
 #include <sys/wait.h>
+#include <sys/resource.h>
+
+#include <nlohmann/json.hpp>

 using namespace nix;


 static Path gcRootsDir;
+static size_t maxMemorySize;

-
-static void findJobs(EvalState & state, JSONObject & top,
-    Bindings & autoArgs, Value & v, const string & attrPath);
+struct MyArgs : MixEvalArgs, MixCommonArgs
+{
+    Path releaseExpr;
+    bool dryRun = false;
+
+    MyArgs() : MixCommonArgs("hydra-eval-jobs")
+    {
+        mkFlag()
+            .longName("help")
+            .description("show usage information")
+            .handler([&]() {
+                printHelp(programName, std::cout);
+                throw Exit();
+            });
+
+        mkFlag()
+            .longName("gc-roots-dir")
+            .description("garbage collector roots directory")
+            .labels({"path"})
+            .dest(&gcRootsDir);
+
+        mkFlag()
+            .longName("dry-run")
+            .description("don't create store derivations")
+            .set(&dryRun, true);
+
+        expectArg("expr", &releaseExpr);
+    }
+};
+
+static MyArgs myArgs;

-static string queryMetaStrings(EvalState & state, DrvInfo & drv, const string & name, const string & subAttribute)
+static std::string queryMetaStrings(EvalState & state, DrvInfo & drv, const string & name, const string & subAttribute)
 {
     Strings res;
     std::function<void(Value & v)> rec;

@@ -54,169 +82,146 @@ static string queryMetaStrings(EvalState & state, DrvInfo & drv, const string &
     return concatStringsSep(", ", res);
 }

-static std::string lastAttrPath;
-static bool comma = false;
-static size_t maxHeapSize;
-
-
-struct BailOut { };
-
-
-bool lte(const std::string & s1, const std::string & s2)
-{
-    size_t p1 = 0, p2 = 0;
-
-    while (true) {
-        if (p1 == s1.size()) return p2 == s2.size();
-        if (p2 == s2.size()) return true;
-
-        auto d1 = s1.find('.', p1);
-        auto d2 = s2.find('.', p2);
-
-        auto c = s1.compare(p1, d1 - p1, s2, p2, d2 - p2);
-
-        if (c < 0) return true;
-        if (c > 0) return false;
-
-        p1 = d1 == std::string::npos ? s1.size() : d1 + 1;
-        p2 = d2 == std::string::npos ? s2.size() : d2 + 1;
-    }
-}
-
-
-static void findJobsWrapped(EvalState & state, JSONObject & top,
-    Bindings & autoArgs, Value & vIn, const string & attrPath)
-{
-    if (lastAttrPath != "" && lte(attrPath, lastAttrPath)) return;
-
-    debug(format("at path `%1%'") % attrPath);
-
-    checkInterrupt();
-
-    Value v;
-    state.autoCallFunction(autoArgs, vIn, v);
-
-    if (v.type == tAttrs) {
-
-        auto drv = getDerivation(state, v, false);
-
-        if (drv) {
-            Path drvPath;
-
-            DrvInfo::Outputs outputs = drv->queryOutputs();
-
-            if (drv->querySystem() == "unknown")
-                throw EvalError("derivation must have a ‘system’ attribute");
-
-            if (comma) { std::cout << ","; comma = false; }
-
-            {
-            auto res = top.object(attrPath);
-            res.attr("nixName", drv->queryName());
-            res.attr("system", drv->querySystem());
-            res.attr("drvPath", drvPath = drv->queryDrvPath());
-            res.attr("description", drv->queryMetaString("description"));
-            res.attr("license", queryMetaStrings(state, *drv, "license", "shortName"));
-            res.attr("homepage", drv->queryMetaString("homepage"));
-            res.attr("maintainers", queryMetaStrings(state, *drv, "maintainers", "email"));
-            res.attr("schedulingPriority", drv->queryMetaInt("schedulingPriority", 100));
-            res.attr("timeout", drv->queryMetaInt("timeout", 36000));
-            res.attr("maxSilent", drv->queryMetaInt("maxSilent", 7200));
-            res.attr("isChannel", drv->queryMetaBool("isHydraChannel", false));
-
-            /* If this is an aggregate, then get its constituents. */
-            Bindings::iterator a = v.attrs->find(state.symbols.create("_hydraAggregate"));
-            if (a != v.attrs->end() && state.forceBool(*a->value, *a->pos)) {
-                Bindings::iterator a = v.attrs->find(state.symbols.create("constituents"));
-                if (a == v.attrs->end())
-                    throw EvalError("derivation must have a ‘constituents’ attribute");
-                PathSet context;
-                state.coerceToString(*a->pos, *a->value, context, true, false);
-                PathSet drvs;
-                for (auto & i : context)
-                    if (i.at(0) == '!') {
-                        size_t index = i.find("!", 1);
-                        drvs.insert(string(i, index + 1));
-                    }
-                res.attr("constituents", concatStringsSep(" ", drvs));
-            }
-
-            /* Register the derivation as a GC root.  !!! This
-               registers roots for jobs that we may have already
-               done. */
-            auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
-            if (gcRootsDir != "" && localStore) {
-                Path root = gcRootsDir + "/" + baseNameOf(drvPath);
-                if (!pathExists(root)) localStore->addPermRoot(drvPath, root, false);
-            }
-
-            auto res2 = res.object("outputs");
-            for (auto & j : outputs)
-                res2.attr(j.first, j.second);
-            }
-
-            GC_prof_stats_s gc;
-            GC_get_prof_stats(&gc, sizeof(gc));
-
-            if (gc.heapsize_full > maxHeapSize) {
-                printInfo("restarting hydra-eval-jobs after job '%s' because heap size is at %d bytes", attrPath, gc.heapsize_full);
-                lastAttrPath = attrPath;
-                throw BailOut();
-            }
-        }
-
-        else {
-            if (!state.isDerivation(v)) {
-                for (auto & i : v.attrs->lexicographicOrder()) {
-                    std::string name(i->name);
-
-                    /* Skip jobs with dots in the name. */
-                    if (name.find('.') != std::string::npos) {
-                        printError("skipping job with illegal name '%s'", name);
-                        continue;
-                    }
-
-                    findJobs(state, top, autoArgs, *i->value,
-                        (attrPath.empty() ? "" : attrPath + ".") + name);
-                }
-            }
-        }
-    }
-
-    else if (v.type == tNull) {
-        // allow null values, meaning 'do nothing'
-    }
-
-    else
-        throw TypeError(format("unsupported value: %1%") % v);
-}
-
-
-static void findJobs(EvalState & state, JSONObject & top,
-    Bindings & autoArgs, Value & v, const string & attrPath)
-{
-    try {
-        findJobsWrapped(state, top, autoArgs, v, attrPath);
-    } catch (EvalError & e) {
-        if (comma) { std::cout << ","; comma = false; }
-        auto res = top.object(attrPath);
-        res.attr("error", filterANSIEscapes(e.msg(), true));
-    }
-}
+static void worker(
+    EvalState & state,
+    Bindings & autoArgs,
+    AutoCloseFD & to,
+    AutoCloseFD & from)
+{
+    Value vTop;
+    state.evalFile(lookupFileArg(state, myArgs.releaseExpr), vTop);
+
+    auto vRoot = state.allocValue();
+    state.autoCallFunction(autoArgs, vTop, *vRoot);
+
+    while (true) {
+        /* Wait for the master to send us a job name. */
+        writeLine(to.get(), "next");
+
+        auto s = readLine(from.get());
+        if (s == "exit") break;
+        if (!hasPrefix(s, "do ")) abort();
+        std::string attrPath(s, 3);
+
+        debug("worker process %d at '%s'", getpid(), attrPath);
+
+        /* Evaluate it and send info back to the master. */
+        nlohmann::json reply;
+
+        try {
+            auto vTmp = findAlongAttrPath(state, attrPath, autoArgs, *vRoot);
+
+            auto v = state.allocValue();
+            state.autoCallFunction(autoArgs, *vTmp, *v);
+
+            if (auto drv = getDerivation(state, *v, false)) {
+
+                DrvInfo::Outputs outputs = drv->queryOutputs();
+
+                if (drv->querySystem() == "unknown")
+                    throw EvalError("derivation must have a 'system' attribute");
+
+                auto drvPath = drv->queryDrvPath();
+
+                nlohmann::json job;
+
+                job["nixName"] = drv->queryName();
+                job["system"] =drv->querySystem();
+                job["drvPath"] = drvPath;
+                job["description"] = drv->queryMetaString("description");
+                job["license"] = queryMetaStrings(state, *drv, "license", "shortName");
+                job["homepage"] = drv->queryMetaString("homepage");
+                job["maintainers"] = queryMetaStrings(state, *drv, "maintainers", "email");
+                job["schedulingPriority"] = drv->queryMetaInt("schedulingPriority", 100);
+                job["timeout"] = drv->queryMetaInt("timeout", 36000);
+                job["maxSilent"] = drv->queryMetaInt("maxSilent", 7200);
+                job["isChannel"] = drv->queryMetaBool("isHydraChannel", false);
+
+                /* If this is an aggregate, then get its constituents. */
+                auto a = v->attrs->get(state.symbols.create("_hydraAggregate"));
+                if (a && state.forceBool(*(*a)->value, *(*a)->pos)) {
+                    auto a = v->attrs->get(state.symbols.create("constituents"));
+                    if (!a)
+                        throw EvalError("derivation must have a ‘constituents’ attribute");
+
+                    PathSet context;
+                    state.coerceToString(*(*a)->pos, *(*a)->value, context, true, false);
+                    for (auto & i : context)
+                        if (i.at(0) == '!') {
+                            size_t index = i.find("!", 1);
+                            job["constituents"].push_back(string(i, index + 1));
+                        }
+
+                    state.forceList(*(*a)->value, *(*a)->pos);
+                    for (unsigned int n = 0; n < (*a)->value->listSize(); ++n) {
+                        auto v = (*a)->value->listElems()[n];
+                        state.forceValue(*v);
+                        if (v->type == tString)
+                            job["namedConstituents"].push_back(state.forceStringNoCtx(*v));
+                    }
+                }
+
+                /* Register the derivation as a GC root.  !!! This
+                   registers roots for jobs that we may have already
+                   done. */
+                auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
+                if (gcRootsDir != "" && localStore) {
+                    Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath));
+                    if (!pathExists(root))
+                        localStore->addPermRoot(localStore->parseStorePath(drvPath), root, false);
+                }
+
+                nlohmann::json out;
+                for (auto & j : outputs)
+                    out[j.first] = j.second;
+                job["outputs"] = std::move(out);
+
+                reply["job"] = std::move(job);
+            }
+
+            else if (v->type == tAttrs) {
+                auto attrs = nlohmann::json::array();
+                StringSet ss;
+                for (auto & i : v->attrs->lexicographicOrder()) {
+                    std::string name(i->name);
+                    if (name.find('.') != std::string::npos || name.find(' ') != std::string::npos) {
+                        printError("skipping job with illegal name '%s'", name);
+                        continue;
+                    }
+                    attrs.push_back(name);
+                }
+                reply["attrs"] = std::move(attrs);
+            }
+
+            else if (v->type == tNull)
+                ;
+
+            else throw TypeError("attribute '%s' is %s, which is not supported", attrPath, showType(*v));
+
+        } catch (EvalError & e) {
+            // Transmits the error we got from the previous evaluation
+            // in the JSON output.
+            reply["error"] = filterANSIEscapes(e.msg(), true);
+            // Don't forget to print it into the STDERR log, this is
+            // what's shown in the Hydra UI.
+            printError("error: %s", reply["error"]);
+        }
+
+        writeLine(to.get(), reply.dump());
+
+        /* If our RSS exceeds the maximum, exit.  The master will
+           start a new process. */
+        struct rusage r;
+        getrusage(RUSAGE_SELF, &r);
+        if ((size_t) r.ru_maxrss > maxMemorySize * 1024) break;
+    }
+
+    writeLine(to.get(), "restart");
+}

 int main(int argc, char * * argv)
 {
-    assert(lte("abc", "def"));
-    assert(lte("abc", "def.foo"));
-    assert(!lte("def", "abc"));
-    assert(lte("nixpkgs.hello", "nixpkgs"));
-    assert(lte("nixpkgs.hello", "nixpkgs.hellooo"));
-    assert(lte("gitAndTools.git-annex.x86_64-darwin", "gitAndTools.git-annex.x86_64-linux"));
-    assert(lte("gitAndTools.git-annex.x86_64-linux", "gitAndTools.git-annex-remote-b2.aarch64-linux"));
-
     /* Prevent undeclared dependencies in the evaluation via
        $NIX_PATH. */
     unsetenv("NIX_PATH");

@@ -225,116 +230,214 @@ int main(int argc, char * * argv)

     auto config = std::make_unique<::Config>();

-    auto initialHeapSize = config->getStrOption("evaluator_initial_heap_size", "");
-    if (initialHeapSize != "")
-        setenv("GC_INITIAL_HEAP_SIZE", initialHeapSize.c_str(), 1);
-
-    maxHeapSize = config->getIntOption("evaluator_max_heap_size", 1UL << 30);
+    auto nrWorkers = config->getIntOption("evaluator_workers", 1);
+    maxMemorySize = config->getIntOption("evaluator_max_memory_size", 4096);

     initNix();
     initGC();

-    /* Read the current heap size, which is the initial heap size. */
-    GC_prof_stats_s gc;
-    GC_get_prof_stats(&gc, sizeof(gc));
-    auto initialHeapSizeInt = gc.heapsize_full;
-
-    /* Then make sure the maximum heap size will be bigger than the initial heap size. */
-    if (initialHeapSizeInt > maxHeapSize) {
-        printInfo("warning: evaluator_initial_heap_size (%d) bigger than evaluator_max_heap_size (%d).", initialHeapSizeInt, maxHeapSize);
-        maxHeapSize = initialHeapSizeInt * 1.1;
-        printInfo(" evaluator_max_heap_size now set to %d.", maxHeapSize);
-    }
-
-    Path releaseExpr;
-
-    struct MyArgs : LegacyArgs, MixEvalArgs
-    {
-        using LegacyArgs::LegacyArgs;
-    };
-
-    MyArgs myArgs(baseNameOf(argv[0]), [&](Strings::iterator & arg, const Strings::iterator & end) {
-        if (*arg == "--gc-roots-dir")
-            gcRootsDir = getArg(*arg, arg, end);
-        else if (*arg == "--dry-run")
-            settings.readOnlyMode = true;
-        else if (*arg != "" && arg->at(0) == '-')
-            return false;
-        else
-            releaseExpr = *arg;
-        return true;
-    });
-
     myArgs.parseCmdline(argvToStrings(argc, argv));

-    JSONObject json(std::cout, true);
-    std::cout.flush();
-
-    do {
-
-        Pipe pipe;
-        pipe.create();
-
-        ProcessOptions options;
-        options.allowVfork = false;
-
-        GC_atfork_prepare();
-
-        auto pid = startProcess([&]() {
-            pipe.readSide = -1;
-
-            GC_atfork_child();
-            GC_start_mark_threads();
-
-            if (lastAttrPath != "") debug("resuming from '%s'", lastAttrPath);
-
-            /* FIXME: The build hook in conjunction with import-from-derivation is causing "unexpected EOF" during eval */
-            settings.builders = "";
-
-            /* Prevent access to paths outside of the Nix search path and
-               to the environment. */
-            evalSettings.restrictEval = true;
-
-            if (releaseExpr == "") throw UsageError("no expression specified");
-
-            if (gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");
-
-            EvalState state(myArgs.searchPath, openStore());
-
-            Bindings & autoArgs = *myArgs.getAutoArgs(state);
-
-            Value v;
-            state.evalFile(lookupFileArg(state, releaseExpr), v);
-
-            comma = lastAttrPath != "";
-
-            try {
-                findJobs(state, json, autoArgs, v, "");
-                lastAttrPath = "";
-            } catch (BailOut &) { }
-
-            writeFull(pipe.writeSide.get(), lastAttrPath);
-
-            exit(0);
-        }, options);
-
-        GC_atfork_parent();
-
-        pipe.writeSide = -1;
-
-        int status;
-        while (true) {
-            checkInterrupt();
-            if (waitpid(pid, &status, 0) == pid) break;
-            if (errno != EINTR) continue;
-        }
-
-        if (status != 0)
-            throw Exit(WIFEXITED(status) ? WEXITSTATUS(status) : 99);
-
-        maxHeapSize += 64 * 1024 * 1024;
-
-        lastAttrPath = drainFD(pipe.readSide.get());
-    } while (lastAttrPath != "");
+    /* FIXME: The build hook in conjunction with import-from-derivation is causing "unexpected EOF" during eval */
+    settings.builders = "";
+
+    /* Prevent access to paths outside of the Nix search path and
+       to the environment. */
+    evalSettings.restrictEval = true;
+
+    if (myArgs.dryRun) settings.readOnlyMode = true;
+
+    if (myArgs.releaseExpr == "") throw UsageError("no expression specified");
+
+    if (gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");
+
+    struct State
+    {
+        std::set<std::string> todo{""};
+        std::set<std::string> active;
+        nlohmann::json jobs;
+        std::exception_ptr exc;
+    };
+
+    std::condition_variable wakeup;
+
+    Sync<State> state_;
+
+    /* Start a handler thread per worker process. */
+    auto handler = [&]()
+    {
+        try {
+            pid_t pid = -1;
+            AutoCloseFD from, to;
+
+            while (true) {
+
+                /* Start a new worker process if necessary. */
+                if (pid == -1) {
+                    Pipe toPipe, fromPipe;
+                    toPipe.create();
+                    fromPipe.create();
+                    pid = startProcess(
+                        [&,
+                         to{std::make_shared<AutoCloseFD>(std::move(fromPipe.writeSide))},
+                         from{std::make_shared<AutoCloseFD>(std::move(toPipe.readSide))}
+                        ]()
+                        {
+                            try {
+                                EvalState state(myArgs.searchPath, openStore());
+                                Bindings & autoArgs = *myArgs.getAutoArgs(state);
+                                worker(state, autoArgs, *to, *from);
+                            } catch (std::exception & e) {
+                                nlohmann::json err;
+                                err["error"] = e.what();
+                                writeLine(to->get(), err.dump());
+                                // Don't forget to print it into the STDERR log, this is
+                                // what's shown in the Hydra UI.
+                                printError("error: %s", err["error"]);
+                            }
+                        },
+                        ProcessOptions { .allowVfork = false });
+                    from = std::move(fromPipe.readSide);
+                    to = std::move(toPipe.writeSide);
+                    debug("created worker process %d", pid);
+                }
+
+                /* Check whether the existing worker process is still there. */
+                auto s = readLine(from.get());
+                if (s == "restart") {
+                    pid = -1;
+                    continue;
+                } else if (s != "next") {
+                    auto json = nlohmann::json::parse(s);
+                    throw Error("worker error: %s", (std::string) json["error"]);
+                }
+
+                /* Wait for a job name to become available. */
+                std::string attrPath;
+
+                while (true) {
+                    checkInterrupt();
+                    auto state(state_.lock());
+                    if ((state->todo.empty() && state->active.empty()) || state->exc) {
+                        writeLine(to.get(), "exit");
+                        return;
+                    }
+                    if (!state->todo.empty()) {
+                        attrPath = *state->todo.begin();
+                        state->todo.erase(state->todo.begin());
+                        state->active.insert(attrPath);
+                        break;
+                    } else
+                        state.wait(wakeup);
+                }
+
+                /* Tell the worker to evaluate it. */
+                writeLine(to.get(), "do " + attrPath);
+
+                /* Wait for the response. */
+                auto response = nlohmann::json::parse(readLine(from.get()));
+
+                /* Handle the response. */
+                StringSet newAttrs;
+
+                if (response.find("job") != response.end()) {
+                    auto state(state_.lock());
+                    state->jobs[attrPath] = response["job"];
+                }
+
+                if (response.find("attrs") != response.end()) {
+                    for (auto & i : response["attrs"]) {
+                        auto s = (attrPath.empty() ? "" : attrPath + ".") + (std::string) i;
+                        newAttrs.insert(s);
+                    }
+                }
+
+                if (response.find("error") != response.end()) {
+                    auto state(state_.lock());
+                    state->jobs[attrPath]["error"] = response["error"];
+                }
+
+                /* Add newly discovered job names to the queue. */
+                {
+                    auto state(state_.lock());
+                    state->active.erase(attrPath);
+                    for (auto & s : newAttrs)
+                        state->todo.insert(s);
+                    wakeup.notify_all();
+                }
+            }
+        } catch (...) {
+            auto state(state_.lock());
+            state->exc = std::current_exception();
+            wakeup.notify_all();
+        }
+    };
+
+    std::vector<std::thread> threads;
+    for (size_t i = 0; i < nrWorkers; i++)
+        threads.emplace_back(std::thread(handler));
+
+    for (auto & thread : threads)
+        thread.join();
+
+    auto state(state_.lock());
+
+    if (state->exc)
+        std::rethrow_exception(state->exc);
+
+    /* For aggregate jobs that have named consistuents
+       (i.e. constituents that are a job name rather than a
+       derivation), look up the referenced job and add it to the
+       dependencies of the aggregate derivation. */
+    auto store = openStore();
+
+    for (auto i = state->jobs.begin(); i != state->jobs.end(); ++i) {
+        auto jobName = i.key();
+        auto & job = i.value();
+
+        auto named = job.find("namedConstituents");
+        if (named == job.end()) continue;
+
+        if (myArgs.dryRun) {
+            for (std::string jobName2 : *named) {
+                auto job2 = state->jobs.find(jobName2);
+                if (job2 == state->jobs.end())
+                    throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
+                std::string drvPath2 = (*job2)["drvPath"];
+                job["constituents"].push_back(drvPath2);
+            }
+        } else {
+            std::string drvPath = job["drvPath"];
+            auto drv = readDerivation(*store, drvPath);
+
+            for (std::string jobName2 : *named) {
+                auto job2 = state->jobs.find(jobName2);
+                if (job2 == state->jobs.end())
+                    throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
+                std::string drvPath2 = (*job2)["drvPath"];
+                auto drv2 = readDerivation(*store, drvPath2);
+                job["constituents"].push_back(drvPath2);
+                drv.inputDrvs[store->parseStorePath(drvPath2)] = {drv2.outputs.begin()->first};
+            }
+
+            std::string drvName(store->parseStorePath(drvPath).name());
+            assert(hasSuffix(drvName, drvExtension));
+            drvName.resize(drvName.size() - drvExtension.size());
+            auto h = hashDerivationModulo(*store, drv, true);
+            auto outPath = store->makeOutputPath("out", h, drvName);
+            drv.env["out"] = store->printStorePath(outPath);
+            drv.outputs.insert_or_assign("out", DerivationOutput(outPath.clone(), "", ""));
+            auto newDrvPath = store->printStorePath(writeDerivation(store, drv, drvName));
+
+            debug("rewrote aggregate derivation %s -> %s", drvPath, newDrvPath);
+
+            job["drvPath"] = newDrvPath;
+            job["outputs"]["out"] = store->printStorePath(outPath);
+        }
+
+        job.erase("namedConstituents");
+    }
+
+    std::cout << state->jobs.dump(2) << "\n";
     });
 }

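Note on the new evaluator architecture above: the rewritten hydra-eval-jobs replaces the old fork-and-restart loop with worker processes driven over a line-based pipe protocol — a worker announces readiness with "next", the master answers "do <attrPath>" or "exit", the worker sends back one JSON reply per request, and emits "restart" once its RSS exceeds the evaluator_max_memory_size budget. A minimal standalone sketch of that handshake, using plain POSIX pipes and fork() in place of Nix's Pipe/AutoCloseFD/startProcess helpers (the attribute path and JSON reply are placeholders):

    // Sketch only: illustrates the "next" / "do <attr>" / "exit" handshake.
    #include <cstdio>
    #include <string>
    #include <sys/wait.h>
    #include <unistd.h>

    static std::string readLine(int fd)
    {
        std::string s; char c;
        while (read(fd, &c, 1) == 1 && c != '\n') s += c;
        return s;
    }

    static void writeLine(int fd, const std::string & s)
    {
        std::string t = s + "\n";
        write(fd, t.data(), t.size());
    }

    int main()
    {
        int toWorker[2], fromWorker[2];
        pipe(toWorker); pipe(fromWorker);

        if (fork() == 0) {          /* worker */
            while (true) {
                writeLine(fromWorker[1], "next");        // ready for a job
                auto s = readLine(toWorker[0]);
                if (s == "exit") break;
                if (s.rfind("do ", 0) != 0) _exit(1);
                std::string attrPath = s.substr(3);
                // ... evaluate attrPath here, then reply with one JSON line ...
                writeLine(fromWorker[1], "{\"attrs\":[]}");
            }
            _exit(0);
        }

        /* master: hand out one job, then shut the worker down */
        if (readLine(fromWorker[0]) == "next") {
            writeLine(toWorker[1], "do hello.x86_64-linux");
            printf("reply: %s\n", readLine(fromWorker[0]).c_str());
        }
        if (readLine(fromWorker[0]) == "next")
            writeLine(toWorker[1], "exit");
        wait(nullptr);
        return 0;
    }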
src/hydra-evaluator/Makefile.am
@@ -2,4 +2,4 @@ bin_PROGRAMS = hydra-evaluator

 hydra_evaluator_SOURCES = hydra-evaluator.cc
 hydra_evaluator_LDADD = $(NIX_LIBS) -lpqxx
-hydra_evaluator_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra
+hydra_evaluator_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations

src/hydra-evaluator/hydra-evaluator.cc
@@ -15,6 +15,13 @@ using namespace nix;

 typedef std::pair<std::string, std::string> JobsetName;

+enum class EvaluationStyle
+{
+    SCHEDULE = 1,
+    ONESHOT = 2,
+    ONE_AT_A_TIME = 3,
+};
+
 struct Evaluator
 {
     std::unique_ptr<Config> config;

@@ -24,6 +31,7 @@ struct Evaluator
     struct Jobset
     {
         JobsetName name;
+        std::optional<EvaluationStyle> evaluation_style;
         time_t lastCheckedTime, triggerTime;
         int checkInterval;
         Pid pid;

@@ -60,9 +68,10 @@ struct Evaluator
         pqxx::work txn(*conn);

         auto res = txn.exec
-            ("select project, j.name, lastCheckedTime, triggerTime, checkInterval from Jobsets j join Projects p on j.project = p.name "
+            ("select project, j.name, lastCheckedTime, triggerTime, checkInterval, j.enabled as jobset_enabled from Jobsets j join Projects p on j.project = p.name "
             "where j.enabled != 0 and p.enabled != 0");

+
         auto state(state_.lock());

         std::set<JobsetName> seen;

@@ -78,6 +87,17 @@ struct Evaluator
             jobset.lastCheckedTime = row["lastCheckedTime"].as<time_t>(0);
             jobset.triggerTime = row["triggerTime"].as<time_t>(notTriggered);
             jobset.checkInterval = row["checkInterval"].as<time_t>();
+            switch (row["jobset_enabled"].as<int>(0)) {
+                case 1:
+                    jobset.evaluation_style = EvaluationStyle::SCHEDULE;
+                    break;
+                case 2:
+                    jobset.evaluation_style = EvaluationStyle::ONESHOT;
+                    break;
+                case 3:
+                    jobset.evaluation_style = EvaluationStyle::ONE_AT_A_TIME;
+                    break;
+            }

             seen.insert(name);
         }

@@ -128,19 +148,100 @@ struct Evaluator
             childStarted.notify_one();
     }

+    bool shouldEvaluate(Jobset & jobset)
+    {
+        if (jobset.pid != -1) {
+            // Already running.
+            debug("shouldEvaluate %s:%s? no: already running",
+                  jobset.name.first, jobset.name.second);
+            return false;
+        }
+
+        if (jobset.triggerTime != std::numeric_limits<time_t>::max()) {
+            // An evaluation of this Jobset is requested
+            debug("shouldEvaluate %s:%s? yes: requested",
+                  jobset.name.first, jobset.name.second);
+            return true;
+        }
+
+        if (jobset.checkInterval <= 0) {
+            // Automatic scheduling is disabled. We allow requested
+            // evaluations, but never schedule start one.
+            debug("shouldEvaluate %s:%s? no: checkInterval <= 0",
+                  jobset.name.first, jobset.name.second);
+            return false;
+        }
+
+        if (jobset.lastCheckedTime + jobset.checkInterval <= time(0)) {
+            // Time to schedule a fresh evaluation. If the jobset
+            // is a ONE_AT_A_TIME jobset, ensure the previous jobset
+            // has no remaining, unfinished work.
+
+            auto conn(dbPool.get());
+
+            pqxx::work txn(*conn);
+
+            if (jobset.evaluation_style == EvaluationStyle::ONE_AT_A_TIME) {
+                auto evaluation_res = txn.parameterized
+                    ("select id from JobsetEvals "
+                     "where project = $1 and jobset = $2 "
+                     "order by id desc limit 1")
+                    (jobset.name.first)
+                    (jobset.name.second)
+                    .exec();
+
+                if (evaluation_res.empty()) {
+                    // First evaluation, so allow scheduling.
+                    debug("shouldEvaluate(one-at-a-time) %s:%s? yes: no prior eval",
+                          jobset.name.first, jobset.name.second);
+                    return true;
+                }
+
+                auto evaluation_id = evaluation_res[0][0].as<int>();
+
+                auto unfinished_build_res = txn.parameterized
+                    ("select id from Builds "
+                     "join JobsetEvalMembers "
+                     " on (JobsetEvalMembers.build = Builds.id) "
+                     "where JobsetEvalMembers.eval = $1 "
+                     " and builds.finished = 0 "
+                     " limit 1")
+                    (evaluation_id)
+                    .exec();
+
+                // If the previous evaluation has no unfinished builds
+                // schedule!
+                if (unfinished_build_res.empty()) {
+                    debug("shouldEvaluate(one-at-a-time) %s:%s? yes: no unfinished builds",
+                          jobset.name.first, jobset.name.second);
+                    return true;
+                } else {
+                    debug("shouldEvaluate(one-at-a-time) %s:%s? no: at least one unfinished build",
+                          jobset.name.first, jobset.name.second);
+                    return false;
+                }
+
+            } else {
+                // EvaluationStyle::ONESHOT, EvaluationStyle::SCHEDULED
+                debug("shouldEvaluate(oneshot/scheduled) %s:%s? yes: checkInterval elapsed",
+                      jobset.name.first, jobset.name.second);
+                return true;
+            }
+        }
+
+        return false;
+    }
+
     void startEvals(State & state)
     {
         std::vector<Jobsets::iterator> sorted;

-        time_t now = time(0);
-
         /* Filter out jobsets that have been evaluated recently and have
            not been triggered. */
         for (auto i = state.jobsets.begin(); i != state.jobsets.end(); ++i)
             if (evalOne ||
-                (i->second.pid == -1 &&
-                 (i->second.triggerTime != std::numeric_limits<time_t>::max() ||
-                  (i->second.checkInterval > 0 && i->second.lastCheckedTime + i->second.checkInterval <= now))))
+                (i->second.evaluation_style && shouldEvaluate(i->second)))
                 sorted.push_back(i);

         /* Put jobsets in order of ascending trigger time, last checked

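The hydra-evaluator change above keys three scheduling modes off the Jobsets.enabled column (1 = SCHEDULE, 2 = ONESHOT, 3 = ONE_AT_A_TIME) and only lets a ONE_AT_A_TIME jobset start a fresh evaluation once the previous one has no unfinished builds. A condensed, self-contained sketch of that decision logic — the JobsetEvals/Builds queries from the diff are stubbed out as a hypothetical hasUnfinishedBuilds() helper:

    #include <ctime>
    #include <limits>
    #include <optional>
    #include <sys/types.h>

    enum class EvaluationStyle { SCHEDULE = 1, ONESHOT = 2, ONE_AT_A_TIME = 3 };

    struct Jobset {
        std::optional<EvaluationStyle> evaluation_style;
        time_t lastCheckedTime = 0;
        time_t triggerTime = std::numeric_limits<time_t>::max();
        int checkInterval = 0;
        pid_t pid = -1;
    };

    // Hypothetical stand-in for the "unfinished builds of the last eval" query.
    static bool hasUnfinishedBuilds(const Jobset &) { return false; }

    static bool shouldEvaluate(Jobset & jobset)
    {
        if (jobset.pid != -1)
            return false;                        // already running
        if (jobset.triggerTime != std::numeric_limits<time_t>::max())
            return true;                         // evaluation explicitly requested
        if (jobset.checkInterval <= 0)
            return false;                        // automatic scheduling disabled
        if (jobset.lastCheckedTime + jobset.checkInterval > time(nullptr))
            return false;                        // checked too recently
        if (jobset.evaluation_style == EvaluationStyle::ONE_AT_A_TIME)
            return !hasUnfinishedBuilds(jobset); // wait for the previous eval
        return true;                             // SCHEDULE / ONESHOT
    }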
src/hydra-queue-runner/Makefile.am
@@ -3,5 +3,5 @@ bin_PROGRAMS = hydra-queue-runner
 hydra_queue_runner_SOURCES = hydra-queue-runner.cc queue-monitor.cc dispatcher.cc \
   builder.cc build-result.cc build-remote.cc \
   build-result.hh counter.hh token-server.hh state.hh db.hh
-hydra_queue_runner_LDADD = $(NIX_LIBS) -lpqxx
-hydra_queue_runner_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra
+hydra_queue_runner_LDADD = $(NIX_LIBS) -lpqxx -lnixrust
+hydra_queue_runner_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations

src/hydra-queue-runner/build-remote.cc
@@ -82,10 +82,10 @@ static void openConnection(Machine::ptr machine, Path tmpDir, int stderrFD, Chil


 static void copyClosureTo(std::timed_mutex & sendMutex, ref<Store> destStore,
-    FdSource & from, FdSink & to, const PathSet & paths,
+    FdSource & from, FdSink & to, const StorePathSet & paths,
     bool useSubstitutes = false)
 {
-    PathSet closure;
+    StorePathSet closure;
     for (auto & path : paths)
         destStore->computeFSClosure(path, closure);

@@ -94,20 +94,21 @@ static void copyClosureTo(std::timed_mutex & sendMutex, ref<Store> destStore,
        garbage-collect paths that are already there.  Optionally, ask
        the remote host to substitute missing paths. */
     // FIXME: substitute output pollutes our build log
-    to << cmdQueryValidPaths << 1 << useSubstitutes << closure;
+    to << cmdQueryValidPaths << 1 << useSubstitutes;
+    writeStorePaths(*destStore, to, closure);
     to.flush();

     /* Get back the set of paths that are already valid on the remote
        host. */
-    auto present = readStorePaths<PathSet>(*destStore, from);
+    auto present = readStorePaths<StorePathSet>(*destStore, from);

     if (present.size() == closure.size()) return;

-    Paths sorted = destStore->topoSortPaths(closure);
+    auto sorted = destStore->topoSortPaths(closure);

-    Paths missing;
+    StorePathSet missing;
     for (auto i = sorted.rbegin(); i != sorted.rend(); ++i)
-        if (present.find(*i) == present.end()) missing.push_back(*i);
+        if (!present.count(*i)) missing.insert(i->clone());

     printMsg(lvlDebug, format("sending %1% missing paths") % missing.size());

@@ -131,7 +132,7 @@ void State::buildRemote(ref<Store> destStore,
 {
     assert(BuildResult::TimedOut == 8);

-    string base = baseNameOf(step->drvPath);
+    string base(step->drvPath.to_string());
     result.logFile = logDir + "/" + string(base, 0, 2) + "/" + string(base, 2);
     AutoDelete autoDelete(result.logFile, false);

@@ -217,22 +218,22 @@ void State::buildRemote(ref<Store> destStore,
        outputs of the input derivations. */
     updateStep(ssSendingInputs);

-    PathSet inputs;
-    BasicDerivation basicDrv(step->drv);
+    StorePathSet inputs;
+    BasicDerivation basicDrv(*step->drv);

     if (sendDerivation)
-        inputs.insert(step->drvPath);
+        inputs.insert(step->drvPath.clone());
     else
-        for (auto & p : step->drv.inputSrcs)
-            inputs.insert(p);
+        for (auto & p : step->drv->inputSrcs)
+            inputs.insert(p.clone());

-    for (auto & input : step->drv.inputDrvs) {
-        Derivation drv2 = readDerivation(input.first);
+    for (auto & input : step->drv->inputDrvs) {
+        Derivation drv2 = readDerivation(*localStore, localStore->printStorePath(input.first));
         for (auto & name : input.second) {
             auto i = drv2.outputs.find(name);
             if (i == drv2.outputs.end()) continue;
-            inputs.insert(i->second.path);
-            basicDrv.inputSrcs.insert(i->second.path);
+            inputs.insert(i->second.path.clone());
+            basicDrv.inputSrcs.insert(i->second.path.clone());
         }
     }

@@ -241,14 +242,15 @@ void State::buildRemote(ref<Store> destStore,
        this will copy the inputs to the binary cache from the local
        store. */
     if (localStore != std::shared_ptr<Store>(destStore))
-        copyClosure(ref<Store>(localStore), destStore, step->drv.inputSrcs, NoRepair, NoCheckSigs);
+        copyClosure(ref<Store>(localStore), destStore, step->drv->inputSrcs, NoRepair, NoCheckSigs);

     /* Copy the input closure. */
     if (!machine->isLocalhost()) {
         auto mc1 = std::make_shared<MaintainCount<counter>>(nrStepsWaiting);
         mc1.reset();
         MaintainCount<counter> mc2(nrStepsCopyingTo);
-        printMsg(lvlDebug, format("sending closure of ‘%1%’ to ‘%2%’") % step->drvPath % machine->sshName);
+        printMsg(lvlDebug, "sending closure of ‘%s’ to ‘%s’",
+            localStore->printStorePath(step->drvPath), machine->sshName);

         auto now1 = std::chrono::steady_clock::now();

@@ -272,14 +274,19 @@ void State::buildRemote(ref<Store> destStore,
     logFD = -1;

     /* Do the build. */
-    printMsg(lvlDebug, format("building ‘%1%’ on ‘%2%’") % step->drvPath % machine->sshName);
+    printMsg(lvlDebug, "building ‘%s’ on ‘%s’",
+        localStore->printStorePath(step->drvPath),
+        machine->sshName);

     updateStep(ssBuilding);

-    if (sendDerivation)
-        to << cmdBuildPaths << PathSet({step->drvPath});
-    else
-        to << cmdBuildDerivation << step->drvPath << basicDrv;
+    if (sendDerivation) {
+        to << cmdBuildPaths;
+        writeStorePaths(*localStore, to, singleton(step->drvPath));
+    } else {
+        to << cmdBuildDerivation << localStore->printStorePath(step->drvPath);
+        writeDerivation(to, *localStore, basicDrv);
+    }
     to << maxSilentTime << buildTimeout;
     if (GET_PROTOCOL_MINOR(remoteVersion) >= 2)
         to << maxLogSize;

@@ -380,7 +387,8 @@ void State::buildRemote(ref<Store> destStore,
     /* If the path was substituted or already valid, then we didn't
        get a build log. */
     if (result.isCached) {
-        printMsg(lvlInfo, format("outputs of ‘%1%’ substituted or already valid on ‘%2%’") % step->drvPath % machine->sshName);
+        printMsg(lvlInfo, "outputs of ‘%s’ substituted or already valid on ‘%s’",
+            localStore->printStorePath(step->drvPath), machine->sshName);
         unlink(result.logFile.c_str());
         result.logFile = "";
     }

@@ -395,13 +403,12 @@ void State::buildRemote(ref<Store> destStore,

     auto now1 = std::chrono::steady_clock::now();

-    PathSet outputs;
-    for (auto & output : step->drv.outputs)
-        outputs.insert(output.second.path);
+    auto outputs = step->drv->outputPaths();

     /* Query the size of the output paths. */
     size_t totalNarSize = 0;
-    to << cmdQueryPathInfos << outputs;
+    to << cmdQueryPathInfos;
+    writeStorePaths(*localStore, to, outputs);
     to.flush();
     while (true) {
         if (readString(from) == "") break;

@@ -416,8 +423,8 @@ void State::buildRemote(ref<Store> destStore,
         return;
     }

-    printMsg(lvlDebug, format("copying outputs of ‘%s’ from ‘%s’ (%d bytes)")
-        % step->drvPath % machine->sshName % totalNarSize);
+    printMsg(lvlDebug, "copying outputs of ‘%s’ from ‘%s’ (%d bytes)",
+        localStore->printStorePath(step->drvPath), machine->sshName, totalNarSize);

     /* Block until we have the required amount of memory
        available, which is twice the NAR size (namely the

@@ -431,10 +438,11 @@ void State::buildRemote(ref<Store> destStore,

     auto resMs = std::chrono::duration_cast<std::chrono::milliseconds>(resStop - resStart).count();
     if (resMs >= 1000)
-        printMsg(lvlError, format("warning: had to wait %d ms for %d memory tokens for %s")
-            % resMs % totalNarSize % step->drvPath);
+        printMsg(lvlError, "warning: had to wait %d ms for %d memory tokens for %s",
+            resMs, totalNarSize, localStore->printStorePath(step->drvPath));

-    to << cmdExportPaths << 0 << outputs;
+    to << cmdExportPaths << 0;
+    writeStorePaths(*localStore, to, outputs);
     to.flush();
     destStore->importPaths(from, result.accessor, NoCheckSigs);

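A recurring pattern in the build-remote.cc changes above: with the move from PathSet (plain strings) to the opaque StorePath type, path sets can no longer be streamed with `to << paths`; they are routed through writeStorePaths(), which renders each path via the store before putting it on the wire. A rough standalone sketch of that count-then-strings framing — Sink and Store here are simplified stand-ins, not Nix's real classes:

    #include <cstdint>
    #include <set>
    #include <string>
    #include <vector>

    // Simplified stand-in for Nix's Sink: little-endian uint64s, strings
    // written as length + data padded to a multiple of 8 bytes.
    struct Sink
    {
        std::vector<uint8_t> bytes;
        void writeInt(uint64_t n)
        {
            for (int i = 0; i < 8; i++) bytes.push_back((n >> (8 * i)) & 0xff);
        }
        void writeString(const std::string & s)
        {
            writeInt(s.size());
            bytes.insert(bytes.end(), s.begin(), s.end());
            while (bytes.size() % 8) bytes.push_back(0);
        }
    };

    // Simplified stand-in for Store::printStorePath().
    struct Store
    {
        std::string storeDir = "/nix/store";
        std::string printStorePath(const std::string & p) const
        {
            return storeDir + "/" + p;
        }
    };

    // Analogue of writeStorePaths(store, to, paths): a count, then each
    // path rendered as a full store-path string.
    static void writeStorePaths(const Store & store, Sink & to,
        const std::set<std::string> & paths)
    {
        to.writeInt(paths.size());
        for (auto & p : paths)
            to.writeString(store.printStorePath(p));
    }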
@@ -14,16 +14,14 @@ BuildOutput getBuildOutput(nix::ref<Store> store,
     BuildOutput res;

     /* Compute the closure size. */
-    PathSet outputs;
-    for (auto & output : drv.outputs)
-        outputs.insert(output.second.path);
-    PathSet closure;
+    auto outputs = drv.outputPaths();
+    StorePathSet closure;
     for (auto & output : outputs)
-        store->computeFSClosure(output, closure);
+        store->computeFSClosure(singleton(output), closure);
     for (auto & path : closure) {
         auto info = store->queryPathInfo(path);
         res.closureSize += info->narSize;
-        if (outputs.find(path) != outputs.end()) res.size += info->narSize;
+        if (outputs.count(path)) res.size += info->narSize;
     }

     /* Get build products. */
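`singleton(output)` above wraps one `StorePath` in a set because `computeFSClosure` takes a path set and `StorePath` is no longer a plain string. One plausible shape for such a helper (hypothetical; the real one comes from Nix/Hydra utility headers and has to cope with the move-only `StorePath`, e.g. by taking ownership):

```cpp
#include <set>
#include <string>
#include <utility>

// Sketch: wrap a single value in a std::set. For a copyable type this
// is all there is to it; for a move-only type the real helper must
// move or clone rather than copy.
template<typename T>
std::set<T> singleton(T value)
{
    std::set<T> s;
    s.insert(std::move(value));
    return s;
}

// Usage with a copyable stand-in type:
// auto s = singleton(std::string("example"));
```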
@@ -39,11 +37,13 @@ BuildOutput getBuildOutput(nix::ref<Store> store,
         , std::regex::extended);

     for (auto & output : outputs) {
-        Path failedFile = output + "/nix-support/failed";
+        auto outputS = store->printStorePath(output);
+
+        Path failedFile = outputS + "/nix-support/failed";
         if (accessor->stat(failedFile).type == FSAccessor::Type::tRegular)
             res.failed = true;

-        Path productsFile = output + "/nix-support/hydra-build-products";
+        Path productsFile = outputS + "/nix-support/hydra-build-products";
         if (accessor->stat(productsFile).type != FSAccessor::Type::tRegular)
             continue;

@@ -72,7 +72,7 @@ BuildOutput getBuildOutput(nix::ref<Store> store,
             auto st = accessor->stat(product.path);
             if (st.type == FSAccessor::Type::tMissing) continue;

-            product.name = product.path == output ? "" : baseNameOf(product.path);
+            product.name = product.path == store->printStorePath(output) ? "" : baseNameOf(product.path);

             if (st.type == FSAccessor::Type::tRegular) {
                 product.isRegular = true;
@@ -91,14 +91,14 @@ BuildOutput getBuildOutput(nix::ref<Store> store,
     if (!explicitProducts) {
         for (auto & output : drv.outputs) {
             BuildProduct product;
-            product.path = output.second.path;
+            product.path = store->printStorePath(output.second.path);
             product.type = "nix-build";
             product.subtype = output.first == "out" ? "" : output.first;
-            product.name = storePathToName(product.path);
+            product.name = output.second.path.name();

             auto st = accessor->stat(product.path);
             if (st.type == FSAccessor::Type::tMissing)
-                throw Error(format("getting status of ‘%1%’") % product.path);
+                throw Error("getting status of ‘%s’", product.path);
             if (st.type == FSAccessor::Type::tDirectory)
                 res.products.push_back(product);
         }
@@ -106,7 +106,7 @@ BuildOutput getBuildOutput(nix::ref<Store> store,

     /* Get the release name from $output/nix-support/hydra-release-name. */
     for (auto & output : outputs) {
-        Path p = output + "/nix-support/hydra-release-name";
+        auto p = store->printStorePath(output) + "/nix-support/hydra-release-name";
         if (accessor->stat(p).type != FSAccessor::Type::tRegular) continue;
         try {
             res.releaseName = trim(accessor->readFile(p));
@@ -116,7 +116,7 @@ BuildOutput getBuildOutput(nix::ref<Store> store,

     /* Get metrics. */
     for (auto & output : outputs) {
-        Path metricsFile = output + "/nix-support/hydra-metrics";
+        auto metricsFile = store->printStorePath(output) + "/nix-support/hydra-metrics";
         if (accessor->stat(metricsFile).type != FSAccessor::Type::tRegular) continue;
         for (auto & line : tokenizeString<Strings>(accessor->readFile(metricsFile), "\n")) {
             auto fields = tokenizeString<std::vector<std::string>>(line);
@@ -18,7 +18,7 @@ void setThreadName(const std::string & name)

 void State::builder(MachineReservation::ptr reservation)
 {
-    setThreadName("bld~" + baseNameOf(reservation->step->drvPath));
+    setThreadName("bld~" + std::string(reservation->step->drvPath.to_string()));

     StepResult res = sRetry;

@@ -39,8 +39,10 @@ void State::builder(MachineReservation::ptr reservation)
             auto destStore = getDestStore();
             res = doBuildStep(destStore, reservation, activeStep);
         } catch (std::exception & e) {
-            printMsg(lvlError, format("uncaught exception building ‘%1%’ on ‘%2%’: %3%")
-                % reservation->step->drvPath % reservation->machine->sshName % e.what());
+            printMsg(lvlError, "uncaught exception building ‘%s’ on ‘%s’: %s",
+                localStore->printStorePath(reservation->step->drvPath),
+                reservation->machine->sshName,
+                e.what());
         }
     }

@@ -60,7 +62,7 @@ void State::builder(MachineReservation::ptr reservation)
         nrRetries++;
         if (step_->tries > maxNrRetries) maxNrRetries = step_->tries; // yeah yeah, not atomic
         int delta = retryInterval * std::pow(retryBackoff, step_->tries - 1) + (rand() % 10);
-        printMsg(lvlInfo, format("will retry ‘%1%’ after %2%s") % step->drvPath % delta);
+        printMsg(lvlInfo, "will retry ‘%s’ after %ss", localStore->printStorePath(step->drvPath), delta);
         step_->after = std::chrono::system_clock::now() + std::chrono::seconds(delta);
     }

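For reference, the retry delay computed above is a geometric backoff with a little random jitter. A standalone restatement (the `retryBackoff` of 3.0 appears later in state.hh; the base interval of 60 seconds is an assumption for illustration):

```cpp
#include <cmath>
#include <cstdlib>

// Sketch: delay before retrying a failed step, in seconds. It grows as
// retryInterval * retryBackoff^(tries - 1), plus 0-9 seconds of jitter
// so that simultaneous failures don't all retry at the same instant.
int retryDelaySeconds(int tries, int retryInterval = 60 /* assumed */,
    float retryBackoff = 3.0)
{
    return retryInterval * std::pow(retryBackoff, tries - 1) + (std::rand() % 10);
}
```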
@@ -95,7 +97,7 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
        cancelled (namely if there are no more Builds referring to
        it). */
     BuildID buildId;
-    Path buildDrvPath;
+    std::optional<StorePath> buildDrvPath;
     unsigned int maxSilentTime, buildTimeout;
     unsigned int repeats = step->isDeterministic ? 1 : 0;

@@ -116,7 +118,7 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
            possibility, we retry this step (putting it back in
            the runnable queue). If there are really no strong
            pointers to the step, it will be deleted. */
-        printMsg(lvlInfo, format("maybe cancelling build step ‘%1%’") % step->drvPath);
+        printMsg(lvlInfo, "maybe cancelling build step ‘%s’", localStore->printStorePath(step->drvPath));
         return sMaybeCancelled;
     }

@@ -138,15 +140,15 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
         if (!build) build = *dependents.begin();

         buildId = build->id;
-        buildDrvPath = build->drvPath;
+        buildDrvPath = build->drvPath.clone();
         maxSilentTime = build->maxSilentTime;
         buildTimeout = build->buildTimeout;

         printInfo("performing step ‘%s’ %d times on ‘%s’ (needed by build %d and %d others)",
-            step->drvPath, repeats + 1, machine->sshName, buildId, (dependents.size() - 1));
+            localStore->printStorePath(step->drvPath), repeats + 1, machine->sshName, buildId, (dependents.size() - 1));
     }

-    bool quit = buildId == buildOne && step->drvPath == buildDrvPath;
+    bool quit = buildId == buildOne && step->drvPath == *buildDrvPath;

     RemoteResult result;
     BuildOutput res;

@@ -166,7 +168,7 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
         try {
             auto store = destStore.dynamic_pointer_cast<BinaryCacheStore>();
             if (uploadLogsToBinaryCache && store && pathExists(result.logFile)) {
-                store->upsertFile("log/" + baseNameOf(step->drvPath), readFile(result.logFile), "text/plain; charset=utf-8");
+                store->upsertFile("log/" + std::string(step->drvPath.to_string()), readFile(result.logFile), "text/plain; charset=utf-8");
                 unlink(result.logFile.c_str());
             }
         } catch (...) {
@@ -218,7 +220,7 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,

     if (result.stepStatus == bsSuccess) {
         updateStep(ssPostProcessing);
-        res = getBuildOutput(destStore, ref<FSAccessor>(result.accessor), step->drv);
+        res = getBuildOutput(destStore, ref<FSAccessor>(result.accessor), *step->drv);
     }

     result.accessor = 0;
@@ -255,8 +257,8 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
     /* The step had a hopefully temporary failure (e.g. network
        issue). Retry a number of times. */
     if (result.canRetry) {
-        printMsg(lvlError, format("possibly transient failure building ‘%1%’ on ‘%2%’: %3%")
-            % step->drvPath % machine->sshName % result.errorMsg);
+        printMsg(lvlError, "possibly transient failure building ‘%s’ on ‘%s’: %s",
+            localStore->printStorePath(step->drvPath), machine->sshName, result.errorMsg);
         assert(stepNr);
         bool retry;
         {
@@ -275,7 +277,7 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,

         assert(stepNr);

-        for (auto & path : step->drv.outputPaths())
+        for (auto & path : step->drv->outputPaths())
             addRoot(path);

         /* Register success in the database for all Build objects that
@@ -308,7 +310,8 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
                no new referrers can have been added in the
                meantime or be added afterwards. */
             if (direct.empty()) {
-                printMsg(lvlDebug, format("finishing build step ‘%1%’") % step->drvPath);
+                printMsg(lvlDebug, "finishing build step ‘%s’",
+                    localStore->printStorePath(step->drvPath));
                 steps_->erase(step->drvPath);
             }
         }
@@ -373,96 +376,8 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
             }
         }

-    } else {
-
-        /* Register failure in the database for all Build objects that
-           directly or indirectly depend on this step. */
-
-        std::vector<BuildID> dependentIDs;
-
-        while (true) {
-            /* Get the builds and steps that depend on this step. */
-            std::set<Build::ptr> indirect;
-            {
-                auto steps_(steps.lock());
-                std::set<Step::ptr> steps;
-                getDependents(step, indirect, steps);
-
-                /* If there are no builds left, delete all referring
-                   steps from ‘steps’. As for the success case, we can
-                   be certain no new referrers can be added. */
-                if (indirect.empty()) {
-                    for (auto & s : steps) {
-                        printMsg(lvlDebug, format("finishing build step ‘%1%’") % s->drvPath);
-                        steps_->erase(s->drvPath);
-                    }
-                }
-            }
-
-            if (indirect.empty() && stepFinished) break;
-
-            /* Update the database. */
-            {
-                auto mc = startDbUpdate();
-
-                pqxx::work txn(*conn);
-
-                /* Create failed build steps for every build that
-                   depends on this, except when this step is cached
-                   and is the top-level of that build (since then it's
-                   redundant with the build's isCachedBuild field). */
-                for (auto & build2 : indirect) {
-                    if ((result.stepStatus == bsCachedFailure && build2->drvPath == step->drvPath) ||
-                        (result.stepStatus != bsCachedFailure && buildId == build2->id) ||
-                        build2->finishedInDB)
-                        continue;
-                    createBuildStep(txn, 0, build2->id, step, machine->sshName,
-                        result.stepStatus, result.errorMsg, buildId == build2->id ? 0 : buildId);
-                }
-
-                /* Mark all builds that depend on this derivation as failed. */
-                for (auto & build2 : indirect) {
-                    if (build2->finishedInDB) continue;
-                    printMsg(lvlError, format("marking build %1% as failed") % build2->id);
-                    txn.exec_params0
-                        ("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $4, isCachedBuild = $5, notificationPendingSince = $4 where id = $1 and finished = 0",
-                         build2->id,
-                         (int) (build2->drvPath != step->drvPath && result.buildStatus() == bsFailed ? bsDepFailed : result.buildStatus()),
-                         result.startTime,
-                         result.stopTime,
-                         result.stepStatus == bsCachedFailure ? 1 : 0);
-                    nrBuildsDone++;
-                }
-
-                /* Remember failed paths in the database so that they
-                   won't be built again. */
-                if (result.stepStatus != bsCachedFailure && result.canCache)
-                    for (auto & path : step->drv.outputPaths())
-                        txn.exec_params0("insert into FailedPaths values ($1)", path);
-
-                txn.commit();
-            }
-
-            stepFinished = true;
-
-            /* Remove the indirect dependencies from ‘builds’. This
-               will cause them to be destroyed. */
-            for (auto & b : indirect) {
-                auto builds_(builds.lock());
-                b->finishedInDB = true;
-                builds_->erase(b->id);
-                dependentIDs.push_back(b->id);
-                if (buildOne == b->id) quit = true;
-            }
-        }
-
-        /* Send notification about this build and its dependents. */
-        {
-            pqxx::work txn(*conn);
-            notifyBuildFinished(txn, buildId, dependentIDs);
-            txn.commit();
-        }
-    }
+    } else
+        failStep(*conn, step, buildId, result, machine, stepFinished, quit);

     // FIXME: keep stats about aborted steps?
     nrStepsDone++;
@@ -478,8 +393,109 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
 }


-void State::addRoot(const Path & storePath)
+void State::failStep(
+    Connection & conn,
+    Step::ptr step,
+    BuildID buildId,
+    const RemoteResult & result,
+    Machine::ptr machine,
+    bool & stepFinished,
+    bool & quit)
 {
-    auto root = rootsDir + "/" + baseNameOf(storePath);
+    /* Register failure in the database for all Build objects that
+       directly or indirectly depend on this step. */
+
+    std::vector<BuildID> dependentIDs;
+
+    while (true) {
+        /* Get the builds and steps that depend on this step. */
+        std::set<Build::ptr> indirect;
+        {
+            auto steps_(steps.lock());
+            std::set<Step::ptr> steps;
+            getDependents(step, indirect, steps);
+
+            /* If there are no builds left, delete all referring
+               steps from ‘steps’. As for the success case, we can
+               be certain no new referrers can be added. */
+            if (indirect.empty()) {
+                for (auto & s : steps) {
+                    printMsg(lvlDebug, "finishing build step ‘%s’",
+                        localStore->printStorePath(s->drvPath));
+                    steps_->erase(s->drvPath);
+                }
+            }
+        }
+
+        if (indirect.empty() && stepFinished) break;
+
+        /* Update the database. */
+        {
+            auto mc = startDbUpdate();
+
+            pqxx::work txn(conn);
+
+            /* Create failed build steps for every build that
+               depends on this, except when this step is cached
+               and is the top-level of that build (since then it's
+               redundant with the build's isCachedBuild field). */
+            for (auto & build : indirect) {
+                if ((result.stepStatus == bsCachedFailure && build->drvPath == step->drvPath) ||
+                    ((result.stepStatus != bsCachedFailure && result.stepStatus != bsUnsupported) && buildId == build->id) ||
+                    build->finishedInDB)
+                    continue;
+                createBuildStep(txn,
+                    0, build->id, step, machine ? machine->sshName : "",
+                    result.stepStatus, result.errorMsg, buildId == build->id ? 0 : buildId);
+            }
+
+            /* Mark all builds that depend on this derivation as failed. */
+            for (auto & build : indirect) {
+                if (build->finishedInDB) continue;
+                printMsg(lvlError, format("marking build %1% as failed") % build->id);
+                txn.exec_params0
+                    ("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $4, isCachedBuild = $5, notificationPendingSince = $4 where id = $1 and finished = 0",
+                     build->id,
+                     (int) (build->drvPath != step->drvPath && result.buildStatus() == bsFailed ? bsDepFailed : result.buildStatus()),
+                     result.startTime,
+                     result.stopTime,
+                     result.stepStatus == bsCachedFailure ? 1 : 0);
+                nrBuildsDone++;
+            }
+
+            /* Remember failed paths in the database so that they
+               won't be built again. */
+            if (result.stepStatus != bsCachedFailure && result.canCache)
+                for (auto & path : step->drv.outputPaths())
+                    txn.exec_params0("insert into FailedPaths values ($1)", localStore->printStorePath(path));
+
+            txn.commit();
+        }
+
+        stepFinished = true;
+
+        /* Remove the indirect dependencies from ‘builds’. This
+           will cause them to be destroyed. */
+        for (auto & b : indirect) {
+            auto builds_(builds.lock());
+            b->finishedInDB = true;
+            builds_->erase(b->id);
+            dependentIDs.push_back(b->id);
+            if (buildOne == b->id) quit = true;
+        }
+    }
+
+    /* Send notification about this build and its dependents. */
+    {
+        pqxx::work txn(conn);
+        notifyBuildFinished(txn, buildId, dependentIDs);
+        txn.commit();
+    }
+}
+
+
+void State::addRoot(const StorePath & storePath)
+{
+    auto root = rootsDir + "/" + std::string(storePath.to_string());
     if (!pathExists(root)) writeFile(root, "");
 }
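The failure path is now a reusable member function; both the `} else` branch above and the new `abortUnsupported()` further down call it, reporting back through two `bool &` out-parameters. A self-contained sketch of that calling convention (the body is a stand-in; names mirror the diff):

```cpp
#include <cstdlib>

// Sketch: failStep()'s out-parameter convention. stepFinished tells the
// caller the database was updated; quit is set when the build selected
// with buildOne is among the failed dependents.
void failStepSketch(bool & stepFinished, bool & quit)
{
    stepFinished = true;   // stand-in for the database update above
    quit = false;          // stand-in for the buildOne check above
}

int main()
{
    bool stepFinished = false, quit = false;
    failStepSketch(stepFinished, quit);
    if (quit) std::exit(1); // as abortUnsupported() does after failing a step
    return 0;
}
```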
@@ -10,7 +10,7 @@ using namespace nix;

 void State::makeRunnable(Step::ptr step)
 {
-    printMsg(lvlChatty, format("step ‘%1%’ is now runnable") % step->drvPath);
+    printMsg(lvlChatty, "step ‘%s’ is now runnable", localStore->printStorePath(step->drvPath));

     {
         auto step_(step->state.lock());
@@ -248,7 +248,7 @@ system_time State::doDispatch()
                 /* Can this machine do this step? */
                 if (!mi.machine->supportsStep(step)) {
                     debug("machine '%s' does not support step '%s' (system type '%s')",
-                        mi.machine->sshName, step->drvPath, step->drv.platform);
+                        mi.machine->sshName, localStore->printStorePath(step->drvPath), step->drv->platform);
                     continue;
                 }

@@ -300,6 +300,8 @@ system_time State::doDispatch()

     } while (keepGoing);

+    abortUnsupported();
+
     return sleepUntil;
 }

@@ -314,6 +316,96 @@ void State::wakeDispatcher()
 }


+void State::abortUnsupported()
+{
+    /* Make a copy of 'runnable' and 'machines' so we don't block them
+       very long. */
+    auto runnable2 = *runnable.lock();
+    auto machines2 = *machines.lock();
+
+    system_time now = std::chrono::system_clock::now();
+    auto now2 = time(0);
+
+    std::unordered_set<Step::ptr> aborted;
+
+    size_t count = 0;
+
+    for (auto & wstep : runnable2) {
+        auto step(wstep.lock());
+        if (!step) continue;
+
+        bool supported = false;
+        for (auto & machine : machines2) {
+            if (machine.second->supportsStep(step)) {
+                step->state.lock()->lastSupported = now;
+                supported = true;
+                break;
+            }
+        }
+
+        if (!supported)
+            count++;
+
+        if (!supported
+            && std::chrono::duration_cast<std::chrono::seconds>(now - step->state.lock()->lastSupported).count() >= maxUnsupportedTime)
+        {
+            printError("aborting unsupported build step '%s' (type '%s')",
+                localStore->printStorePath(step->drvPath),
+                step->systemType);
+
+            aborted.insert(step);
+
+            auto conn(dbPool.get());
+
+            std::set<Build::ptr> dependents;
+            std::set<Step::ptr> steps;
+            getDependents(step, dependents, steps);
+
+            /* Maybe the step got cancelled. */
+            if (dependents.empty()) continue;
+
+            /* Find the build that has this step as the top-level (if
+               any). */
+            Build::ptr build;
+            for (auto build2 : dependents) {
+                if (build2->drvPath == step->drvPath)
+                    build = build2;
+            }
+            if (!build) build = *dependents.begin();
+
+            bool stepFinished = false;
+            bool quit = false;
+
+            failStep(
+                *conn, step, build->id,
+                RemoteResult {
+                    .stepStatus = bsUnsupported,
+                    .errorMsg = fmt("unsupported system type '%s'",
+                        step->systemType),
+                    .startTime = now2,
+                    .stopTime = now2,
+                },
+                nullptr, stepFinished, quit);
+
+            if (quit) exit(1);
+        }
+    }
+
+    /* Clean up 'runnable'. */
+    {
+        auto runnable_(runnable.lock());
+        for (auto i = runnable_->begin(); i != runnable_->end(); ) {
+            if (aborted.count(i->lock()))
+                i = runnable_->erase(i);
+            else
+                ++i;
+        }
+    }
+
+    nrUnsupportedSteps = count;
+}
+
+
 void Jobset::addStep(time_t startTime, time_t duration)
 {
     auto steps_(steps.lock());
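The abort condition above combines a counter (feeding the new `nrUnsupportedSteps` metric) with a time window: a step is aborted only once no machine has supported it for `maxUnsupportedTime` seconds. Note that with the default of 0 shown later in state.hh, any step that is unsupported on a dispatch pass appears to be aborted immediately. A self-contained restatement of the predicate:

```cpp
#include <chrono>

// Sketch: should a runnable step be aborted as unsupported? `now` and
// `lastSupported` correspond to the dispatcher's clock samples above.
bool shouldAbortUnsupported(
    std::chrono::system_clock::time_point now,
    std::chrono::system_clock::time_point lastSupported,
    unsigned int maxUnsupportedTime,
    bool supportedNow)
{
    if (supportedNow) return false;
    auto unsupportedFor =
        std::chrono::duration_cast<std::chrono::seconds>(now - lastSupported).count();
    return unsupportedFor >= (long) maxUnsupportedTime;
}
```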
@@ -39,14 +39,15 @@ static uint64_t getMemSize()

 std::string getEnvOrDie(const std::string & key)
 {
-    char * value = getenv(key.c_str());
+    auto value = getEnv(key);
     if (!value) throw Error("environment variable '%s' is not set", key);
-    return value;
+    return *value;
 }


 State::State()
     : config(std::make_unique<::Config>())
+    , maxUnsupportedTime(config->getIntOption("max_unsupported_time", 0))
     , dbPool(config->getIntOption("max_db_connections", 128))
     , memoryTokens(config->getIntOption("nar_buffer_size", getMemSize() / 2))
     , maxOutputSize(config->getIntOption("max_output_size", 2ULL << 30))
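The change to `getEnvOrDie` tracks Nix's `getEnv()`, which now returns a `std::optional<std::string>` instead of a raw `char *`, so absence is a disengaged optional rather than a null pointer. A self-contained approximation of the new contract (the real function lives in Nix's libutil):

```cpp
#include <cstdlib>
#include <optional>
#include <string>

// Sketch: the optional-returning getEnv() shape that getEnvOrDie and
// the .value_or(...) call sites below now rely on.
std::optional<std::string> getEnvSketch(const std::string & key)
{
    const char * v = std::getenv(key.c_str());
    if (!v) return std::nullopt;
    return std::string(v);
}
```

The same shape explains the `.value_or(...)` calls in the hunks that follow.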
@@ -161,7 +162,7 @@ void State::monitorMachinesFile()
 {
     string defaultMachinesFile = "/etc/nix/machines";
     auto machinesFiles = tokenizeString<std::vector<Path>>(
-        getEnv("NIX_REMOTE_SYSTEMS", pathExists(defaultMachinesFile) ? defaultMachinesFile : ""), ":");
+        getEnv("NIX_REMOTE_SYSTEMS").value_or(pathExists(defaultMachinesFile) ? defaultMachinesFile : ""), ":");

     if (machinesFiles.empty()) {
         parseMachines("localhost " +
@@ -219,6 +220,7 @@ void State::monitorMachinesFile()
             sleep(30);
         } catch (std::exception & e) {
             printMsg(lvlError, format("reloading machines file: %1%") % e.what());
+            sleep(5);
         }
     }
 }
@@ -253,7 +255,7 @@ unsigned int State::createBuildStep(pqxx::work & txn, time_t startTime, BuildID
         buildId,
         stepNr,
         0, // == build
-        step->drvPath,
+        localStore->printStorePath(step->drvPath),
         status == bsBusy ? 1 : 0,
         startTime != 0 ? std::make_optional(startTime) : std::nullopt,
         step->drv.platform,
@@ -268,7 +270,7 @@ unsigned int State::createBuildStep(pqxx::work & txn, time_t startTime, BuildID
     for (auto & output : step->drv.outputs)
         txn.exec_params0
             ("insert into BuildStepOutputs (build, stepnr, name, path) values ($1, $2, $3, $4)",
-             buildId, stepNr, output.first, output.second.path);
+             buildId, stepNr, output.first, localStore->printStorePath(output.second.path));

     if (status == bsBusy)
         txn.exec(fmt("notify step_started, '%d\t%d'", buildId, stepNr));
@@ -309,7 +311,7 @@ void State::finishBuildStep(pqxx::work & txn, const RemoteResult & result,


 int State::createSubstitutionStep(pqxx::work & txn, time_t startTime, time_t stopTime,
-    Build::ptr build, const Path & drvPath, const string & outputName, const Path & storePath)
+    Build::ptr build, const StorePath & drvPath, const string & outputName, const StorePath & storePath)
 {
 restart:
     auto stepNr = allocBuildStep(txn, build->id);
@@ -319,7 +321,7 @@ int State::createSubstitutionStep(pqxx::work & txn, time_t startTime, time_t sto
         build->id,
         stepNr,
         1, // == substitution
-        drvPath,
+        (localStore->printStorePath(drvPath)),
         0,
         0,
         startTime,
@@ -329,7 +331,8 @@ int State::createSubstitutionStep(pqxx::work & txn, time_t startTime, time_t sto

     txn.exec_params0
         ("insert into BuildStepOutputs (build, stepnr, name, path) values ($1, $2, $3, $4)",
-         build->id, stepNr, outputName, storePath);
+         build->id, stepNr, outputName,
+         localStore->printStorePath(storePath));

     return stepNr;
 }
@@ -450,7 +453,7 @@ bool State::checkCachedFailure(Step::ptr step, Connection & conn)
 {
     pqxx::work txn(conn);
     for (auto & path : step->drv.outputPaths())
-        if (!txn.exec_params("select 1 from FailedPaths where path = $1", path).empty())
+        if (!txn.exec_params("select 1 from FailedPaths where path = $1", localStore->printStorePath(path)).empty())
             return true;
     return false;
 }
@@ -486,7 +489,7 @@ std::shared_ptr<PathLocks> State::acquireGlobalLock()
 }


-void State::dumpStatus(Connection & conn, bool log)
+void State::dumpStatus(Connection & conn)
 {
     std::ostringstream out;

@@ -518,6 +521,7 @@ void State::dumpStatus(Connection & conn, bool log)
         root.attr("nrStepsCopyingTo", nrStepsCopyingTo);
         root.attr("nrStepsCopyingFrom", nrStepsCopyingFrom);
         root.attr("nrStepsWaiting", nrStepsWaiting);
+        root.attr("nrUnsupportedSteps", nrUnsupportedSteps);
         root.attr("bytesSent", bytesSent);
         root.attr("bytesReceived", bytesReceived);
         root.attr("nrBuildsRead", nrBuildsRead);
@@ -666,11 +670,6 @@ void State::dumpStatus(Connection & conn, bool log)
         }
     }

-    if (log && time(0) >= lastStatusLogged + statusLogInterval) {
-        printMsg(lvlInfo, format("status: %1%") % out.str());
-        lastStatusLogged = time(0);
-    }
-
     {
         auto mc = startDbUpdate();
         pqxx::work txn(conn);
@@ -762,7 +761,7 @@ void State::run(BuildID buildOne)
     Store::Params localParams;
     localParams["max-connections"] = "16";
     localParams["max-connection-age"] = "600";
-    localStore = openStore(getEnv("NIX_REMOTE"), localParams);
+    localStore = openStore(getEnv("NIX_REMOTE").value_or(""), localParams);

     auto storeUri = config->getStrOption("store_uri");
     _destStore = storeUri == "" ? localStore : openStore(storeUri);
@@ -779,7 +778,7 @@ void State::run(BuildID buildOne)
     {
         auto conn(dbPool.get());
         clearBusy(*conn, 0);
-        dumpStatus(*conn, false);
+        dumpStatus(*conn);
     }

     std::thread(&State::monitorMachinesFile, this).detach();
@@ -842,8 +841,8 @@ void State::run(BuildID buildOne)
             auto conn(dbPool.get());
             receiver dumpStatus_(*conn, "dump_status");
             while (true) {
-                conn->await_notification(statusLogInterval / 2 + 1, 0);
-                dumpStatus(*conn, true);
+                conn->await_notification();
+                dumpStatus(*conn);
             }
         } catch (std::exception & e) {
             printMsg(lvlError, format("main thread: %1%") % e.what());
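With the periodic status log removed, `dumpStatus` now runs only when the blocking `await_notification()` above receives the `dump_status` channel. Any PostgreSQL client can trigger a dump; a hedged libpqxx sketch (the connection string is illustrative):

```cpp
#include <pqxx/pqxx>

// Sketch: ask a running queue runner to dump its status by sending the
// 'dump_status' notification it waits for above.
int main()
{
    pqxx::connection conn("dbname=hydra"); // assumed connection string
    pqxx::work txn(conn);
    txn.exec("notify dump_status");
    txn.commit();
    return 0;
}
```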
@@ -83,7 +83,7 @@ bool State::getQueuedBuilds(Connection & conn,
        them yet (since we don't want a long-running transaction). */
     std::vector<BuildID> newIDs;
     std::map<BuildID, Build::ptr> newBuildsByID;
-    std::multimap<Path, BuildID> newBuildsByPath;
+    std::multimap<StorePath, BuildID> newBuildsByPath;

     unsigned int newLastBuildId = lastBuildId;

@@ -102,9 +102,9 @@ bool State::getQueuedBuilds(Connection & conn,
             if (id > newLastBuildId) newLastBuildId = id;
             if (builds_->count(id)) continue;

-            auto build = std::make_shared<Build>();
+            auto build = std::make_shared<Build>(
+                localStore->parseStorePath(row["drvPath"].as<string>()));
             build->id = id;
-            build->drvPath = row["drvPath"].as<string>();
             build->projectName = row["project"].as<string>();
             build->jobsetName = row["jobset"].as<string>();
             build->jobName = row["job"].as<string>();
@@ -117,14 +117,14 @@ bool State::getQueuedBuilds(Connection & conn,

             newIDs.push_back(id);
             newBuildsByID[id] = build;
-            newBuildsByPath.emplace(std::make_pair(build->drvPath, id));
+            newBuildsByPath.emplace(std::make_pair(build->drvPath.clone(), id));
         }
     }

     std::set<Step::ptr> newRunnable;
     unsigned int nrAdded;
     std::function<void(Build::ptr)> createBuild;
-    std::set<Path> finishedDrvs;
+    std::set<StorePath> finishedDrvs;

     createBuild = [&](Build::ptr build) {
         printMsg(lvlTalkative, format("loading build %1% (%2%)") % build->id % build->fullJobName());
@@ -160,7 +160,8 @@ bool State::getQueuedBuilds(Connection & conn,

         /* Some step previously failed, so mark the build as
            failed right away. */
-        printMsg(lvlError, format("marking build %d as cached failure due to ‘%s’") % build->id % ex.step->drvPath);
+        printMsg(lvlError, "marking build %d as cached failure due to ‘%s’",
+            build->id, localStore->printStorePath(ex.step->drvPath));
         if (!build->finishedInDB) {
             auto mc = startDbUpdate();
             pqxx::work txn(conn);
@@ -171,14 +172,14 @@ bool State::getQueuedBuilds(Connection & conn,

             auto res = txn.exec_params1
                 ("select max(build) from BuildSteps where drvPath = $1 and startTime != 0 and stopTime != 0 and status = 1",
-                 ex.step->drvPath);
+                 localStore->printStorePath(ex.step->drvPath));
             if (!res[0].is_null()) propagatedFrom = res[0].as<BuildID>();

             if (!propagatedFrom) {
                 for (auto & output : ex.step->drv.outputs) {
                     auto res = txn.exec_params
                         ("select max(s.build) from BuildSteps s join BuildStepOutputs o on s.build = o.build where path = $1 and startTime != 0 and stopTime != 0 and status = 1",
-                         output.second.path);
+                         localStore->printStorePath(output.second.path));
                     if (!res[0][0].is_null()) {
                         propagatedFrom = res[0][0].as<BuildID>();
                         break;
@@ -217,7 +218,7 @@ bool State::getQueuedBuilds(Connection & conn,
         /* If we didn't get a step, it means the step's outputs are
            all valid. So we mark this as a finished, cached build. */
         if (!step) {
-            Derivation drv = readDerivation(build->drvPath);
+            Derivation drv = readDerivation(*localStore, localStore->printStorePath(build->drvPath));
             BuildOutput res = getBuildOutputCached(conn, destStore, drv);

             for (auto & path : drv.outputPaths())
@@ -227,7 +228,7 @@ bool State::getQueuedBuilds(Connection & conn,
             auto mc = startDbUpdate();
             pqxx::work txn(conn);
             time_t now = time(0);
-            printMsg(lvlInfo, format("marking build %1% as succeeded (cached)") % build->id);
+            printMsg(lvlInfo, "marking build %1% as succeeded (cached)", build->id);
             markSucceededBuild(txn, build, res, true, now, now);
             notifyBuildFinished(txn, build->id, {});
             txn.commit();
@@ -250,8 +251,8 @@ bool State::getQueuedBuilds(Connection & conn,

         build->propagatePriorities();

-        printMsg(lvlChatty, format("added build %1% (top-level step %2%, %3% new steps)")
-            % build->id % step->drvPath % newSteps.size());
+        printMsg(lvlChatty, "added build %1% (top-level step %2%, %3% new steps)",
+            build->id, localStore->printStorePath(step->drvPath), newSteps.size());
     };

     /* Now instantiate build steps for each new build. The builder
@@ -271,7 +272,7 @@ bool State::getQueuedBuilds(Connection & conn,
         try {
             createBuild(build);
         } catch (Error & e) {
-            e.addPrefix(format("while loading build %1%: ") % build->id);
+            e.addPrefix(fmt("while loading build %1%: ", build->id));
             throw;
         }

@@ -358,10 +359,12 @@ void State::processQueueChange(Connection & conn)
                 activeStepState->cancelled = true;
                 if (activeStepState->pid != -1) {
                     printInfo("killing builder process %d of build step ‘%s’",
-                        activeStepState->pid, activeStep->step->drvPath);
+                        activeStepState->pid,
+                        localStore->printStorePath(activeStep->step->drvPath));
                     if (kill(activeStepState->pid, SIGINT) == -1)
                         printError("error killing build step ‘%s’: %s",
-                            activeStep->step->drvPath, strerror(errno));
+                            localStore->printStorePath(activeStep->step->drvPath),
+                            strerror(errno));
                 }
             }
         }
@@ -370,8 +373,8 @@ void State::processQueueChange(Connection & conn)


 Step::ptr State::createStep(ref<Store> destStore,
-    Connection & conn, Build::ptr build, const Path & drvPath,
-    Build::ptr referringBuild, Step::ptr referringStep, std::set<Path> & finishedDrvs,
+    Connection & conn, Build::ptr build, const StorePath & drvPath,
+    Build::ptr referringBuild, Step::ptr referringStep, std::set<StorePath> & finishedDrvs,
     std::set<Step::ptr> & newSteps, std::set<Step::ptr> & newRunnable)
 {
     if (finishedDrvs.find(drvPath) != finishedDrvs.end()) return 0;
@@ -399,8 +402,7 @@ Step::ptr State::createStep(ref<Store> destStore,

     /* If it doesn't exist, create it. */
     if (!step) {
-        step = std::make_shared<Step>();
-        step->drvPath = drvPath;
+        step = std::make_shared<Step>(drvPath.clone());
         isNew = true;
     }

@@ -414,28 +416,28 @@ Step::ptr State::createStep(ref<Store> destStore,
         if (referringStep)
             step_->rdeps.push_back(referringStep);

-        (*steps_)[drvPath] = step;
+        steps_->insert_or_assign(drvPath.clone(), step);
     }

     if (!isNew) return step;

-    printMsg(lvlDebug, format("considering derivation ‘%1%’") % drvPath);
+    printMsg(lvlDebug, "considering derivation ‘%1%’", localStore->printStorePath(drvPath));

     /* Initialize the step. Note that the step may be visible in
        ‘steps’ before this point, but that doesn't matter because
        it's not runnable yet, and other threads won't make it
        runnable while step->created == false. */
-    step->drv = readDerivation(drvPath);
-    step->parsedDrv = std::make_unique<ParsedDerivation>(drvPath, step->drv);
+    step->drv = std::make_unique<Derivation>(readDerivation(*localStore, localStore->printStorePath(drvPath)));
+    step->parsedDrv = std::make_unique<ParsedDerivation>(drvPath.clone(), *step->drv);

     step->preferLocalBuild = step->parsedDrv->willBuildLocally();
-    step->isDeterministic = get(step->drv.env, "isDetermistic", "0") == "1";
+    step->isDeterministic = get(step->drv->env, "isDetermistic").value_or("0") == "1";

-    step->systemType = step->drv.platform;
+    step->systemType = step->drv->platform;
     {
-        auto i = step->drv.env.find("requiredSystemFeatures");
+        auto i = step->drv->env.find("requiredSystemFeatures");
         StringSet features;
-        if (i != step->drv.env.end())
+        if (i != step->drv->env.end())
             features = step->requiredSystemFeatures = tokenizeString<std::set<std::string>>(i->second);
         if (step->preferLocalBuild)
             features.insert("local");
@@ -451,12 +453,13 @@ Step::ptr State::createStep(ref<Store> destStore,

     /* Are all outputs valid? */
     bool valid = true;
-    PathSet outputs = step->drv.outputPaths();
+    auto outputs = step->drv->outputPaths();
     DerivationOutputs missing;
-    for (auto & i : step->drv.outputs)
+    for (auto & i : step->drv->outputs)
         if (!destStore->isValidPath(i.second.path)) {
             valid = false;
-            missing[i.first] = i.second;
+            missing.insert_or_assign(i.first,
+                DerivationOutput(i.second.path.clone(), std::string(i.second.hashAlgo), std::string(i.second.hash)));
         }

     /* Try to copy the missing paths from the local store or from
@@ -469,7 +472,7 @@ Step::ptr State::createStep(ref<Store> destStore,
             avail++;
         else if (useSubstitutes) {
             SubstitutablePathInfos infos;
-            localStore->querySubstitutablePathInfos({i.second.path}, infos);
+            localStore->querySubstitutablePathInfos(singleton(i.second.path), infos);
             if (infos.size() == 1)
                 avail++;
         }
@@ -482,14 +485,18 @@ Step::ptr State::createStep(ref<Store> destStore,
                 time_t startTime = time(0);

                 if (localStore->isValidPath(i.second.path))
-                    printInfo("copying output ‘%1%’ of ‘%2%’ from local store", i.second.path, drvPath);
+                    printInfo("copying output ‘%1%’ of ‘%2%’ from local store",
+                        localStore->printStorePath(i.second.path),
+                        localStore->printStorePath(drvPath));
                 else {
-                    printInfo("substituting output ‘%1%’ of ‘%2%’", i.second.path, drvPath);
+                    printInfo("substituting output ‘%1%’ of ‘%2%’",
+                        localStore->printStorePath(i.second.path),
+                        localStore->printStorePath(drvPath));
                     localStore->ensurePath(i.second.path);
                     // FIXME: should copy directly from substituter to destStore.
                 }

-                copyClosure(ref<Store>(localStore), destStore, {i.second.path});
+                copyClosure(ref<Store>(localStore), destStore, singleton(i.second.path));

                 time_t stopTime = time(0);

@@ -501,7 +508,10 @@ Step::ptr State::createStep(ref<Store> destStore,
             }

         } catch (Error & e) {
-            printError("while copying/substituting output ‘%s’ of ‘%s’: %s", i.second.path, drvPath, e.what());
+            printError("while copying/substituting output ‘%s’ of ‘%s’: %s",
+                localStore->printStorePath(i.second.path),
+                localStore->printStorePath(drvPath),
+                e.what());
             valid = false;
             break;
         }
@@ -511,15 +521,15 @@ Step::ptr State::createStep(ref<Store> destStore,

     // FIXME: check whether all outputs are in the binary cache.
     if (valid) {
-        finishedDrvs.insert(drvPath);
+        finishedDrvs.insert(drvPath.clone());
         return 0;
     }

     /* No, we need to build. */
-    printMsg(lvlDebug, format("creating build step ‘%1%’") % drvPath);
+    printMsg(lvlDebug, "creating build step ‘%1%’", localStore->printStorePath(drvPath));

     /* Create steps for the dependencies. */
-    for (auto & i : step->drv.inputDrvs) {
+    for (auto & i : step->drv->inputDrvs) {
         auto dep = createStep(destStore, conn, build, i.first, 0, step, finishedDrvs, newSteps, newRunnable);
         if (dep) {
             auto step_(step->state.lock());
@@ -610,7 +620,7 @@ BuildOutput State::getBuildOutputCached(Connection & conn, nix::ref<nix::Store>
             ("select id, buildStatus, releaseName, closureSize, size from Builds b "
              "join BuildOutputs o on b.id = o.build "
              "where finished = 1 and (buildStatus = 0 or buildStatus = 6) and path = $1",
-             output.second.path);
+             localStore->printStorePath(output.second.path));
         if (r.empty()) continue;
         BuildID id = r[0][0].as<BuildID>();

@@ -68,7 +68,7 @@ struct RemoteResult
     std::unique_ptr<nix::TokenServer::Token> tokens;
     std::shared_ptr<nix::FSAccessor> accessor;

-    BuildStatus buildStatus()
+    BuildStatus buildStatus() const
     {
         return stepStatus == bsCachedFailure ? bsFailed : stepStatus;
     }
@@ -123,8 +123,8 @@ struct Build
     typedef std::weak_ptr<Build> wptr;

     BuildID id;
-    nix::Path drvPath;
-    std::map<std::string, nix::Path> outputs;
+    nix::StorePath drvPath;
+    std::map<std::string, nix::StorePath> outputs;
     std::string projectName, jobsetName, jobName;
     time_t timestamp;
     unsigned int maxSilentTime, buildTimeout;
@@ -136,6 +136,9 @@ struct Build

     std::atomic_bool finishedInDB{false};

+    Build(nix::StorePath && drvPath) : drvPath(std::move(drvPath))
+    { }
+
     std::string fullJobName()
     {
         return projectName + ":" + jobsetName + ":" + jobName;
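`StorePath` at this point in Nix's history is move-only, which is why `Build` and `Step` now take it by rvalue reference and why call sites throughout this commit use `.clone()` and `insert_or_assign` instead of copies and `operator[]`. A toy reduction of the idiom (`MiniPath` is a hypothetical stand-in):

```cpp
#include <map>
#include <string>
#include <utility>

// Sketch: a move-only path type with an explicit clone(), standing in
// for how nix::StorePath behaves in this commit.
struct MiniPath
{
    std::string raw;
    explicit MiniPath(std::string s) : raw(std::move(s)) {}
    MiniPath(const MiniPath &) = delete;             // no implicit copies
    MiniPath & operator=(const MiniPath &) = delete;
    MiniPath(MiniPath &&) = default;
    MiniPath & operator=(MiniPath &&) = default;
    MiniPath clone() const { return MiniPath(raw); } // copies are explicit
    bool operator<(const MiniPath & other) const { return raw < other.raw; }
};

struct MiniBuild
{
    MiniPath drvPath;
    MiniBuild(MiniPath && p) : drvPath(std::move(p)) {}
};

// Usage: keys must be cloned or moved into containers; operator[]
// would need a copyable key, so insert_or_assign is used instead.
void example()
{
    MiniPath p(std::string("example.drv"));
    std::map<MiniPath, int> steps;
    steps.insert_or_assign(p.clone(), 1);
    MiniBuild build(std::move(p));
}
```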
@@ -150,8 +153,8 @@ struct Step
     typedef std::shared_ptr<Step> ptr;
     typedef std::weak_ptr<Step> wptr;

-    nix::Path drvPath;
-    nix::Derivation drv;
+    nix::StorePath drvPath;
+    std::unique_ptr<nix::Derivation> drv;
     std::unique_ptr<nix::ParsedDerivation> parsedDrv;
     std::set<std::string> requiredSystemFeatures;
     bool preferLocalBuild;
@@ -195,12 +198,19 @@ struct Step

         /* The time at which this step became runnable. */
         system_time runnableSince;
+
+        /* The time that we last saw a machine that supports this
+           step. */
+        system_time lastSupported = std::chrono::system_clock::now();
     };

     std::atomic_bool finished{false}; // debugging

     nix::Sync<State> state;

+    Step(nix::StorePath && drvPath) : drvPath(std::move(drvPath))
+    { }
+
     ~Step()
     {
         //printMsg(lvlError, format("destroying step %1%") % drvPath);
@@ -252,7 +262,7 @@ struct Machine
     {
         /* Check that this machine is of the type required by the
            step. */
-        if (!systemTypes.count(step->drv.platform == "builtin" ? nix::settings.thisSystem : step->drv.platform))
+        if (!systemTypes.count(step->drv->platform == "builtin" ? nix::settings.thisSystem : step->drv->platform))
             return false;

         /* Check that the step requires all mandatory features of this
@@ -297,6 +307,9 @@ private:
     const float retryBackoff = 3.0;
     const unsigned int maxParallelCopyClosure = 4;

+    /* Time in seconds before unsupported build steps are aborted. */
+    const unsigned int maxUnsupportedTime = 0;
+
     nix::Path hydraData, logDir;

     bool useSubstitutes = false;
@@ -313,7 +326,7 @@ private:
        queued builds). Note that these are weak pointers. Steps are
        kept alive by being reachable from Builds or by being in
        progress. */
-    typedef std::map<nix::Path, Step::wptr> Steps;
+    typedef std::map<nix::StorePath, Step::wptr> Steps;
     nix::Sync<Steps> steps;

     /* Build steps that have no unbuilt dependencies. */
@@ -342,6 +355,7 @@ private:
     counter nrStepsCopyingTo{0};
     counter nrStepsCopyingFrom{0};
     counter nrStepsWaiting{0};
+    counter nrUnsupportedSteps{0};
     counter nrRetries{0};
     counter maxNrRetries{0};
     counter totalStepTime{0}; // total time for steps, including closure copying
@@ -406,9 +420,6 @@ private:
     size_t maxOutputSize;
     size_t maxLogSize;

-    time_t lastStatusLogged = 0;
-    const int statusLogInterval = 300;
-
     /* Steps that were busy while we encounted a PostgreSQL
        error. These need to be cleared at a later time to prevent them
        from showing up as busy until the queue runner is restarted. */
@ -454,7 +465,7 @@ private:
|
||||||
const std::string & machine);
|
const std::string & machine);
|
||||||
|
|
||||||
int createSubstitutionStep(pqxx::work & txn, time_t startTime, time_t stopTime,
|
int createSubstitutionStep(pqxx::work & txn, time_t startTime, time_t stopTime,
|
||||||
Build::ptr build, const nix::Path & drvPath, const std::string & outputName, const nix::Path & storePath);
|
Build::ptr build, const nix::StorePath & drvPath, const std::string & outputName, const nix::StorePath & storePath);
|
||||||
|
|
||||||
void updateBuild(pqxx::work & txn, Build::ptr build, BuildStatus status);
|
void updateBuild(pqxx::work & txn, Build::ptr build, BuildStatus status);
|
||||||
|
|
||||||
|
@ -473,10 +484,19 @@ private:
|
||||||
const nix::Derivation & drv);
|
const nix::Derivation & drv);
|
||||||
|
|
||||||
Step::ptr createStep(nix::ref<nix::Store> store,
|
Step::ptr createStep(nix::ref<nix::Store> store,
|
||||||
Connection & conn, Build::ptr build, const nix::Path & drvPath,
|
Connection & conn, Build::ptr build, const nix::StorePath & drvPath,
|
||||||
Build::ptr referringBuild, Step::ptr referringStep, std::set<nix::Path> & finishedDrvs,
|
Build::ptr referringBuild, Step::ptr referringStep, std::set<nix::StorePath> & finishedDrvs,
|
||||||
std::set<Step::ptr> & newSteps, std::set<Step::ptr> & newRunnable);
|
std::set<Step::ptr> & newSteps, std::set<Step::ptr> & newRunnable);
|
||||||
|
|
||||||
|
void failStep(
|
||||||
|
Connection & conn,
|
||||||
|
Step::ptr step,
|
||||||
|
BuildID buildId,
|
||||||
|
const RemoteResult & result,
|
||||||
|
Machine::ptr machine,
|
||||||
|
bool & stepFinished,
|
||||||
|
bool & quit);
|
||||||
|
|
||||||
Jobset::ptr createJobset(pqxx::work & txn,
|
Jobset::ptr createJobset(pqxx::work & txn,
|
||||||
const std::string & projectName, const std::string & jobsetName);
|
const std::string & projectName, const std::string & jobsetName);
|
||||||
|
|
||||||
|
@ -491,6 +511,8 @@ private:
|
||||||
|
|
||||||
void wakeDispatcher();
|
void wakeDispatcher();
|
||||||
|
|
||||||
|
void abortUnsupported();
|
||||||
|
|
||||||
void builder(MachineReservation::ptr reservation);
|
void builder(MachineReservation::ptr reservation);
|
||||||
|
|
||||||
/* Perform the given build step. Return true if the step is to be
|
/* Perform the given build step. Return true if the step is to be
|
||||||
|
@ -521,9 +543,9 @@ private:
|
||||||
has it. */
|
has it. */
|
||||||
std::shared_ptr<nix::PathLocks> acquireGlobalLock();
|
std::shared_ptr<nix::PathLocks> acquireGlobalLock();
|
||||||
|
|
||||||
void dumpStatus(Connection & conn, bool log);
|
void dumpStatus(Connection & conn);
|
||||||
|
|
||||||
void addRoot(const nix::Path & storePath);
|
void addRoot(const nix::StorePath & storePath);
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
|
||||||
|
|
|
@ -7,7 +7,7 @@
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
MakeError(NoTokens, Error)
|
MakeError(NoTokens, Error);
|
||||||
|
|
||||||
/* This class hands out tokens. There are only ‘maxTokens’ tokens
|
/* This class hands out tokens. There are only ‘maxTokens’ tokens
|
||||||
available. Calling get(N) will return a Token object, representing
|
available. Calling get(N) will return a Token object, representing
|
||||||
|
|
|
@ -88,7 +88,7 @@ sub jobsetToHash {
|
||||||
triggertime => $jobset->triggertime,
|
triggertime => $jobset->triggertime,
|
||||||
fetcherrormsg => $jobset->fetcherrormsg,
|
fetcherrormsg => $jobset->fetcherrormsg,
|
||||||
errortime => $jobset->errortime,
|
errortime => $jobset->errortime,
|
||||||
haserrormsg => $jobset->errormsg eq "" ? JSON::false : JSON::true
|
haserrormsg => defined($jobset->errormsg) && $jobset->errormsg ne "" ? JSON::true : JSON::false
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -193,7 +193,8 @@ sub checkPath {
 sub serveFile {
     my ($c, $path) = @_;

-    my $res = run(cmd => ["nix", "ls-store", "--store", getStoreUri(), "--json", "$path"]);
+    my $res = run(cmd => ["nix", "--experimental-features", "nix-command",
+                  "ls-store", "--store", getStoreUri(), "--json", "$path"]);

     if ($res->{status}) {
         notFound($c, "File '$path' does not exist.") if $res->{stderr} =~ /does not exist/;

@@ -217,7 +218,8 @@ sub serveFile {

     elsif ($ls->{type} eq "regular") {

-        $c->stash->{'plain'} = { data => grab(cmd => ["nix", "cat-store", "--store", getStoreUri(), "$path"]) };
+        $c->stash->{'plain'} = { data => grab(cmd => ["nix", "--experimental-features", "nix-command",
+                                 "cat-store", "--store", getStoreUri(), "$path"]) };

         # Detect MIME type. Borrowed from Catalyst::Plugin::Static::Simple.
         my $type = "text/plain";


@@ -82,7 +82,7 @@ sub overview : Chained('job') PathPart('') Args(0) {
     # If this is an aggregate job, then get its constituents.
     my @constituents = $c->model('DB::Builds')->search(
         { aggregate => { -in => $job->builds->search({}, { columns => ["id"], order_by => "id desc", rows => 15 })->as_query } },
         { join => 'aggregateconstituents_constituents',
           columns => ['id', 'job', 'finished', 'buildstatus'],
           +select => ['aggregateconstituents_constituents.aggregate'],
           +as => ['aggregate']

@@ -99,7 +99,7 @@ sub overview : Chained('job') PathPart('') Args(0) {

     foreach my $agg (keys %$aggregates) {
         # FIXME: could be done in one query.
         $aggregates->{$agg}->{build} =
             $c->model('DB::Builds')->find({id => $agg}, {columns => [@buildListColumns]}) or die;
     }

@@ -172,7 +172,7 @@ sub get_builds : Chained('job') PathPart('') CaptureArgs(0) {
     my ($self, $c) = @_;
     $c->stash->{allBuilds} = $c->stash->{job}->builds;
     $c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForJob')
-        ->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name, $c->stash->{job}->name]});
+        ->search({}, {bind => [$c->stash->{jobset}->name, $c->stash->{job}->name]});
     $c->stash->{channelBaseName} =
         $c->stash->{project}->name . "-" . $c->stash->{jobset}->name . "-" . $c->stash->{job}->name;
 }


@@ -162,7 +162,7 @@ sub get_builds : Chained('jobsetChain') PathPart('') CaptureArgs(0) {
     my ($self, $c) = @_;
     $c->stash->{allBuilds} = $c->stash->{jobset}->builds;
     $c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForJobset')
-        ->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name]});
+        ->search({}, {bind => [$c->stash->{jobset}->name]});
     $c->stash->{channelBaseName} =
         $c->stash->{project}->name . "-" . $c->stash->{jobset}->name;
 }

@@ -223,15 +223,10 @@ sub updateJobset {
     error($c, "Cannot rename jobset to ‘$jobsetName’ since that identifier is already taken.")
         if $jobsetName ne $oldName && defined $c->stash->{project}->jobsets->find({ name => $jobsetName });

-    # When the expression is in a .scm file, assume it's a Guile + Guix
-    # build expression.
-    my $exprType =
-        $c->stash->{params}->{"nixexprpath"} =~ /.scm$/ ? "guile" : "nix";

     my ($nixExprPath, $nixExprInput) = nixExprPathFromParams $c;

     my $enabled = int($c->stash->{params}->{enabled});
-    die if $enabled < 0 || $enabled > 2;
+    die if $enabled < 0 || $enabled > 3;

     my $shares = int($c->stash->{params}->{schedulingshares} // 1);
     error($c, "The number of scheduling shares must be positive.") if $shares <= 0;
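The upper bound on `enabled` moves from 2 to 3, so the form now accepts a fourth jobset state. As a hedged illustration only (the state names below follow Hydra's jobset settings UI and are an assumption, not something stated in this diff):

    # Assumed meaning of the accepted integer states (illustrative):
    my %jobset_state = (
        0 => "disabled",
        1 => "enabled",
        2 => "one-shot",         # evaluate once, then stop
        3 => "one-at-a-time",    # the newly permitted value
    );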
@@ -68,8 +68,14 @@ sub handleDeclarativeJobsetBuild {
     my $id = $build->id;
     die "Declarative jobset build $id failed" unless $build->buildstatus == 0;
     my $declPath = ($build->buildoutputs)[0]->path;
-    my $declText = readNixFile($declPath)
-        or die "Couldn't read declarative specification file $declPath: $!";
+    my $declText = eval {
+        readNixFile($declPath)
+    };
+    if ($@) {
+        print STDERR "ERROR: failed to readNixFile $declPath: ", $@, "\n";
+        die;
+    }

     my $declSpec = decode_json($declText);
     txn_do($db, sub {
         my @kept = keys %$declSpec;


@@ -509,7 +509,8 @@ sub getStoreUri {
 # Read a file from the (possibly remote) nix store
 sub readNixFile {
     my ($path) = @_;
-    return grab(cmd => ["nix", "cat-store", "--store", getStoreUri(), "$path"]);
+    return grab(cmd => ["nix", "--experimental-features", "nix-command",
+                "cat-store", "--store", getStoreUri(), "$path"]);
 }
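Each of these `nix` call sites now passes `--experimental-features nix-command` explicitly, presumably so the invocations keep working on Nix releases that gate the unified `nix` CLI behind that feature flag; the motivation is inferred from the change, not stated in it. The resulting command line looks roughly like this, with `<store-uri>` and `<path>` as placeholders:

    nix --experimental-features nix-command cat-store --store <store-uri> <path>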
@@ -7,6 +7,53 @@ use LWP::UserAgent;
 use Hydra::Helper::CatalystUtils;
 use JSON;

+=head1 NAME
+
+SlackNotification - hydra-notify plugin for sending Slack notifications about
+build results
+
+=head1 DESCRIPTION
+
+This plugin reports build statuses to various Slack channels. One can configure
+which builds are reported to which channels, and whether reports should be on
+state change (regressions and improvements), or for each build.
+
+=head1 CONFIGURATION
+
+The module is configured using the C<slack> block in Hydra's config file. There
+can be multiple such blocks in the config file, each configuring different (or
+even the same) set of builds and how they report to Slack channels.
+
+The following entries are recognized in the C<slack> block:
+
+=over 4
+
+=item jobs
+
+A pattern for job names. All builds whose job name matches this pattern will
+emit a message to the designated Slack channel (see C<url>). The pattern will
+match the whole name, thus leaving this field empty will result in no
+notifications being sent. To match on all builds, use C<.*>.
+
+=item url
+
+The URL to a L<Slack incoming webhook|https://api.slack.com/messaging/webhooks>.
+
+Slack administrators have to prepare one incoming webhook for each channel. This
+URL should be treated as secret, as anyone knowing the URL could post a message
+to the Slack workspace (or more precisely, the channel behind it).
+
+=item force
+
+(Optional) An I<integer> indicating whether to report on every build or only on
+changes in the status. If not provided, defaults to 0, that is, sending reports
+only when build status changes from success to failure, and vice-versa. Any
+other value results in reporting on every build.
+
+=back
+
+=cut

 sub isEnabled {
     my ($self) = @_;
     return defined $self->{config}->{slack};
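Putting the documented entries together, a Hydra config file carrying one such block might look like the following sketch; the job pattern and the webhook URL are placeholders, not values taken from this commit:

    <slack>
      jobs = myproject:mainline:.*
      url = https://hooks.slack.com/services/T0000/B0000/XXXXXXXX
      force = 0
    </slack>

With `force = 0` a channel only hears about status changes; any other value reports every matching build.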
@@ -40,20 +87,32 @@ sub buildFinished {
     # we send one aggregate message.
     my %channels;
     foreach my $b ($build, @{$dependents}) {
-        my $prevBuild = getPreviousBuild($b);
         my $jobName = showJobName $b;
+        my $buildStatus = $b->buildstatus;
+        my $cancelledOrAborted = $buildStatus == 4 || $buildStatus == 3;
+
+        my $prevBuild = getPreviousBuild($b);
+        my $sameAsPrevious = defined $prevBuild && ($buildStatus == $prevBuild->buildstatus);
+        my $prevBuildStatus = (defined $prevBuild) ? $prevBuild->buildstatus : -1;
+        my $prevBuildId = (defined $prevBuild) ? $prevBuild->id : -1;
+
+        print STDERR "SlackNotification_Debug job name $jobName status $buildStatus (previous: $prevBuildStatus from $prevBuildId)\n";

         foreach my $channel (@config) {
-            my $force = $channel->{force};
             next unless $jobName =~ /^$channel->{jobs}$/;

-            # If build is cancelled or aborted, do not send email.
-            next if ! $force && ($b->buildstatus == 4 || $b->buildstatus == 3);
+            my $force = $channel->{force};
+
+            print STDERR "SlackNotification_Debug found match with '$channel->{jobs}' with force=$force\n";
+
+            # If build is cancelled or aborted, do not send Slack notification.
+            next if ! $force && $cancelledOrAborted;

             # If there is a previous (that is not cancelled or aborted) build
-            # with same buildstatus, do not send email.
-            next if ! $force && defined $prevBuild && ($b->buildstatus == $prevBuild->buildstatus);
+            # with same buildstatus, do not send Slack notification.
+            next if ! $force && $sameAsPrevious;
+
+            print STDERR "SlackNotification_Debug adding $jobName to the report list\n";

             $channels{$channel->{url}} //= { channel => $channel, builds => [] };
             push @{$channels{$channel->{url}}->{builds}}, $b;
         }

@@ -93,6 +152,8 @@ sub buildFinished {
         $text .= join(" or ", scalar @x > 1 ? join(", ", @x[0..scalar @x - 2]) : (), $x[-1]);
     }

+    print STDERR "SlackNotification_Debug POSTing to url ending with: ${\substr $url, -8}\n";

     my $msg =
         { attachments =>
             [{ fallback => "Job " . showJobName($build) . " build number " . $build->id . ": " . showStatus($build),


@@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';

 __PACKAGE__->load_components("+Hydra::Component::ToJSON");

-=head1 TABLE: C<AggregateConstituents>
+=head1 TABLE: C<aggregateconstituents>

 =cut

-__PACKAGE__->table("AggregateConstituents");
+__PACKAGE__->table("aggregateconstituents");

 =head1 ACCESSORS

@@ -103,8 +103,8 @@ __PACKAGE__->belongs_to(
 );


-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-08-15 00:20:01
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:TLNenyPLIWw2gWsOVhplZw
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:bQfQoSstlaFy7zw8i1R+ow


 # You can replace this text with custom code or comments, and it will be preserved on regeneration


@@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';

 __PACKAGE__->load_components("+Hydra::Component::ToJSON");

-=head1 TABLE: C<BuildInputs>
+=head1 TABLE: C<buildinputs>

 =cut

-__PACKAGE__->table("BuildInputs");
+__PACKAGE__->table("buildinputs");

 =head1 ACCESSORS

@@ -40,6 +40,7 @@ __PACKAGE__->table("BuildInputs");
   data_type: 'integer'
   is_auto_increment: 1
   is_nullable: 0
+  sequence: 'buildinputs_id_seq'

 =head2 build

@@ -98,7 +99,12 @@ __PACKAGE__->table("BuildInputs");

 __PACKAGE__->add_columns(
   "id",
-  { data_type => "integer", is_auto_increment => 1, is_nullable => 0 },
+  {
+    data_type => "integer",
+    is_auto_increment => 1,
+    is_nullable => 0,
+    sequence => "buildinputs_id_seq",
+  },
   "build",
   { data_type => "integer", is_foreign_key => 1, is_nullable => 1 },
   "name",

@@ -176,8 +182,8 @@ __PACKAGE__->belongs_to(
 );


-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-08 13:08:15
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:OaJPzRM+8XGsu3eIkqeYEw
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:/Fwb8emBsvwrZlEab2X+gQ

 my %hint = (
     columns => [


@@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';

 __PACKAGE__->load_components("+Hydra::Component::ToJSON");

-=head1 TABLE: C<BuildMetrics>
+=head1 TABLE: C<buildmetrics>

 =cut

-__PACKAGE__->table("BuildMetrics");
+__PACKAGE__->table("buildmetrics");

 =head1 ACCESSORS

@@ -177,8 +177,8 @@ __PACKAGE__->belongs_to(
 );


-# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:qoPm5/le+sVHigW4Dmum2Q
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Roy7h/K9u7DQOzet4B1sbA

 sub json_hint {
     return { columns => ['value', 'unit'] };


@@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';

 __PACKAGE__->load_components("+Hydra::Component::ToJSON");

-=head1 TABLE: C<BuildOutputs>
+=head1 TABLE: C<buildoutputs>

 =cut

-__PACKAGE__->table("BuildOutputs");
+__PACKAGE__->table("buildoutputs");

 =head1 ACCESSORS

@@ -94,8 +94,8 @@ __PACKAGE__->belongs_to(
 );


-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:V8MbzKvZNEaeHBJV67+ZMQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:71R9clwAP6vzDh10EukTaw

 my %hint = (
     columns => [


@@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';

 __PACKAGE__->load_components("+Hydra::Component::ToJSON");

-=head1 TABLE: C<BuildProducts>
+=head1 TABLE: C<buildproducts>

 =cut

-__PACKAGE__->table("BuildProducts");
+__PACKAGE__->table("buildproducts");

 =head1 ACCESSORS

@@ -143,8 +143,8 @@ __PACKAGE__->belongs_to(
 );


-# Created by DBIx::Class::Schema::Loader v0.07043 @ 2016-04-13 14:49:33
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:kONECZn56f7sqfrLviiUOQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:iI0gmKqQxiPBTy5QsM6tpQ

 my %hint = (
     columns => [


@@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';

 __PACKAGE__->load_components("+Hydra::Component::ToJSON");

-=head1 TABLE: C<BuildStepOutputs>
+=head1 TABLE: C<buildstepoutputs>

 =cut

-__PACKAGE__->table("BuildStepOutputs");
+__PACKAGE__->table("buildstepoutputs");

 =head1 ACCESSORS

@@ -119,8 +119,8 @@ __PACKAGE__->belongs_to(
 );


-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:A/4v3ugXYbuYoKPlOvC6mg
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Y6DpbTM6z4cOGoYIhD3i1A


 # You can replace this text with custom code or comments, and it will be preserved on regeneration


@@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';

 __PACKAGE__->load_components("+Hydra::Component::ToJSON");

-=head1 TABLE: C<BuildSteps>
+=head1 TABLE: C<buildsteps>

 =cut

-__PACKAGE__->table("BuildSteps");
+__PACKAGE__->table("buildsteps");

 =head1 ACCESSORS

@@ -215,8 +215,8 @@ __PACKAGE__->belongs_to(
 );


-# Created by DBIx::Class::Schema::Loader v0.07045 @ 2016-12-07 13:48:19
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:3FYkqSUfgWmiqZzmX8J4TA
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:AMjHq4g/fSUv/lZuZOljYg

 my %hint = (
     columns => [


@@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';

 __PACKAGE__->load_components("+Hydra::Component::ToJSON");

-=head1 TABLE: C<Builds>
+=head1 TABLE: C<builds>

 =cut

-__PACKAGE__->table("Builds");
+__PACKAGE__->table("builds");

 =head1 ACCESSORS

@@ -40,6 +40,7 @@ __PACKAGE__->table("Builds");
   data_type: 'integer'
   is_auto_increment: 1
   is_nullable: 0
+  sequence: 'builds_id_seq'

 =head2 finished

@@ -63,6 +64,12 @@ __PACKAGE__->table("Builds");
   is_foreign_key: 1
   is_nullable: 0

+=head2 jobset_id
+
+  data_type: 'integer'
+  is_foreign_key: 1
+  is_nullable: 0

 =head2 job

   data_type: 'text'

@@ -200,7 +207,12 @@ __PACKAGE__->table("Builds");

 __PACKAGE__->add_columns(
   "id",
-  { data_type => "integer", is_auto_increment => 1, is_nullable => 0 },
+  {
+    data_type => "integer",
+    is_auto_increment => 1,
+    is_nullable => 0,
+    sequence => "builds_id_seq",
+  },
   "finished",
   { data_type => "integer", is_nullable => 0 },
   "timestamp",

@@ -209,6 +221,8 @@ __PACKAGE__->add_columns(
   { data_type => "text", is_foreign_key => 1, is_nullable => 0 },
   "jobset",
   { data_type => "text", is_foreign_key => 1, is_nullable => 0 },
+  "jobset_id",
+  { data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
   "job",
   { data_type => "text", is_foreign_key => 1, is_nullable => 0 },
   "nixname",

@@ -451,6 +465,21 @@ Related object: L<Hydra::Schema::Jobsets>
 __PACKAGE__->belongs_to(
   "jobset",
   "Hydra::Schema::Jobsets",
+  { id => "jobset_id" },
+  { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },
+);
+
+=head2 jobset_project_jobset
+
+Type: belongs_to
+
+Related object: L<Hydra::Schema::Jobsets>
+
+=cut
+
+__PACKAGE__->belongs_to(
+  "jobset_project_jobset",
+  "Hydra::Schema::Jobsets",
   { name => "jobset", project => "project" },
   { is_deferrable => 0, on_delete => "NO ACTION", on_update => "CASCADE" },
 );

@@ -544,8 +573,8 @@ __PACKAGE__->many_to_many(
 );


-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2019-08-19 16:12:37
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:VjYbAQwv4THW2VfWQ5ajYQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:34:25
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:EEXlcKN/ydXJ129vT0jTUw

 __PACKAGE__->has_many(
   "dependents",

@@ -608,8 +637,8 @@ QUERY

 makeQueries('', "");
 makeQueries('ForProject', "and project = ?");
-makeQueries('ForJobset', "and project = ? and jobset = ?");
-makeQueries('ForJob', "and project = ? and jobset = ? and job = ?");
+makeQueries('ForJobset', "and jobset_id = (select id from jobsets j where j.name = ?)");
+makeQueries('ForJob', "and jobset_id = (select id from jobsets j where j.name = ?) and job = ?");


 my %hint = (
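The `ForJobset` and `ForJob` variants now key on the new surrogate `jobset_id` column instead of the `(project, jobset)` text pair, resolving the id through a subselect on the jobsets table. A rough DBIx::Class equivalent of that lookup, with hypothetical `$db`, `$project`, and `$name` variables:

    # Resolve the jobset's surrogate id once, then filter builds by it.
    my $jobset = $db->resultset('Jobsets')->find({ project => $project, name => $name });
    my @builds = $db->resultset('Builds')->search({ jobset_id => $jobset->id });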
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<CachedBazaarInputs>
|
=head1 TABLE: C<cachedbazaarinputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("CachedBazaarInputs");
|
__PACKAGE__->table("cachedbazaarinputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -83,8 +83,8 @@ __PACKAGE__->add_columns(
|
||||||
__PACKAGE__->set_primary_key("uri", "revision");
|
__PACKAGE__->set_primary_key("uri", "revision");
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:zvun8uhxwrr7B8EsqBoCjA
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:X8L4C57lMOctdqOKSmfA/g
|
||||||
|
|
||||||
|
|
||||||
# You can replace this text with custom content, and it will be preserved on regeneration
|
# You can replace this text with custom content, and it will be preserved on regeneration
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<CachedCVSInputs>
|
=head1 TABLE: C<cachedcvsinputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("CachedCVSInputs");
|
__PACKAGE__->table("cachedcvsinputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -99,8 +99,8 @@ __PACKAGE__->add_columns(
|
||||||
__PACKAGE__->set_primary_key("uri", "module", "sha256hash");
|
__PACKAGE__->set_primary_key("uri", "module", "sha256hash");
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Vi1qzjW52Lnsl0JSmGzy0w
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:6eQ+i/th+oVZNRiDPd2luA
|
||||||
|
|
||||||
# You can replace this text with custom content, and it will be preserved on regeneration
|
# You can replace this text with custom content, and it will be preserved on regeneration
|
||||||
1;
|
1;
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<CachedDarcsInputs>
|
=head1 TABLE: C<cacheddarcsinputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("CachedDarcsInputs");
|
__PACKAGE__->table("cacheddarcsinputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -90,8 +90,8 @@ __PACKAGE__->add_columns(
|
||||||
__PACKAGE__->set_primary_key("uri", "revision");
|
__PACKAGE__->set_primary_key("uri", "revision");
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-09-20 11:08:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Yl1slt3SAizijgu0KUTn0A
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Buwq42sBXQVfYUy01WMyYw
|
||||||
|
|
||||||
|
|
||||||
# You can replace this text with custom code or comments, and it will be preserved on regeneration
|
# You can replace this text with custom code or comments, and it will be preserved on regeneration
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<CachedGitInputs>
|
=head1 TABLE: C<cachedgitinputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("CachedGitInputs");
|
__PACKAGE__->table("cachedgitinputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -92,7 +92,7 @@ __PACKAGE__->add_columns(
|
||||||
__PACKAGE__->set_primary_key("uri", "branch", "revision");
|
__PACKAGE__->set_primary_key("uri", "branch", "revision");
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:I4hI02FKRMkw76WV/KBocA
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:0sdK9uQZpx869oqS5thRLw
|
||||||
|
|
||||||
1;
|
1;
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<CachedHgInputs>
|
=head1 TABLE: C<cachedhginputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("CachedHgInputs");
|
__PACKAGE__->table("cachedhginputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -92,8 +92,8 @@ __PACKAGE__->add_columns(
|
||||||
__PACKAGE__->set_primary_key("uri", "branch", "revision");
|
__PACKAGE__->set_primary_key("uri", "branch", "revision");
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:qS/eiiZXmpc7KpTHdtaT7g
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:dYfjQ0SJG/mBrsZemAW3zw
|
||||||
|
|
||||||
|
|
||||||
# You can replace this text with custom content, and it will be preserved on regeneration
|
# You can replace this text with custom content, and it will be preserved on regeneration
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<CachedPathInputs>
|
=head1 TABLE: C<cachedpathinputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("CachedPathInputs");
|
__PACKAGE__->table("cachedpathinputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -90,7 +90,7 @@ __PACKAGE__->add_columns(
|
||||||
__PACKAGE__->set_primary_key("srcpath", "sha256hash");
|
__PACKAGE__->set_primary_key("srcpath", "sha256hash");
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:28rja0vR1glJJ15hzVfjsQ
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:oV7tbWLNEMC8byKf9UnAlw
|
||||||
|
|
||||||
1;
|
1;
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<CachedSubversionInputs>
|
=head1 TABLE: C<cachedsubversioninputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("CachedSubversionInputs");
|
__PACKAGE__->table("cachedsubversioninputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -83,7 +83,7 @@ __PACKAGE__->add_columns(
|
||||||
__PACKAGE__->set_primary_key("uri", "revision");
|
__PACKAGE__->set_primary_key("uri", "revision");
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:3qXfnvkOVj25W94bfhQ65w
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:VGt/0HG84eNZr9OIA8jzow
|
||||||
|
|
||||||
1;
|
1;
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<FailedPaths>
|
=head1 TABLE: C<failedpaths>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("FailedPaths");
|
__PACKAGE__->table("failedpaths");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -57,8 +57,8 @@ __PACKAGE__->add_columns("path", { data_type => "text", is_nullable => 0 });
|
||||||
__PACKAGE__->set_primary_key("path");
|
__PACKAGE__->set_primary_key("path");
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2015-06-10 14:48:16
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:WFgjfjH+szE6Ntcicmaflw
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:jr3XiGO4lWAzqfATbsMwFw
|
||||||
|
|
||||||
|
|
||||||
# You can replace this text with custom code or comments, and it will be preserved on regeneration
|
# You can replace this text with custom code or comments, and it will be preserved on regeneration
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<Jobs>
|
=head1 TABLE: C<jobs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("Jobs");
|
__PACKAGE__->table("jobs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -47,6 +47,12 @@ __PACKAGE__->table("Jobs");
|
||||||
is_foreign_key: 1
|
is_foreign_key: 1
|
||||||
is_nullable: 0
|
is_nullable: 0
|
||||||
|
|
||||||
|
=head2 jobset_id
|
||||||
|
|
||||||
|
data_type: 'integer'
|
||||||
|
is_foreign_key: 1
|
||||||
|
is_nullable: 0
|
||||||
|
|
||||||
=head2 name
|
=head2 name
|
||||||
|
|
||||||
data_type: 'text'
|
data_type: 'text'
|
||||||
|
@ -59,6 +65,8 @@ __PACKAGE__->add_columns(
|
||||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
||||||
"jobset",
|
"jobset",
|
||||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
||||||
|
"jobset_id",
|
||||||
|
{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
|
||||||
"name",
|
"name",
|
||||||
{ data_type => "text", is_nullable => 0 },
|
{ data_type => "text", is_nullable => 0 },
|
||||||
);
|
);
|
||||||
|
@ -130,6 +138,21 @@ Related object: L<Hydra::Schema::Jobsets>
|
||||||
__PACKAGE__->belongs_to(
|
__PACKAGE__->belongs_to(
|
||||||
"jobset",
|
"jobset",
|
||||||
"Hydra::Schema::Jobsets",
|
"Hydra::Schema::Jobsets",
|
||||||
|
{ id => "jobset_id" },
|
||||||
|
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },
|
||||||
|
);
|
||||||
|
|
||||||
|
=head2 jobset_project_jobset
|
||||||
|
|
||||||
|
Type: belongs_to
|
||||||
|
|
||||||
|
Related object: L<Hydra::Schema::Jobsets>
|
||||||
|
|
||||||
|
=cut
|
||||||
|
|
||||||
|
__PACKAGE__->belongs_to(
|
||||||
|
"jobset_project_jobset",
|
||||||
|
"Hydra::Schema::Jobsets",
|
||||||
{ name => "jobset", project => "project" },
|
{ name => "jobset", project => "project" },
|
||||||
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
|
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
|
||||||
);
|
);
|
||||||
|
@ -169,7 +192,25 @@ __PACKAGE__->has_many(
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:33:28
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:vDAo9bzLca+QWfhOb9OLMg
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:C5Tyh8Ke4yC6q7KIFVOHcQ
|
||||||
|
|
||||||
|
=head2 builds
|
||||||
|
|
||||||
|
Type: has_many
|
||||||
|
|
||||||
|
Related object: L<Hydra::Sc2hema::Builds>
|
||||||
|
|
||||||
|
=cut
|
||||||
|
|
||||||
|
__PACKAGE__->has_many(
|
||||||
|
"builds",
|
||||||
|
"Hydra::Schema::Builds",
|
||||||
|
{
|
||||||
|
"foreign.job" => "self.name",
|
||||||
|
"foreign.jobset_id" => "self.jobset_id",
|
||||||
|
},
|
||||||
|
undef,
|
||||||
|
);
|
||||||
|
|
||||||
1;
|
1;
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<JobsetEvalInputs>
|
=head1 TABLE: C<jobsetevalinputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("JobsetEvalInputs");
|
__PACKAGE__->table("jobsetevalinputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -166,8 +166,8 @@ __PACKAGE__->belongs_to(
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:1Dp8B58leBLh4GK0GPw2zg
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:/cFQGBLhvpmBO1UJztgIAg
|
||||||
|
|
||||||
my %hint = (
|
my %hint = (
|
||||||
columns => [
|
columns => [
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<JobsetEvalMembers>
|
=head1 TABLE: C<jobsetevalmembers>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("JobsetEvalMembers");
|
__PACKAGE__->table("jobsetevalmembers");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -110,8 +110,8 @@ __PACKAGE__->belongs_to(
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:ccPNQe/QnSjTAC3uGWe8Ng
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:T+dJFh/sDO8WsasqYVLRSQ
|
||||||
|
|
||||||
|
|
||||||
# You can replace this text with custom content, and it will be preserved on regeneration
|
# You can replace this text with custom content, and it will be preserved on regeneration
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<JobsetEvals>
|
=head1 TABLE: C<jobsetevals>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("JobsetEvals");
|
__PACKAGE__->table("jobsetevals");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -40,6 +40,7 @@ __PACKAGE__->table("JobsetEvals");
|
||||||
data_type: 'integer'
|
data_type: 'integer'
|
||||||
is_auto_increment: 1
|
is_auto_increment: 1
|
||||||
is_nullable: 0
|
is_nullable: 0
|
||||||
|
sequence: 'jobsetevals_id_seq'
|
||||||
|
|
||||||
=head2 project
|
=head2 project
|
||||||
|
|
||||||
|
@ -88,11 +89,21 @@ __PACKAGE__->table("JobsetEvals");
|
||||||
data_type: 'integer'
|
data_type: 'integer'
|
||||||
is_nullable: 1
|
is_nullable: 1
|
||||||
|
|
||||||
|
=head2 flake
|
||||||
|
|
||||||
|
data_type: 'text'
|
||||||
|
is_nullable: 1
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->add_columns(
|
__PACKAGE__->add_columns(
|
||||||
"id",
|
"id",
|
||||||
{ data_type => "integer", is_auto_increment => 1, is_nullable => 0 },
|
{
|
||||||
|
data_type => "integer",
|
||||||
|
is_auto_increment => 1,
|
||||||
|
is_nullable => 0,
|
||||||
|
sequence => "jobsetevals_id_seq",
|
||||||
|
},
|
||||||
"project",
|
"project",
|
||||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
||||||
"jobset",
|
"jobset",
|
||||||
|
@ -111,6 +122,8 @@ __PACKAGE__->add_columns(
|
||||||
{ data_type => "integer", is_nullable => 1 },
|
{ data_type => "integer", is_nullable => 1 },
|
||||||
"nrsucceeded",
|
"nrsucceeded",
|
||||||
{ data_type => "integer", is_nullable => 1 },
|
{ data_type => "integer", is_nullable => 1 },
|
||||||
|
"flake",
|
||||||
|
{ data_type => "text", is_nullable => 1 },
|
||||||
);
|
);
|
||||||
|
|
||||||
=head1 PRIMARY KEY
|
=head1 PRIMARY KEY
|
||||||
|
@ -188,8 +201,8 @@ __PACKAGE__->belongs_to(
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-09 15:21:11
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:SlEiF8oN6FBK262uSiMKiw
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Ar6GRni8AcAQmuZyg6tFKw
|
||||||
|
|
||||||
__PACKAGE__->has_many(
|
__PACKAGE__->has_many(
|
||||||
"buildIds",
|
"buildIds",
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<JobsetInputAlts>
|
=head1 TABLE: C<jobsetinputalts>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("JobsetInputAlts");
|
__PACKAGE__->table("jobsetinputalts");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -121,7 +121,7 @@ __PACKAGE__->belongs_to(
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:UUO37lIuEYm0GiR92m/fyA
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:nh8dQDL9FtgzXcwjDufDMQ
|
||||||
|
|
||||||
1;
|
1;
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<JobsetInputs>
|
=head1 TABLE: C<jobsetinputs>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("JobsetInputs");
|
__PACKAGE__->table("jobsetinputs");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -130,28 +130,9 @@ __PACKAGE__->has_many(
|
||||||
undef,
|
undef,
|
||||||
);
|
);
|
||||||
|
|
||||||
=head2 jobsets
|
|
||||||
|
|
||||||
Type: has_many
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:5uKwEhDXso4IR1TFmwRxiA
|
||||||
Related object: L<Hydra::Schema::Jobsets>
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
__PACKAGE__->has_many(
|
|
||||||
"jobsets",
|
|
||||||
"Hydra::Schema::Jobsets",
|
|
||||||
{
|
|
||||||
"foreign.name" => "self.jobset",
|
|
||||||
"foreign.nixexprinput" => "self.name",
|
|
||||||
"foreign.project" => "self.project",
|
|
||||||
},
|
|
||||||
undef,
|
|
||||||
);
|
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-08 13:06:15
|
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:+mZZqLjQNwblb/EWW1alLQ
|
|
||||||
|
|
||||||
my %hint = (
|
my %hint = (
|
||||||
relations => {
|
relations => {
|
||||||
|
|
|
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<JobsetRenames>
|
=head1 TABLE: C<jobsetrenames>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("JobsetRenames");
|
__PACKAGE__->table("jobsetrenames");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
|
@ -110,8 +110,8 @@ __PACKAGE__->belongs_to(
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
||||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2014-04-23 23:13:51
|
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
|
||||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:SBpKWF9swFc9T1Uc0VFlgA
|
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:eOQbJ2O/p0G1317m3IC/KA
|
||||||
|
|
||||||
|
|
||||||
# You can replace this text with custom code or comments, and it will be preserved on regeneration
|
# You can replace this text with custom code or comments, and it will be preserved on regeneration
|
||||||
|
|
|
@ -27,20 +27,26 @@ use base 'DBIx::Class::Core';
|
||||||
|
|
||||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||||
|
|
||||||
=head1 TABLE: C<Jobsets>
|
=head1 TABLE: C<jobsets>
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->table("Jobsets");
|
__PACKAGE__->table("jobsets");
|
||||||
|
|
||||||
=head1 ACCESSORS
|
=head1 ACCESSORS
|
||||||
|
|
||||||
=head2 name
|
=head2 name
|
||||||
|
|
||||||
data_type: 'text'
|
data_type: 'text'
|
||||||
is_foreign_key: 1
|
|
||||||
is_nullable: 0
|
is_nullable: 0
|
||||||
|
|
||||||
|
=head2 id
|
||||||
|
|
||||||
|
data_type: 'integer'
|
||||||
|
is_auto_increment: 1
|
||||||
|
is_nullable: 0
|
||||||
|
sequence: 'jobsets_id_seq'
|
||||||
|
|
||||||
=head2 project
|
=head2 project
|
||||||
|
|
||||||
data_type: 'text'
|
data_type: 'text'
|
||||||
|
@ -55,13 +61,12 @@ __PACKAGE__->table("Jobsets");
|
||||||
=head2 nixexprinput
|
=head2 nixexprinput
|
||||||
|
|
||||||
data_type: 'text'
|
data_type: 'text'
|
||||||
is_foreign_key: 1
|
is_nullable: 1
|
||||||
is_nullable: 0
|
|
||||||
|
|
||||||
=head2 nixexprpath
|
=head2 nixexprpath
|
||||||
|
|
||||||
data_type: 'text'
|
data_type: 'text'
|
||||||
is_nullable: 0
|
is_nullable: 1
|
||||||
|
|
||||||
=head2 errormsg
|
=head2 errormsg
|
||||||
|
|
||||||
|
@ -139,19 +144,37 @@ __PACKAGE__->table("Jobsets");
|
||||||
data_type: 'integer'
|
data_type: 'integer'
|
||||||
is_nullable: 1
|
is_nullable: 1
|
||||||
|
|
||||||
|
=head2 type
|
||||||
|
|
||||||
|
data_type: 'integer'
|
||||||
|
default_value: 0
|
||||||
|
is_nullable: 0
|
||||||
|
|
||||||
|
=head2 flake
|
||||||
|
|
||||||
|
data_type: 'text'
|
||||||
|
is_nullable: 1
|
||||||
|
|
||||||
=cut
|
=cut
|
||||||
|
|
||||||
__PACKAGE__->add_columns(
|
__PACKAGE__->add_columns(
|
||||||
"name",
|
"name",
|
||||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
{ data_type => "text", is_nullable => 0 },
|
||||||
|
"id",
|
||||||
|
{
|
||||||
|
data_type => "integer",
|
||||||
|
is_auto_increment => 1,
|
||||||
|
is_nullable => 0,
|
||||||
|
sequence => "jobsets_id_seq",
|
||||||
|
},
|
||||||
"project",
|
"project",
|
||||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
||||||
"description",
|
"description",
|
||||||
{ data_type => "text", is_nullable => 1 },
|
{ data_type => "text", is_nullable => 1 },
|
||||||
"nixexprinput",
|
"nixexprinput",
|
||||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
{ data_type => "text", is_nullable => 1 },
|
||||||
"nixexprpath",
|
"nixexprpath",
|
||||||
{ data_type => "text", is_nullable => 0 },
|
{ data_type => "text", is_nullable => 1 },
|
||||||
"errormsg",
|
"errormsg",
|
||||||
{ data_type => "text", is_nullable => 1 },
|
{ data_type => "text", is_nullable => 1 },
|
||||||
"errortime",
|
"errortime",
|
||||||
|
@ -180,6 +203,10 @@ __PACKAGE__->add_columns(
|
||||||
{ data_type => "boolean", is_nullable => 1 },
|
{ data_type => "boolean", is_nullable => 1 },
|
||||||
"starttime",
|
"starttime",
|
||||||
{ data_type => "integer", is_nullable => 1 },
|
{ data_type => "integer", is_nullable => 1 },
|
||||||
|
"type",
|
||||||
|
{ data_type => "integer", default_value => 0, is_nullable => 0 },
|
||||||
|
"flake",
|
||||||
|
{ data_type => "text", is_nullable => 1 },
|
||||||
);
|
);
|
||||||
|
|
||||||
=head1 PRIMARY KEY
|
=head1 PRIMARY KEY
|
||||||
|
@ -196,6 +223,20 @@ __PACKAGE__->add_columns(
 
 __PACKAGE__->set_primary_key("project", "name");
 
+=head1 UNIQUE CONSTRAINTS
+
+=head2 C<jobsets_id_unique>
+
+=over 4
+
+=item * L</id>
+
+=back
+
+=cut
+
+__PACKAGE__->add_unique_constraint("jobsets_id_unique", ["id"]);
+
 =head1 RELATIONS
 
 =head2 buildmetrics
@ -216,7 +257,7 @@ __PACKAGE__->has_many(
   undef,
 );
 
-=head2 builds
+=head2 builds_jobset_ids
 
 Type: has_many
 
@ -225,7 +266,22 @@ Related object: L<Hydra::Schema::Builds>
 =cut
 
 __PACKAGE__->has_many(
-  "builds",
+  "builds_jobset_ids",
+  "Hydra::Schema::Builds",
+  { "foreign.jobset_id" => "self.id" },
+  undef,
+);
+
+=head2 builds_project_jobsets
+
+Type: has_many
+
+Related object: L<Hydra::Schema::Builds>
+
+=cut
+
+__PACKAGE__->has_many(
+  "builds_project_jobsets",
   "Hydra::Schema::Builds",
   {
     "foreign.jobset" => "self.name",
@ -234,7 +290,7 @@ __PACKAGE__->has_many(
   undef,
 );
 
-=head2 jobs
+=head2 jobs_jobset_ids
 
 Type: has_many
 
@ -243,7 +299,22 @@ Related object: L<Hydra::Schema::Jobs>
 =cut
 
 __PACKAGE__->has_many(
-  "jobs",
+  "jobs_jobset_ids",
+  "Hydra::Schema::Jobs",
+  { "foreign.jobset_id" => "self.id" },
+  undef,
+);
+
+=head2 jobs_project_jobsets
+
+Type: has_many
+
+Related object: L<Hydra::Schema::Jobs>
+
+=cut
+
+__PACKAGE__->has_many(
+  "jobs_project_jobsets",
   "Hydra::Schema::Jobs",
   {
     "foreign.jobset" => "self.name",
@ -270,21 +341,6 @@ __PACKAGE__->has_many(
   undef,
 );
 
-=head2 jobsetinput
-
-Type: belongs_to
-
-Related object: L<Hydra::Schema::JobsetInputs>
-
-=cut
-
-__PACKAGE__->belongs_to(
-  "jobsetinput",
-  "Hydra::Schema::JobsetInputs",
-  { jobset => "name", name => "nixexprinput", project => "project" },
-  { is_deferrable => 0, on_delete => "NO ACTION", on_update => "NO ACTION" },
-);
-
 =head2 jobsetinputs
 
 Type: has_many
@ -352,8 +408,43 @@ __PACKAGE__->has_many(
 );
 
 
-# Created by DBIx::Class::Schema::Loader v0.07045 @ 2017-03-09 13:03:05
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:ivYvsUyhEeaeI4EmRQ0/QQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-09 15:32:17
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:P8+t7rgpOqkGwRdM2b+3Bw
+
+
+=head2 builds
+
+Type: has_many
+
+Related object: L<Hydra::Schema::Builds>
+
+=cut
+
+__PACKAGE__->has_many(
+  "builds",
+  "Hydra::Schema::Builds",
+  { "foreign.jobset_id" => "self.id" },
+  undef,
+);
+
+=head2 jobs
+
+Type: has_many
+
+Related object: L<Hydra::Schema::Jobs>
+
+=cut
+
+__PACKAGE__->has_many(
+  "jobs",
+  "Hydra::Schema::Jobs",
+  { "foreign.jobset_id" => "self.id" },
+  undef,
+);
+
+__PACKAGE__->add_column(
+    "+id" => { retrieve_on_insert => 1 }
+);
 
 my %hint = (
     columns => [
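
The regenerated Jobsets class above gains an integer surrogate key, id (marked retrieve_on_insert so DBIx::Class reads the sequence value back after insert), and relations to Builds and Jobs that resolve through that key instead of the (project, name) string pair. A minimal usage sketch, assuming a deployed Hydra database; the "hydra"/"master" pair is hypothetical:

    use strict;
    use warnings;
    use Hydra::Model::DB;

    # Hypothetical project/jobset names; any existing pair works.
    my $db     = Hydra::Model::DB->new();
    my $jobset = $db->resultset('Jobsets')->find({ project => "hydra", name => "master" });

    # The regenerated 'builds' relation follows { "foreign.jobset_id" => "self.id" },
    # i.e. the new integer key rather than the string pair.
    printf "jobset #%d has %d builds\n", $jobset->id, $jobset->builds->count;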
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<NewsItems>
+=head1 TABLE: C<newsitems>
 
 =cut
 
-__PACKAGE__->table("NewsItems");
+__PACKAGE__->table("newsitems");
 
 =head1 ACCESSORS
 
@ -40,6 +40,7 @@ __PACKAGE__->table("NewsItems");
   data_type: 'integer'
   is_auto_increment: 1
   is_nullable: 0
+  sequence: 'newsitems_id_seq'
 
 =head2 contents
 
@ -61,7 +62,12 @@ __PACKAGE__->table("NewsItems");
 
 __PACKAGE__->add_columns(
   "id",
-  { data_type => "integer", is_auto_increment => 1, is_nullable => 0 },
+  {
+    data_type => "integer",
+    is_auto_increment => 1,
+    is_nullable => 0,
+    sequence => "newsitems_id_seq",
+  },
   "contents",
   { data_type => "text", is_nullable => 0 },
   "createtime",
@ -100,7 +106,7 @@ __PACKAGE__->belongs_to(
 );
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:3CRNsvd+YnZp9c80tuZREQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:r6vX8VG/+NQraIVKFgHzxQ
 
 1;
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<NrBuilds>
+=head1 TABLE: C<nrbuilds>
 
 =cut
 
-__PACKAGE__->table("NrBuilds");
+__PACKAGE__->table("nrbuilds");
 
 =head1 ACCESSORS
 
@ -67,8 +67,8 @@ __PACKAGE__->add_columns(
 __PACKAGE__->set_primary_key("what");
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-08-12 17:59:18
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:CK8eJGC803nGj0wnete9xg
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:qv1I8Wu7KXHAs+pyBn2ofA
 
 
 # You can replace this text with custom code or comments, and it will be preserved on regeneration
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<ProjectMembers>
+=head1 TABLE: C<projectmembers>
 
 =cut
 
-__PACKAGE__->table("ProjectMembers");
+__PACKAGE__->table("projectmembers");
 
 =head1 ACCESSORS
 
@ -103,8 +103,8 @@ __PACKAGE__->belongs_to(
 );
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:imPoiaitrTbX0vVNlF6dPA
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:e/hYmoNmcEUoGhRqtwdyQw
 
 
 # You can replace this text with custom content, and it will be preserved on regeneration
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<Projects>
+=head1 TABLE: C<projects>
 
 =cut
 
-__PACKAGE__->table("Projects");
+__PACKAGE__->table("projects");
 
 =head1 ACCESSORS
 
@ -303,8 +303,8 @@ Composing rels: L</projectmembers> -> username
 __PACKAGE__->many_to_many("usernames", "projectmembers", "username");
 
 
-# Created by DBIx::Class::Schema::Loader v0.07043 @ 2016-03-11 10:39:17
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:1ats3brIVhRTWLToIYSoaQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:pcF/8351zyo9VL6N5eimdQ
 
 my %hint = (
     columns => [
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<ReleaseMembers>
+=head1 TABLE: C<releasemembers>
 
 =cut
 
-__PACKAGE__->table("ReleaseMembers");
+__PACKAGE__->table("releasemembers");
 
 =head1 ACCESSORS
 
@ -135,7 +135,7 @@ __PACKAGE__->belongs_to(
 );
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:7M7WPlGQT6rNHKJ+82/KSA
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:k4z2YeB4gRAeAP6hmR93sQ
 
 1;
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<Releases>
+=head1 TABLE: C<releases>
 
 =cut
 
-__PACKAGE__->table("Releases");
+__PACKAGE__->table("releases");
 
 =head1 ACCESSORS
 
@ -119,7 +119,7 @@ __PACKAGE__->has_many(
 );
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:qISBiwvboB8dIdinaE45mg
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:b4M/tHOhsy234tgTf+wqjQ
 
 1;
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<SchemaVersion>
+=head1 TABLE: C<schemaversion>
 
 =cut
 
-__PACKAGE__->table("SchemaVersion");
+__PACKAGE__->table("schemaversion");
 
 =head1 ACCESSORS
 
@ -45,8 +45,8 @@ __PACKAGE__->table("SchemaVersion");
 __PACKAGE__->add_columns("version", { data_type => "integer", is_nullable => 0 });
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:08/7gbEQp1TqBiWFJXVY0w
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:2wy4FsRYVVo2RTCWXcmgvg
 
 
 # You can replace this text with custom code or comments, and it will be preserved on regeneration
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<StarredJobs>
+=head1 TABLE: C<starredjobs>
 
 =cut
 
-__PACKAGE__->table("StarredJobs");
+__PACKAGE__->table("starredjobs");
 
 =head1 ACCESSORS
 
@ -153,8 +153,8 @@ __PACKAGE__->belongs_to(
 );
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:naj5aKWuw8hLE6klmvW9Eg
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:fw4FfzmOhzDk0ZoSuNr2ww
 
 
 # You can replace this text with custom code or comments, and it will be preserved on regeneration
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<SystemStatus>
+=head1 TABLE: C<systemstatus>
 
 =cut
 
-__PACKAGE__->table("SystemStatus");
+__PACKAGE__->table("systemstatus");
 
 =head1 ACCESSORS
 
@ -67,8 +67,8 @@ __PACKAGE__->add_columns(
 __PACKAGE__->set_primary_key("what");
 
 
-# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:01:22
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:JCYi4+HwM22iucdFkhBjMg
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:GeXpTVktMXjHENa/P3qOxw
 
 
 # You can replace this text with custom code or comments, and it will be preserved on regeneration
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<SystemTypes>
+=head1 TABLE: C<systemtypes>
 
 =cut
 
-__PACKAGE__->table("SystemTypes");
+__PACKAGE__->table("systemtypes");
 
 =head1 ACCESSORS
 
@ -68,7 +68,7 @@ __PACKAGE__->add_columns(
 __PACKAGE__->set_primary_key("system");
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:8cC34cEw9T3+x+7uRs4KHQ
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:fYeKQQSS5J8rjO3t+Hbz0g
 
 1;
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<UriRevMapper>
+=head1 TABLE: C<urirevmapper>
 
 =cut
 
-__PACKAGE__->table("UriRevMapper");
+__PACKAGE__->table("urirevmapper");
 
 =head1 ACCESSORS
 
@ -67,8 +67,8 @@ __PACKAGE__->add_columns(
 __PACKAGE__->set_primary_key("baseuri");
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:G2GAF/Rb7cRkRegH94LwIA
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:FOg2/BVJK3yg8MAYMrqZOQ
 
 
 # You can replace this text with custom content, and it will be preserved on regeneration
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<UserRoles>
+=head1 TABLE: C<userroles>
 
 =cut
 
-__PACKAGE__->table("UserRoles");
+__PACKAGE__->table("userroles");
 
 =head1 ACCESSORS
 
@ -87,7 +87,7 @@ __PACKAGE__->belongs_to(
 );
 
 
-# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:aS+ivlFpndqIv8U578zz9A
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:LUw2PDFvUHs0E0UZ3oHFxw
 
 1;
@ -27,11 +27,11 @@ use base 'DBIx::Class::Core';
 
 __PACKAGE__->load_components("+Hydra::Component::ToJSON");
 
-=head1 TABLE: C<Users>
+=head1 TABLE: C<users>
 
 =cut
 
-__PACKAGE__->table("Users");
+__PACKAGE__->table("users");
 
 =head1 ACCESSORS
 
@ -192,8 +192,8 @@ Composing rels: L</projectmembers> -> project
 __PACKAGE__->many_to_many("projects", "projectmembers", "project");
 
 
-# Created by DBIx::Class::Schema::Loader v0.07043 @ 2016-05-27 11:32:14
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Az1+V+ztJoWUt50NLQR3xg
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2020-02-06 12:22:36
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:4/WZ95asbnGmK+nEHb4sLQ
 
 my %hint = (
     columns => [
@ -12,7 +12,7 @@ struct Connection : pqxx::connection
     std::string getFlags()
     {
         using namespace nix;
-        auto s = getEnv("HYDRA_DBI", "dbi:Pg:dbname=hydra;");
+        auto s = getEnv("HYDRA_DBI").value_or("dbi:Pg:dbname=hydra;");
         std::string prefix = "dbi:Pg:";
         if (std::string(s, 0, prefix.size()) != prefix)
             throw Error("$HYDRA_DBI does not denote a PostgreSQL database");
@ -14,9 +14,9 @@ struct Config
 
         /* Read hydra.conf. */
         auto hydraConfigFile = getEnv("HYDRA_CONFIG");
-        if (pathExists(hydraConfigFile)) {
+        if (hydraConfigFile && pathExists(*hydraConfigFile)) {
 
-            for (auto line : tokenizeString<Strings>(readFile(hydraConfigFile), "\n")) {
+            for (auto line : tokenizeString<Strings>(readFile(*hydraConfigFile), "\n")) {
                 line = trim(string(line, 0, line.find('#')));
 
                 auto eq = line.find('=');
@ -186,7 +186,7 @@ END;
     IF b.finished && b.buildstatus != 0; nrFailedConstituents = nrFailedConstituents + 1; END;
   END;
 %];
-[%+ IF nrFinished == nrMembers && nrFailedConstituents == 0 %]
+[%+ IF nrFinished == nrConstituents && nrFailedConstituents == 0 %]
 all [% nrConstituents %] constituent builds succeeded
 [% ELSE %]
 [% nrFailedConstituents %] out of [% nrConstituents %] constituent builds failed
@ -292,11 +292,9 @@ END;
 <th>Last successful build [% INCLUDE renderDateTime timestamp = prevSuccessfulBuild.timestamp %]</th>
 [% IF prevSuccessfulBuild && firstBrokenBuild && firstBrokenBuild.id != build.id %]
 <th>First broken build [% INCLUDE renderDateTime timestamp = firstBrokenBuild.timestamp %]
-<a class="btn btn-mini" href="[% c.uri_for(c.controller('API').action_for('logdiff') prevSuccessfulBuild.id firstBrokenBuild.id ) %]">log diff</a>
 </th>
 [% END %]
 <th>This build [% INCLUDE renderDateTime timestamp = build.timestamp %]
-<a class="btn btn-mini" href="[% c.uri_for(c.controller('API').action_for('logdiff') prevSuccessfulBuild.id build.id) %]">log diff</a>
 </th>
 </thead>
 <tr>
@ -229,9 +229,9 @@ BLOCK renderBuildStatusIcon;
 [% ELSIF buildstatus == 6 %]
 <img src="[% c.uri_for("/static/images/emojione-red-x-274c.svg") %]" height="[% size %]" width="[% size %]" title="Failed with output" alt="Failed with output" class="build-status" />
 [% ELSIF buildstatus == 7 %]
-<img src="[% c.uri_for("/static/images/emojione-red-x-274c.svg") %]" height="[% size %]" width="[% size %]" title="Timed out" alt="Timed out" class="build-status" />
+<img src="[% c.uri_for("/static/images/emojione-stopsign-1f6d1.svg") %]" height="[% size %]" width="[% size %]" title="Timed out" alt="Timed out" class="build-status" />
 [% ELSIF buildstatus == 10 %]
-<img src="[% c.uri_for("/static/images/emojione-red-x-274c.svg") %]" height="[% size %]" width="[% size %]" title="Log limit exceeded" alt="Log limit exceeded" class="build-status" />
+<img src="[% c.uri_for("/static/images/emojione-stopsign-1f6d1.svg") %]" height="[% size %]" width="[% size %]" title="Log limit exceeded" alt="Log limit exceeded" class="build-status" />
 [% ELSIF buildstatus == 11 %]
 <img src="[% c.uri_for("/static/images/emojione-red-x-274c.svg") %]" height="[% size %]" width="[% size %]" title="Output size limit exceeded" alt="Output size limit exceeded" class="build-status" />
 [% ELSIF buildstatus == 12 %]
@ -584,10 +584,10 @@ BLOCK renderJobsetOverview %]
 <td><span class="[% IF !j.enabled %]disabled-jobset[% END %] [%+ IF j.hidden %]hidden-jobset[% END %]">[% IF showProject; INCLUDE renderFullJobsetName project=j.get_column('project') jobset=j.name inRow=1; ELSE; INCLUDE renderJobsetName project=j.get_column('project') jobset=j.name inRow=1; END %]</span></td>
 <td>[% HTML.escape(j.description) %]</td>
 <td>[% IF j.lastcheckedtime;
       INCLUDE renderDateTime timestamp = j.lastcheckedtime;
       IF j.errormsg || j.fetcherrormsg; %] <span class = 'label label-warning'>Error</span>[% END;
     ELSE; "-";
     END %]</td>
 [% IF j.get_column('nrtotal') > 0 %]
 [% successrate = ( j.get_column('nrsucceeded') / j.get_column('nrtotal') )*100 %]
 [% IF j.get_column('nrscheduled') > 0 %]
@ -68,6 +68,7 @@
 <input type="hidden" name="enabled" value="[% jobset.enabled %]" />
 <button type="button" class="btn" value="1">Enabled</button>
 <button type="button" class="btn" value="2">One-shot</button>
+<button type="button" class="btn" value="3">One-at-a-time</button>
 <button type="button" class="btn" value="0">Disabled</button>
 </div>
 </div>
@ -129,7 +129,7 @@
 <table class="info-table">
 <tr>
 <th>State:</th>
-<td>[% IF jobset.enabled == 0; "Disabled"; ELSIF jobset.enabled == 1; "Enabled"; ELSIF jobset.enabled == 2; "One-shot"; END %]</td>
+<td>[% IF jobset.enabled == 0; "Disabled"; ELSIF jobset.enabled == 1; "Enabled"; ELSIF jobset.enabled == 2; "One-shot"; ELSIF jobset.enabled == 3; "One-at-a-time"; END %]</td>
 </tr>
 <tr>
 <th>Description:</th>
@ -1,8 +1,8 @@
 EXTRA_DIST = \
-	$(distributable_scripts) \
-	hydra-eval-guile-jobs.in
+	$(distributable_scripts)
 
 distributable_scripts = \
+	hydra-backfill-ids \
 	hydra-init \
 	hydra-eval-jobset \
 	hydra-server \
@ -16,5 +16,4 @@ distributable_scripts = \
 	nix-prefetch-hg
 
 bin_SCRIPTS = \
-	$(distributable_scripts) \
-	hydra-eval-guile-jobs
+	$(distributable_scripts)
164
src/script/hydra-backfill-ids
Executable file

@ -0,0 +1,164 @@
+#! /usr/bin/env perl
+
+use strict;
+use utf8;
+use Hydra::Model::DB;
+
+STDOUT->autoflush();
+STDERR->autoflush(1);
+binmode STDERR, ":encoding(utf8)";
+
+my $db = Hydra::Model::DB->new();
+my $vacuum = $db->storage->dbh->prepare("VACUUM;");
+
+my $dryRun = defined $ENV{'HYDRA_DRY_RUN'};
+
+my $batchSize = 10000;
+my $iterationsPerVacuum = 500;
+
+sub backfillJobsJobsetId {
+    my ($skipLocked) = @_;
+    my $logPrefix;
+
+    if ($skipLocked) {
+        $logPrefix = "(pass 1/2)";
+    } else {
+        $logPrefix = "(pass 2/2)";
+    }
+
+    print STDERR "$logPrefix Backfilling Jobs records where jobset_id is NULL...\n";
+
+    my $totalToGoSth = $db->storage->dbh->prepare(<<QUERY);
+      SELECT COUNT(*) FROM jobs WHERE jobset_id IS NULL
+QUERY
+
+    $totalToGoSth->execute();
+    my ($totalToGo) = $totalToGoSth->fetchrow_array;
+
+    my $skipLockedStmt = $skipLocked ? "FOR UPDATE SKIP LOCKED" : "";
+    my $update10kJobs = $db->storage->dbh->prepare(<<QUERY);
+      UPDATE jobs
+      SET jobset_id = (
+        SELECT jobsets.id
+        FROM jobsets
+        WHERE jobsets.name = jobs.jobset
+          AND jobsets.project = jobs.project
+      )
+      WHERE (jobs.project, jobs.jobset, jobs.name) in (
+        SELECT jobsprime.project, jobsprime.jobset, jobsprime.name
+        FROM jobs jobsprime
+        WHERE jobsprime.jobset_id IS NULL
+        $skipLockedStmt
+        LIMIT ?
+      );
+QUERY
+
+    print STDERR "$logPrefix Total Jobs records without a jobset_id: $totalToGo\n";
+
+    my $iteration = 0;
+    my $affected;
+    do {
+        $iteration++;
+        $affected = $update10kJobs->execute($batchSize);
+        print STDERR "$logPrefix (batch #$iteration; $totalToGo remaining) Jobs.jobset_id: affected $affected rows...\n";
+        $totalToGo -= $affected;
+
+        if ($iteration % $iterationsPerVacuum == 0) {
+            print STDERR "$logPrefix (batch #$iteration) Vacuuming...\n";
+            $vacuum->execute();
+        }
+    } while ($affected > 0);
+
+
+    if ($skipLocked) {
+        backfillJobsJobsetId(0);
+    }
+}
+
+
+sub backfillBuildsJobsetId {
+    my ($skipLocked) = @_;
+    my $logPrefix;
+
+    if ($skipLocked) {
+        $logPrefix = "(pass 1/2)";
+        print STDERR "$logPrefix Backfilling unlocked Builds records where jobset_id is NULL...\n";
+    } else {
+        $logPrefix = "(pass 2/2)";
+        print STDERR "$logPrefix Backfilling all Builds records where jobset_id is NULL...\n";
+    }
+
+    my $skipLockedStmt = $skipLocked ? "FOR UPDATE SKIP LOCKED" : "";
+    my $update10kBuilds = $db->storage->dbh->prepare(<<"QUERY");
+      WITH updateprogress AS (
+        UPDATE builds
+        SET jobset_id = (
+          SELECT jobsets.id
+          FROM jobsets
+          WHERE jobsets.name = builds.jobset
+            AND jobsets.project = builds.project
+        )
+        WHERE builds.id in (
+          SELECT buildprime.id
+          FROM builds buildprime
+          WHERE buildprime.jobset_id IS NULL
+            AND buildprime.id >= ?
+          ORDER BY buildprime.id
+          $skipLockedStmt
+          LIMIT ?
+        )
+        RETURNING id
+      )
+      SELECT
+        count(*) AS affected,
+        max(updateprogress.id) AS highest_id
+      FROM updateprogress;
+
+QUERY
+
+    my $lowestNullIdSth = $db->storage->dbh->prepare(<<QUERY);
+      SELECT id FROM builds WHERE jobset_id IS NULL ORDER BY id LIMIT 1
+QUERY
+    $lowestNullIdSth->execute();
+    my ($highestId) = $lowestNullIdSth->fetchrow_array;
+
+    my $totalToGoSth = $db->storage->dbh->prepare(<<QUERY);
+      SELECT COUNT(*) FROM builds WHERE jobset_id IS NULL AND id >= ?
+QUERY
+    $totalToGoSth->execute($highestId);
+    my ($totalToGo) = $totalToGoSth->fetchrow_array;
+
+    print STDERR "$logPrefix Total Builds records without a jobset_id: $totalToGo, starting at $highestId\n";
+
+    my $iteration = 0;
+    my $affected;
+    do {
+        my $previousHighId = $highestId;
+        $iteration++;
+        $update10kBuilds->execute($highestId, $batchSize);
+        ($affected, $highestId) = $update10kBuilds->fetchrow_array;
+
+        print STDERR "$logPrefix (batch #$iteration; $totalToGo remaining) Builds.jobset_id: affected $affected rows; max ID: $previousHighId -> $highestId\n";
+        $totalToGo -= $affected;
+
+        if ($iteration % $iterationsPerVacuum == 0) {
+            print STDERR "$logPrefix (batch #$iteration) Vacuuming...\n";
+            $vacuum->execute();
+        }
+    } while ($affected > 0);
+
+    if ($skipLocked) {
+        backfillBuildsJobsetId(0);
+    }
+}
+
+die "syntax: $0\n" unless @ARGV == 0;
+
+print STDERR "Beginning with a VACUUM\n";
+$vacuum->execute();
+
+backfillJobsJobsetId(1);
+backfillBuildsJobsetId(1);
+
+print STDERR "Ending with a VACUUM\n";
+$vacuum->execute();
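
The backfill script's core trick is the two-pass FOR UPDATE SKIP LOCKED sweep: pass 1 updates every row it can lock without waiting, then pass 2 blocks on whatever was busy. A condensed sketch of that loop shape, using a hypothetical items table and a placeholder assignment (neither is part of the patch):

    use strict;
    use warnings;
    use DBI;

    # Hypothetical connection and table, mirroring the batch loop above.
    my $dbh   = DBI->connect("dbi:Pg:dbname=hydra", "", "", { RaiseError => 1 });
    my $skip  = 1;  # pass 1: skip locked rows; rerun with 0 for pass 2
    my $locks = $skip ? "FOR UPDATE SKIP LOCKED" : "";
    my $batch = $dbh->prepare(qq{
        UPDATE items SET new_id = 42   -- hypothetical table and assignment
        WHERE id IN (
            SELECT id FROM items WHERE new_id IS NULL
            $locks
            LIMIT ?
        )
    });

    my $affected;
    do {
        # execute() returns the affected row count ("0E0" when zero).
        $affected = $batch->execute(10000);
        print STDERR "updated $affected rows\n";
    } while ($affected > 0);

Running the skip-locked pass first keeps the bulk of the migration from contending with an evaluator or queue runner that is still touching rows.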
@ -1,249 +0,0 @@
-#!/bin/sh
-# Aside from this initial boilerplate, this is actually -*- scheme -*- code.
-main="(module-ref (resolve-interface '(hydra-eval-guile-jobs)) 'eval-guile-jobs)"
-
-# Keep the host's GUILE_LOAD_PATH unchanged to allow the installed Guix to
-# be used.  This moves Guix modules possibly out of control, but solves
-# bootstrapping issues.
-#
-# Use `--fresh-auto-compile' to ignore any available .go, and force
-# recompilation.  This is because checkouts in the store has mtime set to
-# the epoch, and thus .go files look newer, even though they may not
-# correspond.
-
-exec ${GUILE:-@GUILE@} --no-auto-compile --fresh-auto-compile \
-  -l "$0" -c "(apply $main (cdr (command-line)))" "$@"
-!#
-;;; Copyright © 2012, 2013, 2014 Ludovic Courtès <ludo@gnu.org>
-;;;
-;;; This file is part of Hydra.
-;;;
-;;; Hydra is free software: you can redistribute it and/or modify
-;;; it under the terms of the GNU General Public License as published by
-;;; the Free Software Foundation, either version 3 of the License, or
-;;; (at your option) any later version.
-;;;
-;;; Hydra is distributed in the hope that it will be useful,
-;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
-;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-;;; GNU General Public License for more details.
-;;;
-;;; You should have received a copy of the GNU General Public License
-;;; along with Hydra.  If not, see <http://www.gnu.org/licenses/>.
-
-(define-module (hydra-eval-guile-jobs)
-  #:use-module (sxml simple)
-  #:use-module (ice-9 match)
-  #:use-module (ice-9 regex)
-  #:use-module (srfi srfi-1)
-  #:use-module (srfi srfi-11)
-  #:export (job-evaluations->xml
-            eval-guile-jobs))
-
-(define (guix-variable module name)
-  "Dynamically link variable NAME under Guix module MODULE and return it.
-Note: this is used instead of `@', because when using `@' in an uncompiled
-file, Guile tries to load the module directly as it reads the source, which
-fails in our case, leading to the creation of empty (guix ...) modules."
-  ;; TODO: fail with an XML error description
-  (let ((m (resolve-interface `(guix ,module))))
-    (module-ref m name)))
-
-(define (%derivation-system drv)
-  ;; XXX: Awful hack to workaround the fact that `derivation-system', which
-  ;; is a macro, cannot be referred to dynamically.
-  (struct-ref drv 3))
-
-(define strip-store-path
-  (let* ((store (or (getenv "NIX_STORE_DIR") "/nix/store"))
-         (store-path-rx
-          (make-regexp (string-append "^.*" (regexp-quote store)
-                                      "/[^-]+-(.+)$"))))
-    (lambda (path)
-      (or (and=> (regexp-exec store-path-rx path)
-                 (lambda (match)
-                   (let ((path (match:substring match 1)))
-                     path)))
-          path))))
-
-(define (derivation-path->name drv)
-  "Return the base name of DRV, sans hash and `.drv' extension."
-  (let ((d (strip-store-path drv)))
-    (if (string-suffix? ".drv" d)
-        (string-drop-right d 4)
-        d)))
-
-(define (register-gc-root drv roots-dir)
-  "Register a permanent garbage collector root under ROOTS-DIR for DRV."
-  (let ((root (string-append roots-dir "/" (basename drv))))
-    (unless (file-exists? root)
-      (symlink drv root))))
-
-(define* (job-evaluations->sxml jobs
-                                #:key gc-roots-dir)
-  "Return the hydra-eval-jobs SXML form for the result of JOBS, a list of
-symbol/thunk pairs."
-  `(*TOP*
-    (*PI* xml "version='1.0' encoding='utf-8'")
-    "\n"
-    (jobs "\n"
-          ,@(map (match-lambda
-                  (((? symbol? name) . (? thunk? thunk))
-                   (let* ((result (save-module-excursion
-                                   (lambda ()
-                                     (set-current-module %user-module)
-                                     (with-output-to-port (%make-void-port "w")
-                                       thunk))))
-                          (drv (assoc-ref result 'derivation)))
-                     (define (opt-attr xml-name name)
-                       (match (assoc name result)
-                         ((_ . value)
-                          `((,xml-name ,value)))
-                         (_
-                          '())))
-
-                     (when gc-roots-dir
-                       ;; Register DRV as a GC root so that it's not collected by
-                       ;; the time 'hydra-queue-runner' attempts to build it.
-                       (register-gc-root drv gc-roots-dir))
-
-                     ;; XXX: Add <arg ...> tags?
-                     `(job (@ (jobName ,name)
-                              (drvPath ,drv)
-                              ,@(opt-attr 'homepage 'home-page)
-                              (license
-                               ,(let loop ((license (assoc-ref result 'license)))
-                                  (match license
-                                    ((? struct?)
-                                     (struct-ref license 0))
-                                    ((l ...)
-                                     (string-join (map loop l)))
-                                    (_ ""))))
-                              ,@(opt-attr 'description 'description)
-                              (maintainers
-                               ,(string-join (or (assoc-ref result 'maintainers)
-                                                 '())
-                                             ", "))
-                              (maxSilent
-                               ,(number->string (or (assoc-ref result
-                                                               'max-silent-time)
-                                                    3600)))
-                              (timeout
-                               ,(number->string (or (assoc-ref result 'timeout)
-                                                    72000)))
-                              (nixName ,(derivation-path->name drv))
-                              (schedulingPriority
-                               ,(number->string (or (assoc-ref result
-                                                               'scheduling-priority)
-                                                    10)))
-                              (system
-                               ,(call-with-input-file drv
-                                  (compose %derivation-system
-                                           (guix-variable 'derivations
-                                                          'read-derivation)))))
-                           ;; Resolve Guix modules lazily.
-                           ,(map (match-lambda
-                                  ((name . path)
-                                   `(output (@ (name ,name) (path ,path)))))
-                                 ((guix-variable 'derivations
-                                                 'derivation-path->output-paths)
-                                  drv))
-
-                           "\n"))))
-                 jobs))))
-
-(define* (job-evaluations->xml jobs port
-                               #:key gc-roots-dir)
-  (set-port-encoding! port "UTF-8")
-  (sxml->xml (job-evaluations->sxml jobs #:gc-roots-dir gc-roots-dir)
-             port))
-
-
-;;;
-;;; Command-line entry point.
-;;;
-
-(define (parse-arguments args)
-  "Traverse ARGS, a list of command-line arguments compatible with
-`hydra-eval-jobs', and return the name of the file that defines the jobs, an
-expression that returns the entry point in that file (a unary procedure), the
-list of name/value pairs passed to that entry point, as well as a GC root
-directory or #f."
-  (define (module-directory dir)
-    (let ((d (string-append dir "/share/guile/site/2.0")))
-      (if (file-exists? d)
-          d
-          dir)))
-
-  (let loop ((args args)
-             (result '())
-             (file #f)
-             (entry 'hydra-jobs)
-             (roots-dir #f))
-    (match args
-      (()
-       (if (not file)
-           (error "hydra-eval-guile-jobs: no expression file given")
-           (values file entry (reverse result) roots-dir)))
-      (("-I" name=dir rest ...)
-       (let* ((dir (match (string-tokenize name=dir
-                                           (char-set-complement (char-set
-                                                                 #\=)))
-                     ((_ dir) dir)
-                     ((dir) dir)))
-              (dir* (module-directory dir)))
-         (format (current-error-port) "adding `~a' to the load path~%" dir*)
-         (set! %load-path (cons dir* %load-path))
-         (set! %load-compiled-path (cons dir* %load-compiled-path)))
-       (loop rest result file entry roots-dir))
-      (("--argstr" name value rest ...)
-       (loop rest (alist-cons (string->symbol name) value result)
-             file entry roots-dir))
-      (("--arg" name expr rest ...)
-       (let ((value (eval (call-with-input-string expr read)
-                          (current-module))))
-         (loop rest (alist-cons (string->symbol name) value result)
-               file entry roots-dir)))
-      (("--gc-roots-dir" dir rest ...)
-       (loop rest result file entry dir))
-      (("-j" _ rest ...)                ; XXX: what's this?
-       (loop rest result file entry roots-dir))
-      (("--entry" expr rest ...)        ; entry point, like `guile -e'
-       (let ((expr (call-with-input-string expr read)))
-         (loop rest result file expr roots-dir)))
-      ((file rest ...)                  ; source file that defines the jobs
-       (loop rest result file entry roots-dir))
-      (_
-       (error "hydra-eval-guile-jobs: invalid arguments" args)))))
-
-(define %user-module
-  ;; Hydra user module.
-  ;; TODO: Make it a sandbox.
-  (let ((m (make-module)))
-    (beautify-user-module! m)
-    m))
-
-(define (eval-guile-jobs . args)
-  (setlocale LC_ALL "")
-
-  (let-values (((file entry args gc-roots-dir)
-                (parse-arguments args)))
-
-    (save-module-excursion
-     (lambda ()
-       (set-current-module %user-module)
-
-       ;; The standard output must contain only XML.
-       (with-output-to-port (%make-void-port "w")
-         (lambda ()
-           (primitive-load file)))))
-
-    (let* ((entry (eval entry %user-module))
-           (store ((guix-variable 'store 'open-connection)))
-           (jobs (entry store args)))
-      (unless (string? gc-roots-dir)
-        (format (current-error-port)
-                "warning: --gc-roots-dir not specified~%"))
-
-      (job-evaluations->xml jobs (current-output-port)
-                            #:gc-roots-dir gc-roots-dir))))
@ -82,7 +82,7 @@ sub getPath {
 
     my $substituter = $config->{eval_substituter};
 
-    system("nix", "copy", "--from", $substituter, "--", $path)
+    system("nix", "--experimental-features", "nix-command", "copy", "--from", $substituter, "--", $path)
         if defined $substituter;
 
     return isValidPath($path);
@ -143,7 +143,7 @@ sub fetchInputSystemBuild {
     $jobsetName ||= $jobset->name;
 
     my @latestBuilds = $db->resultset('LatestSucceededForJob')
-        ->search({}, {bind => [$projectName, $jobsetName, $jobName]});
+        ->search({}, {bind => [$jobsetName, $jobName]});
 
     my @validBuilds = ();
     foreach my $build (@latestBuilds) {
@ -264,53 +264,31 @@ sub fetchInput {
 
 
 sub booleanToString {
-    my ($exprType, $value) = @_;
-    my $result;
-    if ($exprType eq "guile") {
-        if ($value eq "true") {
-            $result = "#t";
-        } else {
-            $result = "#f";
-        }
-        $result = $value;
-    } else {
-        $result = $value;
-    }
-    return $result;
+    my ($value) = @_;
+    return $value;
 }
 
 
 sub buildInputToString {
-    my ($exprType, $input) = @_;
-    my $result;
-    if ($exprType eq "guile") {
-        $result = "'((file-name . \"" . ${input}->{storePath} . "\")" .
-            (defined $input->{revision} ? "(revision . \"" . $input->{revision} . "\")" : "") .
-            (defined $input->{revCount} ? "(revision-count . " . $input->{revCount} . ")" : "") .
-            (defined $input->{gitTag} ? "(git-tag . \"" . $input->{gitTag} . "\")" : "") .
-            (defined $input->{shortRev} ? "(short-revision . \"" . $input->{shortRev} . "\")" : "") .
-            (defined $input->{version} ? "(version . \"" . $input->{version} . "\")" : "") .
-            ")";
-    } else {
-        $result = "{ outPath = builtins.storePath " . $input->{storePath} . "" .
-            "; inputType = \"" . $input->{type} . "\"" .
-            (defined $input->{uri} ? "; uri = \"" . $input->{uri} . "\"" : "") .
-            (defined $input->{revNumber} ? "; rev = " . $input->{revNumber} . "" : "") .
-            (defined $input->{revision} ? "; rev = \"" . $input->{revision} . "\"" : "") .
-            (defined $input->{revCount} ? "; revCount = " . $input->{revCount} . "" : "") .
-            (defined $input->{gitTag} ? "; gitTag = \"" . $input->{gitTag} . "\"" : "") .
-            (defined $input->{shortRev} ? "; shortRev = \"" . $input->{shortRev} . "\"" : "") .
-            (defined $input->{version} ? "; version = \"" . $input->{version} . "\"" : "") .
-            (defined $input->{outputName} ? "; outputName = \"" . $input->{outputName} . "\"" : "") .
-            (defined $input->{drvPath} ? "; drvPath = builtins.storePath " . $input->{drvPath} . "" : "") .
-            ";}";
-    }
-    return $result;
+    my ($input) = @_;
+    return
+        "{ outPath = builtins.storePath " . $input->{storePath} . "" .
+        "; inputType = \"" . $input->{type} . "\"" .
+        (defined $input->{uri} ? "; uri = \"" . $input->{uri} . "\"" : "") .
+        (defined $input->{revNumber} ? "; rev = " . $input->{revNumber} . "" : "") .
+        (defined $input->{revision} ? "; rev = \"" . $input->{revision} . "\"" : "") .
+        (defined $input->{revCount} ? "; revCount = " . $input->{revCount} . "" : "") .
+        (defined $input->{gitTag} ? "; gitTag = \"" . $input->{gitTag} . "\"" : "") .
+        (defined $input->{shortRev} ? "; shortRev = \"" . $input->{shortRev} . "\"" : "") .
+        (defined $input->{version} ? "; version = \"" . $input->{version} . "\"" : "") .
+        (defined $input->{outputName} ? "; outputName = \"" . $input->{outputName} . "\"" : "") .
+        (defined $input->{drvPath} ? "; drvPath = builtins.storePath " . $input->{drvPath} . "" : "") .
+        ";}";
 }
 
 
 sub inputsToArgs {
-    my ($inputInfo, $exprType) = @_;
+    my ($inputInfo) = @_;
     my @res = ();
 
     foreach my $input (sort keys %{$inputInfo}) {
@ -327,14 +305,12 @@ sub inputsToArgs {
             push @res, "--argstr", $input, $alt->{value};
         }
         elsif ($alt->{type} eq "boolean") {
-            push @res, "--arg", $input, booleanToString($exprType, $alt->{value});
+            push @res, "--arg", $input, booleanToString($alt->{value});
        }
        elsif ($alt->{type} eq "nix") {
-            die "input type ‘nix’ only supported for Nix-based jobsets\n" unless $exprType eq "nix";
            push @res, "--arg", $input, $alt->{value};
        }
        elsif ($alt->{type} eq "eval") {
-            die "input type ‘eval’ only supported for Nix-based jobsets\n" unless $exprType eq "nix";
            my $s = "{ ";
            # FIXME: escape $_. But dots should not be escaped.
            $s .= "$_ = builtins.storePath ${\$alt->{jobs}->{$_}}; "
@ -343,7 +319,7 @@ sub inputsToArgs {
            push @res, "--arg", $input, $s;
        }
        else {
-            push @res, "--arg", $input, buildInputToString($exprType, $alt);
+            push @res, "--arg", $input, buildInputToString($alt);
        }
    }
 
@ -352,18 +328,16 @@ sub inputsToArgs {
 
 
 sub evalJobs {
-    my ($inputInfo, $exprType, $nixExprInputName, $nixExprPath) = @_;
+    my ($inputInfo, $nixExprInputName, $nixExprPath) = @_;
 
     my $nixExprInput = $inputInfo->{$nixExprInputName}->[0]
        or die "cannot find the input containing the job expression\n";
 
-    my $evaluator = ($exprType eq "guile") ? "hydra-eval-guile-jobs" : "hydra-eval-jobs";
-
-    my @cmd = ($evaluator,
+    my @cmd = ("hydra-eval-jobs",
        "<" . $nixExprInputName . "/" . $nixExprPath . ">",
        "--gc-roots-dir", getGCRootsDir,
        "-j", 1,
-        inputsToArgs($inputInfo, $exprType));
+        inputsToArgs($inputInfo));
 
    if (defined $ENV{'HYDRA_DEBUG'}) {
        sub escape {
@ -376,7 +350,7 @@ sub evalJobs {
        }
    }
 
    (my $res, my $jobsJSON, my $stderr) = captureStdoutStderr(21600, @cmd);
-    die "$evaluator returned " . ($res & 127 ? "signal $res" : "exit code " . ($res >> 8))
+    die "hydra-eval-jobs returned " . ($res & 127 ? "signal $res" : "exit code " . ($res >> 8))
        . ":\n" . ($stderr ? decode("utf-8", $stderr) : "(no output)\n")
        if $res;
 
@ -417,7 +391,12 @@ sub checkBuild {
    my $build;
 
    txn_do($db, sub {
-        my $job = $jobset->jobs->update_or_create({ name => $jobName });
+        my $job = $jobset->jobs->update_or_create({
+            name => $jobName,
+            jobset_id => $jobset->id,
+            project => $jobset->project,
+            jobset => $jobset->name,
+        });
 
        # Don't add a build that has already been scheduled for this
        # job, or has been built but is still a "current" build for
@ -464,6 +443,9 @@ sub checkBuild {
        # Add the build to the database.
        $build = $job->builds->create(
            { timestamp => $time
+            , project => $jobset->project
+            , jobset => $jobset->name
+            , jobset_id => $jobset->id
            , description => null($buildInfo->{description})
            , license => null($buildInfo->{license})
            , homepage => null($buildInfo->{homepage})
@ -587,7 +569,6 @@ sub checkJobsetWrapped {
        $jobset->discard_changes;
        $inputInfo->{"declInput"} = [ $declInput ];
    }
-    my $exprType = $jobset->nixexprpath =~ /.scm$/ ? "guile" : "nix";
 
    # Fetch all values for all inputs.
    my $checkoutStart = clock_gettime(CLOCK_MONOTONIC);
@ -613,7 +594,7 @@ sub checkJobsetWrapped {
    # Hash the arguments to hydra-eval-jobs and check the
    # JobsetInputHashes to see if the previous evaluation had the same
    # inputs. If so, bail out.
-    my @args = ($jobset->nixexprinput, $jobset->nixexprpath, inputsToArgs($inputInfo, $exprType));
+    my @args = ($jobset->nixexprinput, $jobset->nixexprpath, inputsToArgs($inputInfo));
    my $argsHash = sha256_hex("@args");
    my $prevEval = getPrevJobsetEval($db, $jobset, 0);
    if (defined $prevEval && $prevEval->hash eq $argsHash && !$dryRun && !$jobset->forceeval) {
@ -628,7 +609,7 @@ sub checkJobsetWrapped {
 
    # Evaluate the job expression.
    my $evalStart = clock_gettime(CLOCK_MONOTONIC);
-    my ($jobs, $nixExprInput) = evalJobs($inputInfo, $exprType, $jobset->nixexprinput, $jobset->nixexprpath);
+    my ($jobs, $nixExprInput) = evalJobs($inputInfo, $jobset->nixexprinput, $jobset->nixexprpath);
    my $evalStop = clock_gettime(CLOCK_MONOTONIC);
 
    if ($jobsetsJobset) {
@ -716,7 +697,7 @@ sub checkJobsetWrapped {
    foreach my $job (values %{$jobs}) {
        next unless $job->{constituents};
        my $x = $drvPathToId{$job->{drvPath}} or die;
-        foreach my $drvPath (split / /, $job->{constituents}) {
+        foreach my $drvPath (@{$job->{constituents}}) {
            my $constituent = $drvPathToId{$drvPath};
            if (defined $constituent) {
                $db->resultset('AggregateConstituents')->update_or_create({aggregate => $x->{id}, constituent => $constituent->{id}});
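
With the Guile branch gone, buildInputToString always renders the Nix attrset form. A standalone sketch of the output for a hypothetical Git input, with the helper trimmed to the fields most inputs carry:

    use strict;
    use warnings;

    # Trimmed copy of the simplified helper above, fed a hypothetical input.
    sub buildInputToString {
        my ($input) = @_;
        return
            "{ outPath = builtins.storePath " . $input->{storePath} .
            "; inputType = \"" . $input->{type} . "\"" .
            (defined $input->{revision} ? "; rev = \"" . $input->{revision} . "\"" : "") .
            (defined $input->{revCount} ? "; revCount = " . $input->{revCount} : "") .
            ";}";
    }

    print buildInputToString({
        storePath => "/nix/store/example-src",   # hypothetical store path
        type      => "git",
        revision  => "0123abcd",
        revCount  => 42,
    }), "\n";
    # prints: { outPath = builtins.storePath /nix/store/example-src; inputType = "git"; rev = "0123abcd"; revCount = 42;}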
@ -44,6 +44,17 @@ my @versions = $db->resultset('SchemaVersion')->all;
 die "couldn't get Hydra schema version!" if scalar @versions != 1;
 my $schemaVersion = $versions[0]->version;
 
+if ($schemaVersion <= 60) {
+    print STDERR <<QUOTE;
+      WARNING: Schema version 62 and 63 make nullable jobset_id fields on
+               Builds and Jobs non-nullable. On big Hydra servers, this
+               migration will take many hours. Because of that, the
+               migration is not automatic, and must be performed manually.
+
+               To backfill these IDs, run: hydra-backfill-ids
+QUOTE
+}
+
 for (my $n = $schemaVersion; $n < $maxSchemaVersion; $n++) {
     my $m = $n + 1;
     print STDERR "upgrading Hydra schema from version $n to $m\n";
@ -34,6 +34,7 @@ sub sendQueueRunnerStats {
     gauge("hydra.queue.steps.unfinished", $json->{nrUnfinishedSteps});
     gauge("hydra.queue.steps.finished", $json->{nrStepsDone});
     gauge("hydra.queue.steps.retries", $json->{nrRetries});
+    gauge("hydra.queue.steps.unsupported", $json->{nrUnsupportedSteps});
     gauge("hydra.queue.steps.max_retries", $json->{maxNrRetries});
     if ($json->{nrStepsDone}) {
         gauge("hydra.queue.steps.avg_total_time", $json->{avgStepTime});
@ -2,7 +2,6 @@ sqldir = $(libexecdir)/hydra/sql
 nobase_dist_sql_DATA = \
 	hydra-postgresql.sql \
 	hydra.sql \
-	hydra-sqlite.sql \
 	test.sql \
 	upgrade-*.sql \
 	update-dbix.pl
@ -10,10 +9,5 @@ nobase_dist_sql_DATA = \
 hydra-postgresql.sql: hydra.sql
 	cpp -P -E -traditional-cpp -DPOSTGRESQL hydra.sql > $@ || rm -f $@
 
-hydra-sqlite.sql: hydra.sql
-	cpp -P -E -traditional-cpp -DSQLITE hydra.sql > $@ || rm -f $@
-
-update-dbix: hydra-sqlite.sql
-	rm -f tmp.sqlite
-	sqlite3 tmp.sqlite < hydra-sqlite.sql
-	perl -I ../lib -MDBIx::Class::Schema::Loader=make_schema_at,dump_to_dir:../lib update-dbix.pl
+update-dbix: hydra-postgresql.sql
+	./update-dbix-harness.sh
src/sql/hydra.sql
@@ -52,15 +52,16 @@ create table ProjectMembers (
 -- describing build jobs.
 create table Jobsets (
     name text not null,
+    id serial not null,
     project text not null,
     description text,
-    nixExprInput text not null, -- name of the jobsetInput containing the Nix or Guix expression
-    nixExprPath text not null, -- relative path of the Nix or Guix expression
+    nixExprInput text, -- name of the jobsetInput containing the Nix or Guix expression
+    nixExprPath text, -- relative path of the Nix or Guix expression
     errorMsg text, -- used to signal the last evaluation error etc. for this jobset
     errorTime integer, -- timestamp associated with errorMsg
     lastCheckedTime integer, -- last time the evaluator looked at this jobset
     triggerTime integer, -- set if we were triggered by a push event
-    enabled integer not null default 1, -- 0 = disabled, 1 = enabled, 2 = one-shot
+    enabled integer not null default 1, -- 0 = disabled, 1 = enabled, 2 = one-shot, 3 = one-at-a-time
     enableEmail integer not null default 1,
     hidden integer not null default 0,
     emailOverride text not null,
@@ -70,9 +71,14 @@ create table Jobsets (
     fetchErrorMsg text,
     forceEval boolean,
     startTime integer, -- if jobset is currently running
+    type integer not null default 0, -- 0 == legacy, 1 == flake
+    flake text,
     check (schedulingShares > 0),
+    check ((type = 0) = (nixExprInput is not null and nixExprPath is not null)),
+    check ((type = 1) = (flake is not null)),
     primary key (project, name),
-    foreign key (project) references Projects(name) on delete cascade on update cascade
+    foreign key (project) references Projects(name) on delete cascade on update cascade,
+    constraint Jobsets_id_unique UNIQUE(id)
 #ifdef SQLITE
     ,
     foreign key (project, name, nixExprInput) references JobsetInputs(project, jobset, name)
@@ -140,9 +146,11 @@ create table JobsetInputAlts (
 create table Jobs (
     project text not null,
     jobset text not null,
+    jobset_id integer not null,
     name text not null,
 
     primary key (project, jobset, name),
+    foreign key (jobset_id) references Jobsets(id) on delete cascade,
     foreign key (project) references Projects(name) on delete cascade on update cascade,
     foreign key (project, jobset) references Jobsets(project, name) on delete cascade on update cascade
 );
@@ -162,6 +170,7 @@ create table Builds (
     -- Info about the inputs.
     project text not null,
     jobset text not null,
+    jobset_id integer not null,
     job text not null,
 
     -- Info about the build result.
@@ -181,7 +190,8 @@ create table Builds (
 
     -- Copy of the nixExprInput/nixExprPath fields of the jobset that
     -- instantiated this build. Needed if we want to reproduce this
-    -- build.
+    -- build. FIXME: this should be stored in JobsetEvals, storing it
+    -- here is denormal.
     nixExprInput text,
     nixExprPath text,
 
@@ -227,6 +237,7 @@ create table Builds (
     check (finished = 0 or (stoptime is not null and stoptime != 0)),
     check (finished = 0 or (starttime is not null and starttime != 0)),
 
+    foreign key (jobset_id) references Jobsets(id) on delete cascade,
     foreign key (project) references Projects(name) on update cascade,
     foreign key (project, jobset) references Jobsets(project, name) on update cascade,
     foreign key (project, jobset, job) references Jobs(project, jobset, name) on update cascade
@@ -522,6 +533,8 @@ create table JobsetEvals (
     nrBuilds integer,
     nrSucceeded integer, -- set lazily when all builds are finished
 
+    flake text, -- immutable flake reference
+
     foreign key (project) references Projects(name) on delete cascade on update cascade,
     foreign key (project, jobset) references Jobsets(project, name) on delete cascade on update cascade
 );
@@ -669,6 +682,8 @@ create index IndexBuildsOnProject on Builds(project);
 create index IndexBuildsOnTimestamp on Builds(timestamp);
 create index IndexBuildsOnFinishedStopTime on Builds(finished, stoptime DESC);
 create index IndexBuildsOnJobFinishedId on builds(project, jobset, job, system, finished, id DESC);
+create index IndexBuildsOnJobsetIdFinishedId on Builds(id DESC, finished, job, jobset_id);
+create index IndexFinishedSuccessfulBuilds on Builds(id DESC, buildstatus, finished, job, jobset_id) where buildstatus = 0 and finished = 1;
 create index IndexBuildsOnDrvPath on Builds(drvPath);
 create index IndexCachedHgInputsOnHash on CachedHgInputs(uri, branch, sha256hash);
 create index IndexCachedGitInputsOnHash on CachedGitInputs(uri, branch, sha256hash);
src/sql/update-dbix-harness.sh (new executable file, 40 lines)
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+
+readonly scratch=$(mktemp -d -t tmp.XXXXXXXXXX)
+
+readonly socket=$scratch/socket
+readonly data=$scratch/data
+readonly dbname=hydra-update-dbix
+
+function finish {
+  set +e
+  pg_ctl -D "$data" \
+    -o "-F -h '' -k \"$socket\"" \
+    -w stop -m immediate
+
+  if [ -f "$data/postmaster.pid" ]; then
+    pg_ctl -D "$data" \
+      -o "-F -h '' -k \"$socket\"" \
+      -w kill TERM "$(cat "$data/postmaster.pid")"
+  fi
+
+  rm -rf "$scratch"
+}
+trap finish EXIT
+
+set -e
+
+mkdir -p "$socket"
+initdb -D "$data"
+
+pg_ctl -D "$data" \
+  -o "-F -h '' -k \"${socket}\"" \
+  -w start
+
+createdb -h "$socket" "$dbname"
+
+psql -h "$socket" "$dbname" -f ./hydra-postgresql.sql
+
+perl -I ../lib \
+  -MDBIx::Class::Schema::Loader=make_schema_at,dump_to_dir:../lib \
+  update-dbix.pl "dbi:Pg:dbname=$dbname;host=$socket"
src/sql/update-dbix.pl
@@ -1,8 +1,49 @@
+use Cwd;
+
+die "$0: dbi connection string required \n" if scalar @ARGV != 1;
+
 make_schema_at("Hydra::Schema", {
     naming => { ALL => "v5" },
     relationships => 1,
-    moniker_map => sub { return "$_"; },
+    moniker_map => {
+        "aggregateconstituents" => "AggregateConstituents",
+        "buildinputs" => "BuildInputs",
+        "buildmetrics" => "BuildMetrics",
+        "buildoutputs" => "BuildOutputs",
+        "buildproducts" => "BuildProducts",
+        "builds" => "Builds",
+        "buildstepoutputs" => "BuildStepOutputs",
+        "buildsteps" => "BuildSteps",
+        "cachedbazaarinputs" => "CachedBazaarInputs",
+        "cachedcvsinputs" => "CachedCVSInputs",
+        "cacheddarcsinputs" => "CachedDarcsInputs",
+        "cachedgitinputs" => "CachedGitInputs",
+        "cachedhginputs" => "CachedHgInputs",
+        "cachedpathinputs" => "CachedPathInputs",
+        "cachedsubversioninputs" => "CachedSubversionInputs",
+        "failedpaths" => "FailedPaths",
+        "jobs" => "Jobs",
+        "jobsetevalinputs" => "JobsetEvalInputs",
+        "jobsetevalmembers" => "JobsetEvalMembers",
+        "jobsetevals" => "JobsetEvals",
+        "jobsetinputalts" => "JobsetInputAlts",
+        "jobsetinputs" => "JobsetInputs",
+        "jobsetrenames" => "JobsetRenames",
+        "jobsets" => "Jobsets",
+        "newsitems" => "NewsItems",
+        "nrbuilds" => "NrBuilds",
+        "projectmembers" => "ProjectMembers",
+        "projects" => "Projects",
+        "releasemembers" => "ReleaseMembers",
+        "releases" => "Releases",
+        "schemaversion" => "SchemaVersion",
+        "starredjobs" => "StarredJobs",
+        "systemstatus" => "SystemStatus",
+        "systemtypes" => "SystemTypes",
+        "urirevmapper" => "UriRevMapper",
+        "userroles" => "UserRoles",
+        "users" => "Users",
+    }, #sub { return "$_"; },
     components => [ "+Hydra::Component::ToJSON" ],
     rel_name_map => { buildsteps_builds => "buildsteps" }
-}, ["dbi:SQLite:tmp.sqlite"]);
+}, [$ARGV[0]]);
src/sql/upgrade-58.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
+alter table Jobsets alter column nixExprInput drop not null;
+alter table Jobsets alter column nixExprPath drop not null;
+alter table Jobsets add column type integer default 0;
+alter table Jobsets add column flake text;
+alter table Jobsets add check ((type = 0) = (nixExprInput is not null and nixExprPath is not null));
+alter table Jobsets add check ((type = 1) = (flake is not null));
+alter table JobsetEvals add column flake text;
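Taken together, the two new checks make type a discriminator: a legacy jobset (type = 0) must have both nixExprInput and nixExprPath set, while a flake jobset (type = 1) must have flake set and must not have a complete expression pair. A minimal sketch of a conforming conversion (the project name, jobset name, and flake reference here are hypothetical, not from this commit):

-- Hypothetical: turning a legacy jobset into a flake jobset. All three
-- fields must change together, or the new checks reject the row.
update Jobsets
set type = 1,
    flake = 'github:NixOS/nixpkgs/nixos-unstable',
    nixExprInput = null,
    nixExprPath = null
where project = 'hydra' and name = 'nightly';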
src/sql/upgrade-59.sql (new file, 4 lines)
@@ -0,0 +1,4 @@
+-- will automatically add unique IDs to Jobsets.
+ALTER TABLE Jobsets
+  ADD COLUMN id SERIAL NOT NULL,
+  ADD CONSTRAINT Jobsets_id_unique UNIQUE (id);
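On PostgreSQL, adding a SERIAL column to an existing table creates a backing sequence and assigns a fresh value to every existing row, so the migration needs no manual ID assignment. Roughly, and only as a sketch rather than the literal statements PostgreSQL executes, the ALTER above expands to:

-- Approximate expansion of ADD COLUMN id SERIAL NOT NULL (sketch).
create sequence Jobsets_id_seq;
alter table Jobsets
  add column id integer not null default nextval('Jobsets_id_seq');
alter sequence Jobsets_id_seq owned by Jobsets.id;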
src/sql/upgrade-60.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
+-- Add the jobset_id columns to the Jobs table. This will go
+-- quickly, since the field is nullable. Note this is just part one of
+-- this migration. Future steps involve a piecemeal backfilling, and
+-- then making the column non-null.
+
+ALTER TABLE Jobs
+  ADD COLUMN jobset_id integer NULL,
+  ADD FOREIGN KEY (jobset_id)
+    REFERENCES Jobsets(id)
+    ON DELETE CASCADE;
src/sql/upgrade-61.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
+-- Add the jobset_id columns to the Builds table. This will go
+-- quickly, since the field is nullable. Note this is just part one of
+-- this migration. Future steps involve a piecemeal backfilling, and
+-- then making the column non-null.
+
+ALTER TABLE Builds
+  ADD COLUMN jobset_id integer NULL,
+  ADD FOREIGN KEY (jobset_id)
+    REFERENCES Jobsets(id)
+    ON DELETE CASCADE;
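The comments in upgrade-60 and upgrade-61 defer population of jobset_id to a separate backfill pass, which is not part of these migrations. A sketch of the core statement such a backfiller could run, assuming it resolves the legacy (project, jobset) pair to the new surrogate key (a production pass would work in bounded batches to limit lock time):

-- Hypothetical backfill, not part of this commit: populate
-- Builds.jobset_id from the legacy (project, jobset) pair. The
-- analogous statement would be run for Jobs.jobset_id.
update Builds as b
set jobset_id = j.id
from Jobsets as j
where j.project = b.project
  and j.name = b.jobset
  and b.jobset_id is null;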
src/sql/upgrade-62.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
+-- Make the Jobs.jobset_id column NOT NULL. If this upgrade fails,
+-- either the admin didn't run the backfiller or there is a bug. If
+-- the admin ran the backfiller and there are null columns, it is
+-- very important to figure out where the nullable columns came from.
+
+ALTER TABLE Jobs
+  ALTER COLUMN jobset_id SET NOT NULL;
src/sql/upgrade-63.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
+-- Make the Builds.jobset_id column NOT NULL. If this upgrade fails,
+-- either the admin didn't run the backfiller or there is a bug. If
+-- the admin ran the backfiller and there are null columns, it is
+-- very important to figure out where the nullable columns came from.
+
+ALTER TABLE Builds
+  ALTER COLUMN jobset_id SET NOT NULL;
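Upgrade-62 and upgrade-63 fail outright if any row is still unpopulated, so a pre-flight query like this (hypothetical, not shipped with the migrations) shows whether the backfiller has finished:

-- Both counts must be zero before applying upgrade-62 and upgrade-63.
select
  (select count(*) from Jobs   where jobset_id is null) as jobs_missing,
  (select count(*) from Builds where jobset_id is null) as builds_missing;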
src/sql/upgrade-64.sql (new file, 4 lines)
@@ -0,0 +1,4 @@
+-- Index more exactly what the latest-finished query looks for.
+create index IndexFinishedSuccessfulBuilds
+  on Builds(id DESC, buildstatus, finished, job, jobset_id)
+  where buildstatus = 0 and finished = 1;
src/sql/upgrade-65.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
+-- Add an index like IndexBuildsOnJobFinishedId using jobset_id
+create index IndexBuildsOnJobsetIdFinishedId on Builds(id DESC, finished, job, jobset_id);
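Both new indexes lead with id DESC and cover the jobset_id and job columns used by latest-build lookups; the partial index in upgrade-64 additionally pins buildstatus = 0 and finished = 1, so it stays small while serving the common "latest successful build" case. A query shape it can answer (the prepared-statement name is hypothetical):

-- Latest successful build of a job, the lookup the partial index targets.
prepare latest_successful_build (integer, text) as
  select id from Builds
  where finished = 1 and buildstatus = 0
    and jobset_id = $1 and job = $2
  order by id desc
  limit 1;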
|
@ -14,6 +14,7 @@ TESTS_ENVIRONMENT = \
|
||||||
NIX_BUILD_HOOK= \
|
NIX_BUILD_HOOK= \
|
||||||
PGHOST=/tmp \
|
PGHOST=/tmp \
|
||||||
PERL5LIB="$(srcdir):$(abs_top_srcdir)/src/lib:$$PERL5LIB" \
|
PERL5LIB="$(srcdir):$(abs_top_srcdir)/src/lib:$$PERL5LIB" \
|
||||||
|
PYTHONPATH= \
|
||||||
PATH=$(abs_top_srcdir)/src/hydra-evaluator:$(abs_top_srcdir)/src/script:$(abs_top_srcdir)/src/hydra-eval-jobs:$(abs_top_srcdir)/src/hydra-queue-runner:$$PATH \
|
PATH=$(abs_top_srcdir)/src/hydra-evaluator:$(abs_top_srcdir)/src/script:$(abs_top_srcdir)/src/hydra-eval-jobs:$(abs_top_srcdir)/src/hydra-queue-runner:$$PATH \
|
||||||
perl -w
|
perl -w
|
||||||
|
|
||||||
|
|