Merge remote-tracking branch 'origin/next' into released

Graham Christensen 2018-03-05 07:42:12 -05:00
commit 716c8c592a
No known key found for this signature in database
GPG key ID: ACA1C1D120C83D5C
44 changed files with 2308 additions and 1251 deletions


@ -9,11 +9,12 @@
## Automatic Building

Users who are _trusted_ or _known_ (see: Trusted Users vs Known Users)
will have their PRs automatically trigger builds if their commits
follow the well-defined format of Nixpkgs. Specifically: prefixing the
commit title with the package attribute. This includes package bumps
as well as other changes.

Example messages and the builds:

|Message|Automatic Build|

@ -107,6 +108,28 @@ This will build `list`, `of`, `attrs`, `looks`, `good`, `to`, `me!`:

@grahamcofborg build list of attrs looks good to me!
```
## Trusted Users vs Known Users

Known users have their builds executed on platforms with working
sandboxing. At the time of writing, that means:

- `x86_64-linux`
- `aarch64-linux`

Trusted users have their builds executed on _all_ platforms, even if
they don't have good sandboxing. This opens the host up to a higher
risk of security issues, so only well-known, trusted members of the
community should be added to the trusted user list.

At the time of writing, trusted users have their builds run on the
following platforms:

- `x86_64-linux`
- `aarch64-linux`
- `x86_64-darwin`

See ./config.public.json and ./config.known-users.json for a list of
all the trusted and known users.
# How does OfBorg call nix-build?
@ -217,13 +240,9 @@ function rabbitmq_conn($timeout = 3) {
    return $connection;
}

function gh_secret() {
    return "github webhook secret";
}
```


@ -1,7 +1,5 @@
[
  "bhipple",
  "dywedir",
  "unode"
]


@ -34,6 +34,8 @@
"disassembler", "disassembler",
"domenkozar", "domenkozar",
"dotlambda", "dotlambda",
"dtzwill",
"dywedir",
"edolstra", "edolstra",
"edwtjo", "edwtjo",
"ehmry", "ehmry",
@ -73,6 +75,7 @@
"ndowens", "ndowens",
"nequissimus", "nequissimus",
"nicolaspetton", "nicolaspetton",
"nlewo",
"obadz", "obadz",
"ocharles", "ocharles",
"offlinehacker", "offlinehacker",


@ -6,6 +6,11 @@
"path": "/var/lib/nginx/ofborg/logs/" "path": "/var/lib/nginx/ofborg/logs/"
}, },
"runner": { "runner": {
"repos": [
"nixos/nixpkgs",
"nixos/ofborg",
"grahamc/nixpkgs"
],
"trusted_users": [ "trusted_users": [
"7c6f434c", "7c6f434c",
"adisbladis", "adisbladis",
@ -14,6 +19,7 @@
"aneeshusa", "aneeshusa",
"aszlig", "aszlig",
"copumpkin", "copumpkin",
"dezgeg",
"disassembler", "disassembler",
"domenkozar", "domenkozar",
"dtzwill", "dtzwill",
@ -48,10 +54,6 @@
    ]
  },
  "tag_paths": {
    "6.topic: emacs": [
      "nixos/modules/services/editors/emacs.nix",
      "nixos/modules/services/editors/emacs.xml",
@ -130,6 +132,7 @@
    ],
    "6.topic: vim": [
      "pkgs/applications/editors/vim",
      "pkgs/misc/vim-plugins",
      "doc/languages-frameworks/vim.md"
    ],
    "6.topic: xfce": [


@ -3,7 +3,7 @@
header('Content-Type: application/json');
$d = array('attempts' => []);

$root = "/var/log/ofborg/";

function abrt($msg) {
    echo $msg;
@ -19,8 +19,8 @@ if (!isset($_SERVER['REQUEST_URI']) || empty($_SERVER['REQUEST_URI'])) {
}

$reqd = substr($_SERVER['REQUEST_URI'], strlen("/logs/"));
$req = realpath("$root/$reqd");
$serve_root = "https://logs.nix.ci/logfile/$reqd";

if ($req === false) {
    abrt("absent");
@ -42,7 +42,19 @@ if ($handle = opendir($req)) {
    }

    if (is_file($req . '/' . $entry)) {
        if (substr($entry, -strlen(".metadata.json"), strlen(".metadata.json")) == ".metadata.json") {
            $metadata = json_decode(file_get_contents($req . '/' . $entry), JSON_OBJECT_AS_ARRAY);
            $attempt = $metadata['attempt_id'];
            if (!isset($d['attempts'][$attempt])) {
                $d['attempts'][$attempt] = [];
            }
            $d['attempts'][$attempt]['metadata'] = $metadata;
        } else {
            if (!isset($d['attempts'][$entry])) {
                $d['attempts'][$entry] = [];
            }
            $d['attempts'][$entry]['log_url'] = "$serve_root/$entry";
        }
    }
}


@ -1,7 +1,7 @@
{
  "url": "https://github.com/nixos/nixpkgs-channels.git",
  "rev": "c2fbd472a4ebaae739257a3df93aef25f19dd04f",
  "date": "2018-02-23T08:08:30+00:00",
  "sha256": "07mir6nqb98mbykabppgj4dli66h18qsyi4zqp640x22k6bkp2vp",
  "fetchSubmodules": true
}


@ -182,6 +182,13 @@ rec {
sha256 = "1bxsh6fags7nr36vlz07ik2a1rzyipc8x1y30kjk832hf2pzadmw"; sha256 = "1bxsh6fags7nr36vlz07ik2a1rzyipc8x1y30kjk832hf2pzadmw";
inherit dependencies buildDependencies features; inherit dependencies buildDependencies features;
}; };
either_1_4_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
crateName = "either";
version = "1.4.0";
authors = [ "bluss" ];
sha256 = "04kpfd84lvyrkb2z4sljlz2d3d5qczd0sb1yy37fgijq2yx3vb37";
inherit dependencies buildDependencies features;
};
enum_primitive_0_1_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
crateName = "enum_primitive";
version = "0.1.1";
@ -410,7 +417,8 @@ rec {
crateName = "ofborg"; crateName = "ofborg";
version = "0.1.1"; version = "0.1.1";
authors = [ "Graham Christensen <graham@grahamc.com>" ]; authors = [ "Graham Christensen <graham@grahamc.com>" ];
src = include [ "Cargo.toml" "Cargo.lock" "src" "test-srcs" ] ./../ofborg; src = include [ "Cargo.toml" "Cargo.lock" "src" "test-srcs" "build.rs" ] ./../ofborg;
build = "build.rs";
inherit dependencies buildDependencies features; inherit dependencies buildDependencies features;
}; };
openssl_0_9_23_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { openssl_0_9_23_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
@ -973,6 +981,17 @@ rec {
dtoa_0_4_2_features = f: updateFeatures f (rec {
dtoa_0_4_2.default = (f.dtoa_0_4_2.default or true);
}) [];
either_1_4_0 = { features?(either_1_4_0_features {}) }: either_1_4_0_ {
dependencies = mapFeatures features ([]);
features = mkFeatures (features.either_1_4_0 or {});
};
either_1_4_0_features = f: updateFeatures f (rec {
either_1_4_0.default = (f.either_1_4_0.default or true);
either_1_4_0.use_std =
(f.either_1_4_0.use_std or false) ||
(f.either_1_4_0.default or false) ||
(either_1_4_0.default or false);
}) [];
enum_primitive_0_1_1 = { features?(enum_primitive_0_1_1_features {}) }: enum_primitive_0_1_1_ {
dependencies = mapFeatures features ([ num_traits_0_1_41 ]);
};
@ -1324,10 +1343,11 @@ rec {
num_cpus_1_8_0.default = (f.num_cpus_1_8_0.default or true);
}) [ libc_0_2_36_features ];
ofborg_0_1_1 = { features?(ofborg_0_1_1_features {}) }: ofborg_0_1_1_ {
dependencies = mapFeatures features ([ amqp_0_1_0 either_1_4_0 env_logger_0_4_3 fs2_0_4_3 hubcaps_0_3_16 hyper_0_10_13 hyper_native_tls_0_2_4 log_0_3_8 lru_cache_0_1_1 md5_0_3_6 serde_1_0_27 serde_derive_1_0_27 serde_json_1_0_9 tempfile_2_2_0 uuid_0_4_0 ]);
};
ofborg_0_1_1_features = f: updateFeatures f (rec {
amqp_0_1_0.default = true;
either_1_4_0.default = true;
env_logger_0_4_3.default = true;
fs2_0_4_3.default = true;
hubcaps_0_3_16.default = true;
@ -1343,7 +1363,7 @@ rec {
tempfile_2_2_0.default = true;
uuid_0_4_0.default = true;
uuid_0_4_0.v4 = true;
}) [ amqp_0_1_0_features either_1_4_0_features env_logger_0_4_3_features fs2_0_4_3_features hubcaps_0_3_16_features hyper_0_10_13_features hyper_native_tls_0_2_4_features log_0_3_8_features lru_cache_0_1_1_features md5_0_3_6_features serde_1_0_27_features serde_derive_1_0_27_features serde_json_1_0_9_features tempfile_2_2_0_features uuid_0_4_0_features ];
openssl_0_9_23 = { features?(openssl_0_9_23_features {}) }: openssl_0_9_23_ {
dependencies = mapFeatures features ([ bitflags_0_9_1 foreign_types_0_3_2 lazy_static_1_0_0 libc_0_2_36 openssl_sys_0_9_24 ]);
features = mkFeatures (features.openssl_0_9_23 or {});

ofborg/Cargo.lock generated

@ -132,6 +132,11 @@ name = "dtoa"
version = "0.4.2" version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "either"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "enum_primitive"
version = "0.1.1"
@ -377,6 +382,7 @@ name = "ofborg"
version = "0.1.1" version = "0.1.1"
dependencies = [ dependencies = [
"amqp 0.1.0 (git+https://github.com/grahamc/rust-amqp.git)", "amqp 0.1.0 (git+https://github.com/grahamc/rust-amqp.git)",
"either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"hubcaps 0.3.16 (git+https://github.com/grahamc/hubcaps.git)", "hubcaps 0.3.16 (git+https://github.com/grahamc/hubcaps.git)",
@ -761,6 +767,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum core-foundation 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "25bfd746d203017f7d5cbd31ee5d8e17f94b6521c7af77ece6c9e4b2d4b16c67" "checksum core-foundation 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "25bfd746d203017f7d5cbd31ee5d8e17f94b6521c7af77ece6c9e4b2d4b16c67"
"checksum core-foundation-sys 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "065a5d7ffdcbc8fa145d6f0746f3555025b9097a9e9cda59f7467abae670c78d" "checksum core-foundation-sys 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "065a5d7ffdcbc8fa145d6f0746f3555025b9097a9e9cda59f7467abae670c78d"
"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" "checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
"checksum either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "740178ddf48b1a9e878e6d6509a1442a2d42fd2928aae8e7a6f8a36fb01981b3"
"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180" "checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f" "checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" "checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b"


@ -2,11 +2,12 @@
name = "ofborg" name = "ofborg"
version = "0.1.1" version = "0.1.1"
authors = ["Graham Christensen <graham@grahamc.com>"] authors = ["Graham Christensen <graham@grahamc.com>"]
include = ["Cargo.toml", "Cargo.lock", "src", "test-srcs"] include = ["Cargo.toml", "Cargo.lock", "src", "test-srcs", "build.rs"]
build = "build.rs"
[dependencies] [dependencies]
either = "1.4.0"
log = "= 0.3.8" log = "= 0.3.8"
env_logger = "= 0.4.3" env_logger = "= 0.4.3"
# amqp = { path = "./rust-amqp/" } # for testing patches # amqp = { path = "./rust-amqp/" } # for testing patches
@ -24,6 +25,5 @@ hyper = "0.10.*"
hyper-native-tls = "0.2.4" hyper-native-tls = "0.2.4"
lru-cache = "0.1.1" lru-cache = "0.1.1"
#[patch.crates-io] #[patch.crates-io]
#amq-proto = { path = "rust-amq-proto" } #amq-proto = { path = "rust-amq-proto" }

ofborg/build.rs Normal file

@ -0,0 +1,618 @@
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
enum MetricType {
Ticker(Metric),
Counter(Metric),
}
impl MetricType {
fn collector_type(&self) -> String {
match self {
&MetricType::Ticker(_) => {
String::from("u64")
}
&MetricType::Counter(_) => {
String::from("u64")
}
}
}
fn enum_matcher_types(&self) -> String {
let fields = self.enum_field_types();
if fields.len() > 0 {
format!("{}({})", self.variant(), fields.join(", "))
} else {
format!("{}", self.variant())
}
}
fn variant(&self) -> String {
match self {
&MetricType::Ticker(ref event) => {
event.variant.clone()
}
&MetricType::Counter(ref event) => {
event.variant.clone()
}
}
}
fn metric_type(&self) -> String {
match self {
&MetricType::Ticker(_) => {
String::from("counter")
}
&MetricType::Counter(_) => {
String::from("counter")
}
}
}
fn metric_name(&self) -> String {
match self {
&MetricType::Ticker(ref event) => {
event.metric_name.clone()
}
&MetricType::Counter(ref event) => {
event.metric_name.clone()
}
}
}
fn description(&self) -> String {
match self {
&MetricType::Ticker(ref event) => {
event.description.clone()
}
&MetricType::Counter(ref event) => {
event.description.clone()
}
}
}
fn enum_index_types(&self) -> Vec<String> {
let event: &Metric;
match self {
&MetricType::Ticker(ref i_event) => {
event = i_event;
}
&MetricType::Counter(ref i_event) => {
event = i_event;
}
}
let fields: Vec<String> = event.fields
.iter()
.map(|&(ref _fieldname, ref fieldtype)| fieldtype.clone())
.collect();
return fields
}
fn enum_field_types(&self) -> Vec<String> {
let mut extra_fields: Vec<String> = vec![];
match self {
&MetricType::Ticker(_) => {}
&MetricType::Counter(_) => {
extra_fields = vec![self.collector_type()];
}
}
let mut fields: Vec<String> = self.enum_index_types();
fields.append(&mut extra_fields);
return fields
}
fn enum_index_names(&self) -> Vec<String> {
let event: &Metric;
match self {
&MetricType::Ticker(ref i_event) => {
event = i_event;
}
&MetricType::Counter(ref i_event) => {
event = i_event;
}
}
let fields: Vec<String> = event.fields
.iter()
.map(|&(ref fieldname, ref _fieldtype)| fieldname.clone())
.collect();
return fields
}
fn enum_field_names(&self) -> Vec<String> {
let mut extra_fields: Vec<String> = vec![];
match self {
&MetricType::Ticker(_) => {}
&MetricType::Counter(_) => {
extra_fields = vec!["value".to_owned()];
}
}
let mut fields: Vec<String> = self.enum_index_names();
fields.append(&mut extra_fields);
return fields
}
fn record_value(&self) -> String {
match self {
&MetricType::Ticker(_) => {
String::from("1")
}
&MetricType::Counter(_) => {
String::from("value")
}
}
}
}
struct Metric {
variant: String,
fields: Vec<(String,String)>, // Vec because it is sorted
metric_name: String,
description: String,
}
fn name_to_parts(name: &str) -> Vec<String> {
let mut parts: Vec<String> = vec![];
let mut buf = String::from("");
for c in name.chars() {
if char::is_uppercase(c) && buf.len() > 0 {
parts.push(buf.to_owned());
buf = String::from("");
}
buf.push_str(&c.to_string());
}
if buf.len() > 0 {
parts.push(buf.to_owned());
std::mem::drop(buf);
}
return parts;
}
impl Metric {
pub fn ticker(name: &str, desc: &str, fields: Option<Vec<(&str,&str)>>) -> MetricType {
let parts = name_to_parts(name);
MetricType::Ticker(Metric {
variant: parts
.iter()
.map(|f| f.clone().to_owned())
.collect(),
fields: fields
.unwrap_or(vec![])
.iter()
.map(|&(ref fieldname, ref fieldtype)| (fieldname.clone().to_owned(), fieldtype.clone().to_owned()))
.collect(),
metric_name: parts.join("_").to_lowercase(),
description: desc.to_owned(),
})
}
pub fn counter(name: &str, desc: &str, fields: Option<Vec<(&str,&str)>>) -> MetricType {
let parts = name_to_parts(name);
MetricType::Counter(Metric {
variant: parts
.iter()
.map(|f| f.clone().to_owned())
.collect(),
fields: fields
.unwrap_or(vec![])
.iter()
.map(|&(ref fieldname, ref fieldtype)| (fieldname.clone().to_owned(), fieldtype.clone().to_owned()))
.collect(),
metric_name: parts.join("_").to_lowercase(),
description: desc.to_owned(),
})
}
}
fn events() -> Vec<MetricType> {
return vec![
Metric::ticker(
"StatCollectorLegacyEvent",
"Number of received legacy events",
Some(vec![("event", "String")]),
),
Metric::ticker(
"StatCollectorBogusEvent",
"Number of received unparseable events",
None,
),
Metric::ticker(
"JobReceived",
"Number of received worker jobs",
None,
),
Metric::counter(
"EvaluationDuration",
"Amount of time spent running evaluations",
Some(vec![
("branch", "String"),
]),
),
Metric::ticker(
"EvaluationDurationCount",
"Number of timed evaluations performed",
Some(vec![
("branch", "String"),
]),
),
Metric::ticker(
"TargetBranchFailsEvaluation",
"Number of PR evaluations which failed because the target branch failed",
Some(vec![
("branch", "String"),
]),
),
Metric::ticker(
"JobDecodeSuccess",
"Number of successfully decoded jobs",
None,
),
Metric::ticker(
"JobDecodeFailure",
"Number of jobs which failed to parse",
None,
),
Metric::ticker(
"IssueAlreadyClosed",
"Number of jobs for issues which are already closed",
None,
),
Metric::ticker(
"IssueFetchFailed",
"Number of failed fetches for GitHub issues",
None,
),
Metric::ticker(
"TaskEvaluationCheckComplete",
"Number of completed evaluation tasks",
None,
),
/*
Metric::counter(
"TimeElapsed",
"",
None
),
Metric::counter(
"EnvironmentsAllocatedCount",
"",
None
),
Metric::counter(
"EnvironmentsAllocatedBytes",
"",
None
),
Metric::counter(
"ListElementsCount",
"",
None
),
Metric::counter(
"ListElementsBytes",
"",
None
),
Metric::counter(
"ListConcatenations",
"",
None
),
Metric::counter(
"ValuesAllocatedCount",
"",
None
),
Metric::counter(
"ValuesAllocatedBytes",
"",
None
),
Metric::counter(
"SetsAllocatedCount",
"",
None
),
Metric::counter(
"SetsAllocatedBytes",
"",
None
),
Metric::counter(
"RightBiasedUnions",
"",
None
),
Metric::counter(
"ValuesCopiedInRightBiasedUnions",
"",
None
),
Metric::counter(
"SymbolsInSymbolTable",
"",
None
),
Metric::counter(
"SizeOfSymbolTable",
"",
None
),
Metric::counter(
"NumberOfThunks",
"",
None
),
Metric::counter(
"NumberOfThunksAvoided",
"",
None
),
Metric::counter(
"NumberOfAttrLookups",
"",
None
),
Metric::counter(
"NumberOfPrimopCalls",
"",
None
),
Metric::counter(
"NumberOfFunctionCalls",
"",
None
),
Metric::counter(
"TotalAllocations",
"",
None
),
Metric::counter(
"CurrentBoehmHeapSizeBytes",
"",
None
),
Metric::counter(
"TotalBoehmHeapAllocationsBytes",
"",
None
),
*/
];
}
fn main() {
let out_dir = env::var("OUT_DIR").unwrap();
let dest_path = Path::new(&out_dir).join("events.rs");
let mut f = File::create(&dest_path).unwrap();
println!("cargo:rerun-if-changed=build.rs");
// Write the Event enum, which contains all possible event types
f.write_all(b"
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all=\"kebab-case\")]
pub enum Event {
").unwrap();
let variants: Vec<String> = events()
.iter()
.map(|mtype| format!(" {}", mtype.enum_matcher_types()) )
.collect();
f.write_all(variants.join(",\n").as_bytes()).unwrap();
f.write_all("\n}\n\n".as_bytes()).unwrap();
f.write_all(b"pub fn event_metric_name(event: &Event) -> String {
match event {
").unwrap();
let variants: Vec<String> = events()
.iter()
.map(|mtype| {
let fields: Vec<String> = mtype.enum_field_names()
.iter()
.map(|_| String::from("_"))
.collect();
let variant_match: String;
if fields.len() > 0 {
variant_match = format!(
"{}({})",
&mtype.variant(),
fields
.join(", "));
} else {
variant_match = format!("{}", &mtype.variant());
}
format!(" &Event::{} => String::from(\"{}\")",
&variant_match,
&mtype.metric_name(),
)
}).collect();
f.write_all(variants.join(",\n").as_bytes()).unwrap();
f.write_all("}\n }".as_bytes()).unwrap();
// Create a struct to hold all the possible metrics
f.write_all(b"
#[derive(Debug, Clone)]
pub struct MetricCollector {
").unwrap();
let variants: Vec<String> = events()
.iter()
.map(|mtype| {
let mut fields: Vec<String> = mtype.enum_index_types();
fields.push("String".to_owned()); // Instance
format!(" {}: Arc<Mutex<HashMap<({}),{}>>>",
mtype.metric_name(),
fields.join(", "),
mtype.collector_type(),
)
}).collect();
f.write_all(variants.join(",\n").as_bytes()).unwrap();
f.write_all("\n}\n\n".as_bytes()).unwrap();
// Create a struct to hold all the possible metrics
f.write_all(b"
impl MetricCollector {
pub fn new() -> MetricCollector {
MetricCollector {
").unwrap();
let variants: Vec<String> = events()
.iter()
.map(|mtype| {
let mut fields: Vec<String> = mtype.enum_field_types();
fields.push("String".to_owned()); // Instance
format!(" {}: Arc::new(Mutex::new(HashMap::new()))",
&mtype.metric_name(),
)
}).collect();
f.write_all(variants.join(",\n").as_bytes()).unwrap();
f.write_all("\n }\n".as_bytes()).unwrap();
f.write_all("\n }\n".as_bytes()).unwrap();
f.write_all(b"
pub fn record(&self, instance: String, event: Event) {
match event {
").unwrap();
let variants: Vec<String> = events()
.iter()
.map(|mtype| {
let fields: Vec<String> = mtype.enum_field_names();
let variant_match: String;
if fields.len() > 0 {
variant_match = format!("{}({})", &mtype.variant(), fields.join(", "));
} else {
variant_match = format!("{}", &mtype.variant());
}
let mut index_fields: Vec<String> = mtype.enum_index_names();
index_fields.push("instance".to_owned());
format!("
Event::{} => {{
let mut accum_table = self.{}
.lock()
.expect(\"Failed to unwrap metric mutex for {}\");
let accum = accum_table
.entry(({}))
.or_insert(0);
*accum += {};
}}
",
variant_match,
&mtype.metric_name(),
&mtype.metric_name(),
index_fields.join(", "),
&mtype.record_value(),
)
}).collect();
f.write_all(variants.join(",\n").as_bytes()).unwrap();
f.write_all("\n }\n".as_bytes()).unwrap();
f.write_all("\n }\n".as_bytes()).unwrap();
f.write_all(b"pub fn prometheus_output(&self) -> String {
let mut output = String::new();
").unwrap();
let variants: Vec<String> = events()
.iter()
.map(|mtype| {
let mut index_fields: Vec<String> = mtype.enum_index_names();
index_fields.push("instance".to_owned());
let ref_index_fields: Vec<String> = index_fields
.iter()
.map(|m| format!("ref {}", m))
.collect();
let for_matcher: String;
if index_fields.len() > 1 {
for_matcher = format!("({})",
ref_index_fields.join(", "));
} else {
for_matcher = ref_index_fields.join(", ");
}
let key_value_pairs: Vec<String> = index_fields
.iter()
.map(|name| format!(" format!(\"{}=\\\"{{}}\\\"\", {})", &name, &name))
.collect();
format!("
output.push_str(\"# HELP ofborg_{} {}\n\");
output.push_str(\"# TYPE ofborg_{} {}\n\");
let table = self.{}.lock()
.expect(\"Failed to unwrap metric mutex for {}\");
let values: Vec<String> = (*table)
.iter()
.map(|(&{}, value)| {{
let kvs: Vec<String> = vec![
{}
];
format!(\"ofborg_{}{{{{{{}}}}}} {{}}\", kvs.join(\",\"), value)
}})
.collect();
output.push_str(&values.join(\"\n\"));
output.push_str(\"\n\");
",
&mtype.metric_name(),
&mtype.description(),
&mtype.metric_name(),
&mtype.metric_type(),
&mtype.metric_name(),
&mtype.metric_name(),
for_matcher,
&key_value_pairs.join(",\n"),
&mtype.metric_name(),
)
}).collect();
f.write_all(variants.join("\n").as_bytes()).unwrap();
f.write_all("return output;\n }".as_bytes()).unwrap();
f.write_all("\n}".as_bytes()).unwrap();
}
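To make the generator above easier to follow, here is a hand-written sketch (not the actual output) of roughly what the `events.rs` written to `OUT_DIR` contains for a few of the metrics listed in `events()`:

```
// Approximate, hand-reconstructed sketch of the generated $OUT_DIR/events.rs.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "kebab-case")]
pub enum Event {
    StatCollectorLegacyEvent(String),  // Ticker with one index field ("event")
    StatCollectorBogusEvent,           // Ticker with no fields
    JobReceived,
    EvaluationDuration(String, u64),   // Counter: index field ("branch") plus the recorded value
    // ... one variant per entry in events() ...
}

pub fn event_metric_name(event: &Event) -> String {
    match event {
        &Event::JobReceived => String::from("job_received"),
        &Event::EvaluationDuration(_, _) => String::from("evaluation_duration"),
        // ... and so on for every remaining variant in the real generated file ...
    }
}
```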


@ -2,16 +2,22 @@
pub struct ACL {
trusted_users: Vec<String>,
known_users: Vec<String>,
repos: Vec<String>,
}
impl ACL {
pub fn new(repos: Vec<String>, trusted_users: Vec<String>, known_users: Vec<String>) -> ACL {
return ACL {
trusted_users: trusted_users,
known_users: known_users,
repos: repos,
};
}

pub fn is_repo_eligible(&self, name: &str) -> bool {
self.repos.contains(&name.to_lowercase())
}

pub fn build_job_destinations_for_user_repo(
&self,
user: &str,
@ -38,10 +44,12 @@ impl ACL {
}

pub fn can_build_unrestricted(&self, user: &str, repo: &str) -> bool {
if repo.to_lowercase() == "nixos/nixpkgs" {
return self.trusted_users.contains(&user.to_lowercase());
} else if user == "grahamc" {
return true;
} else {
return false;
}
}
}
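A minimal usage sketch of the new repository gate (hypothetical values; the argument order follows the `ACL::new(repos, trusted_users, known_users)` signature above):

```
// Sketch only, with hypothetical users and repos.
let acl = ACL::new(
    vec!["nixos/nixpkgs".to_owned()],  // repos eligible for evaluation
    vec!["grahamc".to_owned()],        // trusted users
    vec!["bhipple".to_owned()],        // known users
);
assert!(acl.is_repo_eligible("NixOS/nixpkgs"));  // repo names are compared lowercased
assert!(acl.can_build_unrestricted("grahamc", "nixos/nixpkgs"));
assert!(!acl.can_build_unrestricted("bhipple", "nixos/nixpkgs"));
```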


@ -40,6 +40,40 @@ fn main() {
let mut session = easyamqp::session_from_config(&cfg.rabbitmq).unwrap();
let mut channel = session.open_channel(1).unwrap();
channel.basic_prefetch(1).unwrap();
channel
.declare_exchange(easyamqp::ExchangeConfig {
exchange: "build-jobs".to_owned(),
exchange_type: easyamqp::ExchangeType::Fanout,
passive: false,
durable: true,
auto_delete: false,
no_wait: false,
internal: false,
arguments: None,
})
.unwrap();
channel
.declare_queue(easyamqp::QueueConfig {
queue: format!("build-inputs-{}", cfg.nix.system.clone()),
passive: false,
durable: true,
exclusive: false,
auto_delete: false,
no_wait: false,
arguments: None,
})
.unwrap();
channel
.bind_queue(easyamqp::BindQueueConfig {
queue: format!("build-inputs-{}", cfg.nix.system.clone()),
exchange: "build-jobs".to_owned(),
routing_key: None,
no_wait: false,
arguments: None,
})
.unwrap();
channel
.consume(


@ -0,0 +1,106 @@
extern crate ofborg;
extern crate amqp;
extern crate env_logger;
extern crate hyper;
extern crate hubcaps;
extern crate hyper_native_tls;
use std::env;
use amqp::Basic;
use ofborg::config;
use ofborg::worker;
use ofborg::tasks;
use ofborg::easyamqp;
use ofborg::easyamqp::TypedWrappers;
fn main() {
let cfg = config::load(env::args().nth(1).unwrap().as_ref());
ofborg::setup_log();
println!("Hello, world!");
let mut session = easyamqp::session_from_config(&cfg.rabbitmq).unwrap();
println!("Connected to rabbitmq");
let mut channel = session.open_channel(1).unwrap();
channel
.declare_exchange(easyamqp::ExchangeConfig {
exchange: "github-events".to_owned(),
exchange_type: easyamqp::ExchangeType::Topic,
passive: false,
durable: true,
auto_delete: false,
no_wait: false,
internal: false,
arguments: None,
})
.unwrap();
channel
.declare_queue(easyamqp::QueueConfig {
queue: "mass-rebuild-check-jobs".to_owned(),
passive: false,
durable: true,
exclusive: false,
auto_delete: false,
no_wait: false,
arguments: None,
})
.unwrap();
channel
.declare_queue(easyamqp::QueueConfig {
queue: "mass-rebuild-check-inputs".to_owned(),
passive: false,
durable: true,
exclusive: false,
auto_delete: false,
no_wait: false,
arguments: None,
})
.unwrap();
channel
.bind_queue(easyamqp::BindQueueConfig {
queue: "mass-rebuild-check-inputs".to_owned(),
exchange: "github-events".to_owned(),
routing_key: Some("pull_request.nixos/nixpkgs".to_owned()),
no_wait: false,
arguments: None,
})
.unwrap();
channel.basic_prefetch(1).unwrap();
channel
.consume(
worker::new(tasks::evaluationfilter::EvaluationFilterWorker::new(
cfg.acl(),
)),
easyamqp::ConsumeConfig {
queue: "mass-rebuild-check-inputs".to_owned(),
consumer_tag: format!("{}-evaluation-filter", cfg.whoami()),
no_local: false,
no_ack: false,
no_wait: false,
exclusive: false,
arguments: None,
},
)
.unwrap();
channel.start_consuming();
println!("Finished consuming?");
channel.close(200, "Bye").unwrap();
println!("Closed the channel");
session.close(200, "Good Bye");
println!("Closed the session... EOF");
}


@ -29,6 +29,53 @@ fn main() {
println!("Connected to rabbitmq"); println!("Connected to rabbitmq");
let mut channel = session.open_channel(1).unwrap(); let mut channel = session.open_channel(1).unwrap();
channel
.declare_exchange(easyamqp::ExchangeConfig {
exchange: "github-events".to_owned(),
exchange_type: easyamqp::ExchangeType::Topic,
passive: false,
durable: true,
auto_delete: false,
no_wait: false,
internal: false,
arguments: None,
})
.unwrap();
channel
.declare_exchange(easyamqp::ExchangeConfig {
exchange: "build-jobs".to_owned(),
exchange_type: easyamqp::ExchangeType::Fanout,
passive: false,
durable: true,
auto_delete: false,
no_wait: false,
internal: false,
arguments: None,
})
.unwrap();
channel
.declare_queue(easyamqp::QueueConfig {
queue: "build-inputs".to_owned(),
passive: false,
durable: true,
exclusive: false,
auto_delete: false,
no_wait: false,
arguments: None,
})
.unwrap();
channel
.bind_queue(easyamqp::BindQueueConfig {
queue: "build-inputs".to_owned(),
exchange: "github-events".to_owned(),
routing_key: Some("issue_comment.*".to_owned()),
no_wait: false,
arguments: None,
})
.unwrap();
channel.basic_prefetch(1).unwrap();
channel


@ -23,30 +23,42 @@ fn main() {
let mut channel = session.open_channel(1).unwrap();
channel
.declare_exchange(easyamqp::ExchangeConfig {
exchange: "logs".to_owned(),
exchange_type: easyamqp::ExchangeType::Topic,
passive: false,
durable: true,
auto_delete: false,
no_wait: false,
internal: false,
arguments: None,
})
.unwrap();
let queue_name = channel
.declare_queue(easyamqp::QueueConfig {
queue: "".to_owned(),
passive: false,
durable: false,
exclusive: true,
auto_delete: true,
no_wait: false,
arguments: None,
})
.unwrap()
.queue;

channel
.bind_queue(easyamqp::BindQueueConfig {
queue: queue_name.clone(),
exchange: "logs".to_owned(),
routing_key: Some("*.*".to_owned()),
no_wait: false,
arguments: None,
})
.unwrap();

channel
.consume(
worker::new(tasks::log_message_collector::LogMessageCollector::new(


@ -30,7 +30,10 @@ fn main() {
let cloner = checkout::cached_cloner(Path::new(&cfg.checkout.root));
let nix = cfg.nix();

let events = stats::RabbitMQ::new(
&format!("{}-{}", cfg.runner.identity.clone(), cfg.nix.system.clone()),
session.open_channel(3).unwrap()
);

let mrw = tasks::massrebuilder::MassRebuildWorker::new(
cloner,
@ -42,6 +45,18 @@ fn main() {
cfg.tag_paths.clone().unwrap(),
);
channel
.declare_queue(easyamqp::QueueConfig {
queue: "mass-rebuild-check-jobs".to_owned(),
passive: false,
durable: true,
exclusive: false,
auto_delete: false,
no_wait: false,
arguments: None,
})
.unwrap();
channel.basic_prefetch(1).unwrap();
channel
.consume(

ofborg/src/bin/stats.rs Normal file

@ -0,0 +1,109 @@
extern crate hyper;
extern crate amqp;
extern crate ofborg;
use std::env;
use ofborg::{easyamqp, tasks, worker, config, stats};
use amqp::Basic;
use ofborg::easyamqp::TypedWrappers;
use hyper::server::{Request, Response, Server};
use std::thread;
fn main() {
let cfg = config::load(env::args().nth(1).unwrap().as_ref());
ofborg::setup_log();
println!("Hello, world!");
let mut session = easyamqp::session_from_config(&cfg.rabbitmq).unwrap();
println!("Connected to rabbitmq");
let events = stats::RabbitMQ::new(
&format!("{}-{}", cfg.runner.identity.clone(), cfg.nix.system.clone()),
session.open_channel(3).unwrap()
);
let metrics = stats::MetricCollector::new();
let collector = tasks::statscollector::StatCollectorWorker::new(
events,
metrics.clone(),
);
let mut channel = session.open_channel(1).unwrap();
channel
.declare_exchange(easyamqp::ExchangeConfig {
exchange: "stats".to_owned(),
exchange_type: easyamqp::ExchangeType::Fanout,
passive: false,
durable: true,
auto_delete: false,
no_wait: false,
internal: false,
arguments: None,
})
.unwrap();
channel
.declare_queue(easyamqp::QueueConfig {
queue: "stats-events".to_owned(),
passive: false,
durable: true,
exclusive: false,
auto_delete: false,
no_wait: false,
arguments: None,
})
.unwrap();
channel
.bind_queue(easyamqp::BindQueueConfig {
queue: "stats-events".to_owned(),
exchange: "stats".to_owned(),
routing_key: None,
no_wait: false,
arguments: None,
})
.unwrap();
channel.basic_prefetch(1).unwrap();
channel
.consume(
worker::new(collector),
easyamqp::ConsumeConfig {
queue: "stats-events".to_owned(),
consumer_tag: format!("{}-prometheus-stats-collector", cfg.whoami()),
no_local: false,
no_ack: false,
no_wait: false,
exclusive: false,
arguments: None,
},
)
.unwrap();
thread::spawn(||{
let addr = "0.0.0.0:9898";
println!("listening addr {:?}", addr);
Server::http(addr)
.unwrap()
.handle(move |_: Request, res: Response| {
res.send(metrics.prometheus_output().as_bytes()).unwrap();
})
.unwrap();
});
channel.start_consuming();
println!("Finished consuming?");
channel.close(200, "Bye").unwrap();
println!("Closed the channel");
session.close(200, "Good Bye");
println!("Closed the session... EOF");
}


@ -33,6 +33,7 @@ pub struct FeedbackConfig {
pub struct RabbitMQConfig {
pub ssl: bool,
pub host: String,
pub virtualhost: Option<String>,
pub username: String,
pub password: String,
}
@ -58,6 +59,7 @@ pub struct LogStorage {
#[derive(Serialize, Deserialize, Debug)]
pub struct RunnerConfig {
pub identity: String,
pub repos: Option<Vec<String>>,
pub trusted_users: Option<Vec<String>>,
pub known_users: Option<Vec<String>>,
}
@ -74,6 +76,9 @@ impl Config {
pub fn acl(&self) -> acl::ACL {
return acl::ACL::new(
self.runner.repos.clone().expect(
"fetching config's runner.repos",
),
self.runner.trusted_users.clone().expect(
"fetching config's runner.trusted_users",
),
@ -114,11 +119,12 @@ impl Config {
impl RabbitMQConfig {
pub fn as_uri(&self) -> String {
return format!(
"{}://{}:{}@{}/{}",
if self.ssl { "amqps" } else { "amqp" },
self.username,
self.password,
self.host,
self.virtualhost.clone().unwrap_or("/".to_owned()),
);
}
}
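A small sketch of the URI this now produces, with hypothetical connection values:

```
// Hypothetical values, for illustration only.
let cfg = RabbitMQConfig {
    ssl: true,
    host: "rabbitmq.example.com".to_owned(),
    virtualhost: Some("ofborg".to_owned()),
    username: "user".to_owned(),
    password: "secret".to_owned(),
};
assert_eq!(cfg.as_uri(), "amqps://user:secret@rabbitmq.example.com/ofborg");
// With virtualhost: None the vhost falls back to "/", giving
// "amqps://user:secret@rabbitmq.example.com//", the same shape as the old hard-coded URI.
```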


@ -301,7 +301,7 @@ pub fn session_from_config(config: &RabbitMQConfig) -> Result<amqp::Session, amq
amqp::AMQPScheme::AMQPS => 5671,
amqp::AMQPScheme::AMQP => 5672,
},
vhost: config.virtualhost.clone().unwrap_or("/".to_owned()),
login: config.username.clone(),
password: config.password.clone(),
scheme: scheme,


@ -7,16 +7,16 @@ use ofborg::nix;
pub struct EvalChecker {
name: String,
op: nix::Operation,
args: Vec<String>,
nix: nix::Nix,
}

impl EvalChecker {
pub fn new(name: &str, op: nix::Operation, args: Vec<String>, nix: nix::Nix) -> EvalChecker {
EvalChecker {
name: name.to_owned(),
op: op,
args: args,
nix: nix,
}
@ -27,11 +27,11 @@ impl EvalChecker {
}

pub fn execute(&self, path: &Path) -> Result<File, File> {
self.nix.safely(self.op.clone(), path, self.args.clone(), false)
}

pub fn cli_cmd(&self) -> String {
let mut cli = vec![self.op.to_string()];
cli.append(&mut self.args.clone());
return cli.join(" ");
}


@ -20,8 +20,4 @@ pub struct Repository {
#[derive(Serialize, Deserialize, Debug)]
pub struct Issue {
pub number: u64,
}


@ -1,5 +1,7 @@
mod common;
mod issuecomment;
mod pullrequestevent;

pub use self::issuecomment::IssueComment;
pub use self::pullrequestevent::{PullRequest, PullRequestEvent, PullRequestAction, PullRequestState};
pub use self::common::{Issue, Repository, User, Comment};


@ -0,0 +1,80 @@
use ofborg::ghevent::{Repository};
#[derive(Serialize, Deserialize)]
pub struct PullRequestEvent {
pub action: PullRequestAction,
pub number: u64,
pub repository: Repository,
pub pull_request: PullRequest,
pub changes: Option<PullRequestChanges>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PullRequestChanges {
pub base: Option<BaseChange>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct BaseChange {
#[serde(rename="ref")]
pub git_ref: ChangeWas,
pub sha: ChangeWas,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct ChangeWas {
pub from: String,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all="snake_case")]
pub enum PullRequestState {
Open,
Closed,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all="snake_case")]
pub enum PullRequestAction {
Assigned,
Unassigned,
ReviewRequested,
ReviewRequestRemoved,
Labeled,
Unlabeled,
Opened,
Edited,
Closed,
Reopened,
Synchronize,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PullRequestRef {
#[serde(rename="ref")]
pub git_ref: String,
pub sha: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PullRequest {
pub state: PullRequestState,
pub base: PullRequestRef,
pub head: PullRequestRef,
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json;
#[test]
fn test_parse_pr_event() {
let data = include_str!("../../test-srcs/events/pr-changed-base.json");
let _p: PullRequestEvent =
serde_json::from_str(&data.to_string())
.expect("Should properly deserialize");
}
}


@ -1,6 +1,12 @@
#![recursion_limit="512"]

#[macro_use]
extern crate serde_derive;
extern crate serde;
#[cfg_attr(test, macro_use)]
extern crate serde_json;
#[macro_use]
@ -9,7 +15,7 @@ extern crate log;
extern crate hubcaps;
extern crate hyper;
extern crate hyper_native_tls;
extern crate either;
extern crate lru_cache;
extern crate tempfile;
extern crate amqp;


@ -1,5 +1,5 @@
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct BuildLogMsg {
pub system: String,
pub identity: String,
@ -8,7 +8,7 @@ pub struct BuildLogMsg {
pub output: String,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct BuildLogStart {
pub system: String,
pub identity: String,


@ -1,5 +1,5 @@
use std::env;
use std::fmt;
use std::fs::File;
use std::io::Seek;
use std::io::SeekFrom;
@ -7,6 +7,58 @@ use std::path::Path;
use std::process::{Command, Stdio};
use tempfile::tempfile;
#[derive(Clone, Debug)]
pub enum Operation {
Instantiate,
Build,
QueryPackagesJSON,
QueryPackagesOutputs,
NoOp { operation: Box<Operation> },
Unknown { program: String },
}
impl Operation {
fn command(&self) -> Command {
match *self {
Operation::Instantiate => Command::new("nix-instantiate"),
Operation::Build => Command::new("nix-build"),
Operation::QueryPackagesJSON => Command::new("nix-env"),
Operation::QueryPackagesOutputs => Command::new("nix-env"),
Operation::NoOp { operation: _ } => Command::new("echo"),
Operation::Unknown { ref program } => Command::new(program),
}
}
fn args(&self, command: &mut Command) {
match *self {
Operation::Build => {
command.args(&["--no-out-link", "--keep-going"]);
},
Operation::QueryPackagesJSON => {
command.args(&["--query", "--available", "--json"]);
},
Operation::QueryPackagesOutputs => {
command.args(&["--query", "--available", "--no-name", "--attr-path", "--out-path"]);
},
Operation::NoOp { ref operation } => { operation.args(command); },
_ => (),
};
}
}
impl fmt::Display for Operation {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Operation::Build => write!(f, "{}", "nix-build"),
Operation::Instantiate => write!(f, "{}", "nix-instantiate"),
Operation::QueryPackagesJSON => write!(f, "{}", "nix-env -qa --json"),
Operation::QueryPackagesOutputs => write!(f, "{}", "nix-env -qaP --no-name --out-path"),
Operation::NoOp { ref operation } => operation.fmt(f),
Operation::Unknown { ref program } => write!(f, "{}", program),
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Nix {
system: String,
@ -64,24 +116,22 @@ impl Nix {
) -> Command { ) -> Command {
let mut attrargs: Vec<String> = Vec::with_capacity(3 + (attrs.len() * 2)); let mut attrargs: Vec<String> = Vec::with_capacity(3 + (attrs.len() * 2));
attrargs.push(file.to_owned()); attrargs.push(file.to_owned());
attrargs.push(String::from("--no-out-link"));
attrargs.push(String::from("--keep-going"));
for attr in attrs { for attr in attrs {
attrargs.push(String::from("-A")); attrargs.push(String::from("-A"));
attrargs.push(attr); attrargs.push(attr);
} }
return self.safe_command("nix-build", nixpkgs, attrargs); return self.safe_command(Operation::Build, nixpkgs, attrargs);
} }
pub fn safely( pub fn safely(
&self, &self,
cmd: &str, op: Operation,
nixpkgs: &Path, nixpkgs: &Path,
args: Vec<String>, args: Vec<String>,
keep_stdout: bool, keep_stdout: bool,
) -> Result<File, File> { ) -> Result<File, File> {
return self.run(self.safe_command(cmd, nixpkgs, args), keep_stdout); return self.run(self.safe_command(op, nixpkgs, args), keep_stdout);
} }
pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<File, File> { pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<File, File> {
@ -113,12 +163,12 @@ impl Nix {
}
}

pub fn safe_command(&self, op: Operation, nixpkgs: &Path, args: Vec<String>) -> Command {
let nixpath = format!("nixpkgs={}", nixpkgs.display());

let mut command = op.command();
op.args(&mut command);

command.env_clear();
command.current_dir(nixpkgs);
command.env("HOME", "/homeless-shelter");
@ -165,6 +215,14 @@ mod tests {
Nix::new("x86_64-linux".to_owned(), "daemon".to_owned(), 1800, None) Nix::new("x86_64-linux".to_owned(), "daemon".to_owned(), 1800, None)
} }
fn noop(operation: Operation) -> Operation {
Operation::NoOp { operation: Box::new(operation) }
}
fn env_noop() -> Operation {
Operation::Unknown { program: "./environment.sh".to_owned() }
}
fn build_path() -> PathBuf {
let mut cwd = env::current_dir().unwrap();
cwd.push(Path::new("./test-srcs/build"));
@ -203,6 +261,7 @@ mod tests {
let buildlog = lines
.into_iter()
.map(|line| line.replace("\u{1b}[0m", "")) // ANSI reset
.map(|line| format!(" | {}", line))
.collect::<Vec<String>>()
.join("\n");
@ -270,13 +329,92 @@ mod tests {
use std::path::PathBuf;
use std::env;
#[test]
fn test_build_operations() {
let nix = nix();
let op = noop(Operation::Build);
assert_eq!(op.to_string(), "nix-build");
let ret: Result<File, File> =
nix.run(
nix.safe_command(op, build_path().as_path(), vec![String::from("--version")]),
true,
);
assert_run(
ret,
Expect::Pass,
vec!["--no-out-link --keep-going", "--version"],
);
}
#[test]
fn test_instantiate_operation() {
let nix = nix();
let op = noop(Operation::Instantiate);
assert_eq!(op.to_string(), "nix-instantiate");
let ret: Result<File, File> =
nix.run(
nix.safe_command(op, build_path().as_path(), vec![String::from("--version")]),
true,
);
assert_run(
ret,
Expect::Pass,
vec!["--version"],
);
}
#[test]
fn test_query_packages_json() {
let nix = nix();
let op = noop(Operation::QueryPackagesJSON);
assert_eq!(op.to_string(), "nix-env -qa --json");
let ret: Result<File, File> =
nix.run(
nix.safe_command(op, build_path().as_path(), vec![String::from("--version")]),
true,
);
assert_run(
ret,
Expect::Pass,
vec!["--query --available --json", "--version"],
);
}
#[test]
fn test_query_packages_outputs() {
let nix = nix();
let op = noop(Operation::QueryPackagesOutputs);
assert_eq!(op.to_string(), "nix-env -qaP --no-name --out-path");
let ret: Result<File, File> =
nix.run(
nix.safe_command(op, build_path().as_path(), vec![String::from("--version")]),
true,
);
assert_run(
ret,
Expect::Pass,
vec![
"--query --available --no-name --attr-path --out-path",
"--version"
],
);
}
#[test]
fn safe_command_environment() {
let nix = nix();

let ret: Result<File, File> =
nix.run(
nix.safe_command(env_noop(), build_path().as_path(), vec![]),
true,
);
@ -298,7 +436,7 @@ mod tests {
let ret: Result<File, File> =
nix.run(
nix.safe_command(env_noop(), build_path().as_path(), vec![]),
true,
);
@ -318,9 +456,10 @@ mod tests {
#[test]
fn safe_command_options() {
let nix = nix();
let op = noop(Operation::Build);

let ret: Result<File, File> = nix.run(
nix.safe_command(op, build_path().as_path(), vec![]),
true,
);
@ -344,7 +483,7 @@ mod tests {
assert_run(
ret,
Expect::Pass,
vec!["-success.drv", "building ", "hi", "-success"],
);
}
@ -363,7 +502,7 @@ mod tests {
Expect::Fail,
vec![
"-failed.drv",
"building ",
"hi",
"failed to produce output path",
],
@ -389,11 +528,10 @@ mod tests {
);
}

#[test]
fn instantiation() {
let ret: Result<File, File> = nix().safely(
Operation::Instantiate,
passing_eval_path().as_path(),
vec![],
true,


@ -170,14 +170,11 @@ impl OutPaths {
}

self.nix.safely(
nix::Operation::QueryPackagesOutputs,
&self.path,
vec![
String::from("-f"),
String::from(".gc-of-borg-outpaths.nix"),
String::from("--arg"),
String::from("checkMeta"),
check_meta,


@ -1,23 +1,40 @@
use serde_json;

use amqp::Channel;
use amqp::protocol::basic::BasicProperties;
use amqp::Basic;

include!(concat!(env!("OUT_DIR"), "/events.rs"));

#[macro_use]
mod macros {
#[macro_export]
macro_rules! my_macro(() => (FooBar));
}

pub trait SysEvents: Send {
fn notify(&mut self, event: Event);
}

#[derive(Serialize, Deserialize, Debug)]
pub struct EventMessage {
pub sender: String,
pub events: Vec<Event>,
}

pub struct RabbitMQ {
identity: String,
channel: Channel,
}

impl RabbitMQ {
pub fn new(identity: &str, channel: Channel) -> RabbitMQ {
RabbitMQ { identity: identity.to_owned(), channel: channel }
}
}

impl SysEvents for RabbitMQ {
fn notify(&mut self, event: Event) {
let props = BasicProperties { ..Default::default() };
self.channel
.basic_publish(
@ -26,7 +43,10 @@ impl SysEvents for RabbitMQ {
false,
false,
props,
serde_json::to_string(&EventMessage {
sender: self.identity.clone(),
events: vec![event],
}).unwrap().into_bytes(),
)
.unwrap();
}
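For context, a sketch of how a worker reports events through this trait; the identity and values are hypothetical, and the Event variants come from the generated events.rs:

```
// Sketch: `events` is assumed to be the stats::RabbitMQ created in the worker binaries above.
events.notify(Event::JobReceived);
events.notify(Event::EvaluationDuration("master".to_owned(), 42));
// Each notify() publishes one JSON-encoded EventMessage, roughly:
//   {"sender":"builder-x86_64-linux","events":[{"evaluation-duration":["master",42]}]}
```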


@ -0,0 +1,156 @@
extern crate amqp;
extern crate env_logger;
use ofborg::ghevent;
use ofborg::acl;
use serde_json;
use ofborg::message::{Repo, Pr, massrebuildjob};
use ofborg::worker;
use amqp::protocol::basic::{Deliver, BasicProperties};
pub struct EvaluationFilterWorker {
acl: acl::ACL,
}
impl EvaluationFilterWorker {
pub fn new(acl: acl::ACL) -> EvaluationFilterWorker {
return EvaluationFilterWorker {
acl: acl,
};
}
}
impl worker::SimpleWorker for EvaluationFilterWorker {
type J = ghevent::PullRequestEvent;
fn msg_to_job(
&mut self,
_: &Deliver,
_: &BasicProperties,
body: &Vec<u8>,
) -> Result<Self::J, String> {
return match serde_json::from_slice(body) {
Ok(e) => Ok(e),
Err(e) => {
Err(format!(
"Failed to deserialize job {:?}: {:?}",
e,
String::from_utf8(body.clone())
))
}
};
}
fn consumer(&mut self, job: &ghevent::PullRequestEvent) -> worker::Actions {
if !self.acl.is_repo_eligible(&job.repository.full_name) {
info!("Repo not authorized ({})", job.repository.full_name);
return vec![worker::Action::Ack];
}
if job.pull_request.state != ghevent::PullRequestState::Open {
info!("PR is not open ({}#{})", job.repository.full_name, job.number);
return vec![worker::Action::Ack];
}
let interesting: bool = match job.action {
ghevent::PullRequestAction::Opened => true,
ghevent::PullRequestAction::Synchronize => true,
ghevent::PullRequestAction::Reopened => true,
ghevent::PullRequestAction::Edited => {
if let Some(ref changes) = job.changes {
changes.base.is_some()
} else {
false
}
},
_ => false,
};
if !interesting {
info!("Not interesting: {}#{} because of {:?}",
job.repository.full_name, job.number, job.action
);
return vec![
worker::Action::Ack
];
}
info!("Found {}#{} to be interesting because of {:?}",
job.repository.full_name, job.number, job.action
);
let repo_msg = Repo {
clone_url: job.repository.clone_url.clone(),
full_name: job.repository.full_name.clone(),
owner: job.repository.owner.login.clone(),
name: job.repository.name.clone(),
};
let pr_msg = Pr {
number: job.number.clone(),
head_sha: job.pull_request.head.sha.clone(),
target_branch: Some(job.pull_request.base.git_ref.clone()),
};
let msg = massrebuildjob::MassRebuildJob {
repo: repo_msg.clone(),
pr: pr_msg.clone(),
};
return vec![
worker::publish_serde_action(
None,
Some("mass-rebuild-check-jobs".to_owned()),
&msg
),
worker::Action::Ack
];
}
}
#[cfg(test)]
mod tests {
use worker::SimpleWorker;
use super::*;
#[test]
fn changed_base() {
let data = include_str!("../../test-srcs/events/pr-changed-base.json");
let job: ghevent::PullRequestEvent =
serde_json::from_str(&data.to_string())
.expect("Should properly deserialize");
let mut worker = EvaluationFilterWorker::new(acl::ACL::new(
vec!["nixos/nixpkgs".to_owned()],
vec![],
vec![],
));
assert_eq!(
worker.consumer(&job),
vec![
worker::publish_serde_action(
None,
Some("mass-rebuild-check-jobs".to_owned()),
&massrebuildjob::MassRebuildJob {
repo: Repo {
clone_url: String::from("https://github.com/NixOS/nixpkgs.git"),
full_name: String::from("NixOS/nixpkgs"),
owner: String::from("NixOS"),
name: String::from("nixpkgs"),
},
pr: Pr {
number: 33299,
head_sha: String::from("887e8b460a7d45ddb3bbdebe01447b251b3229e8"),
target_branch: Some(String::from("staging")),
},
}
),
worker::Action::Ack,
]
);
}
}


@ -59,6 +59,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
);

if build_destinations.len() == 0 {
println!("No build destinations for: {:?}", job);
// Don't process comments if they can't build anything
return vec![worker::Action::Ack];
}


@@ -75,7 +75,7 @@ fn result_to_comment(result: &BuildResult) -> String {
     let mut reply: Vec<String> = vec![];

     reply.push(format!(
-        "{} on {} [(full log)](https://logs.nix.gsc.io/?key={}/{}.{}&attempt_id={})",
+        "{} on {} [(full log)](https://logs.nix.ci/?key={}/{}.{}&attempt_id={})",
         (match result.success {
             true => "Success",
             false => "Failure",
@@ -142,7 +142,7 @@ mod tests {
         assert_eq!(
             &result_to_comment(&result),
-            "Success on x86_64-linux [(full log)](https://logs.nix.gsc.io/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid)
+            "Success on x86_64-linux [(full log)](https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid)
 <details><summary>Partial log (click to expand)</summary><p>
@@ -197,7 +197,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
         assert_eq!(
             &result_to_comment(&result),
-            "Failure on x86_64-linux [(full log)](https://logs.nix.gsc.io/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid)
+            "Failure on x86_64-linux [(full log)](https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid)
 <details><summary>Partial log (click to expand)</summary><p>
@@ -252,7 +252,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
         assert_eq!(
             &result_to_comment(&result),
-            "Failure on x86_64-linux [(full log)](https://logs.nix.gsc.io/?key=nixos/nixpkgs.2345&attempt_id=none)
+            "Failure on x86_64-linux [(full log)](https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=none)
 <details><summary>Partial log (click to expand)</summary><p>

View file

@@ -1,14 +1,16 @@
 extern crate amqp;
 extern crate env_logger;

+use either::{Either, Left, Right};
 use lru_cache::LruCache;
 use serde_json;
 use std::fs;
 use std::fs::{OpenOptions, File};
 use std::path::{Component, PathBuf};
+use std::io::Write;

 use ofborg::writetoline::LineWriter;
-use ofborg::message::buildlogmsg::BuildLogMsg;
+use ofborg::message::buildlogmsg::{BuildLogStart, BuildLogMsg};
 use ofborg::worker;
 use amqp::protocol::basic::{Deliver, BasicProperties};
@@ -26,7 +28,7 @@ pub struct LogMessageCollector {
 #[derive(Debug)]
 pub struct LogMessage {
     from: LogFrom,
-    message: BuildLogMsg,
+    message: Either<BuildLogStart, BuildLogMsg>,
 }

 fn validate_path_segment(segment: &PathBuf) -> Result<(), String> {
@@ -58,14 +60,32 @@ impl LogMessageCollector {
         };
     }

+    pub fn write_metadata(&mut self, from: &LogFrom, data: &BuildLogStart) -> Result<(), String>{
+        let metapath = self.path_for_metadata(&from)?;
+        let mut fp = self.open_file(metapath)?;
+
+        match serde_json::to_string(data) {
+            Ok(data) => {
+                if let Err(e) = fp.write(&data.as_bytes()) {
+                    Err(format!("Failed to write metadata: {:?}", e))
+                } else {
+                    Ok(())
+                }
+            },
+            Err(e) => {
+                Err(format!("Failed to stringify metadata: {:?}", e))
+            }
+        }
+    }
+
     pub fn handle_for(&mut self, from: &LogFrom) -> Result<&mut LineWriter, String> {
         if self.handles.contains_key(&from) {
             return Ok(self.handles.get_mut(&from).expect(
                 "handles just contained the key",
             ));
         } else {
-            let logpath = self.path_for(&from)?;
-            let fp = self.open_log(logpath)?;
+            let logpath = self.path_for_log(&from)?;
+            let fp = self.open_file(logpath)?;
             let writer = LineWriter::new(fp);
             self.handles.insert(from.clone(), writer);
             if let Some(handle) = self.handles.get_mut(&from) {
@@ -78,7 +98,13 @@ impl LogMessageCollector {
         }
     }

-    fn path_for(&self, from: &LogFrom) -> Result<PathBuf, String> {
+    fn path_for_metadata(&self, from: &LogFrom) -> Result<PathBuf, String> {
+        let mut path = self.path_for_log(from)?;
+        path.set_extension("metadata.json");
+        return Ok(path);
+    }
+
+    fn path_for_log(&self, from: &LogFrom) -> Result<PathBuf, String> {
         let mut location = self.log_root.clone();

         let routing_key = PathBuf::from(from.routing_key.clone());
@@ -100,7 +126,7 @@ impl LogMessageCollector {
         }
     }

-    fn open_log(&self, path: PathBuf) -> Result<File, String> {
+    fn open_file(&self, path: PathBuf) -> Result<File, String> {
         let dir = path.parent().unwrap();
         fs::create_dir_all(dir).unwrap();
@@ -114,7 +140,7 @@ impl LogMessageCollector {
         match attempt {
             Ok(handle) => Ok(handle),
             Err(e) => Err(format!(
-                "Failed to open the log file for {:?}, err: {:?}",
+                "Failed to open the file for {:?}, err: {:?}",
                 &path,
                 e
             )),
@@ -132,26 +158,44 @@ impl worker::SimpleWorker for LogMessageCollector {
         body: &Vec<u8>,
     ) -> Result<Self::J, String> {
-        let decode = serde_json::from_slice(body);
-        if let Err(e) = decode {
-            return Err(format!("failed to decode job: {:?}", e));
+        let message: Either<BuildLogStart, BuildLogMsg>;
+        let attempt_id: String;
+
+        let decode_msg: Result<BuildLogMsg, _> = serde_json::from_slice(body);
+        if let Ok(msg) = decode_msg {
+            attempt_id = msg.attempt_id.clone();
+            message = Right(msg);
+        } else {
+            let decode_msg: Result<BuildLogStart, _> = serde_json::from_slice(body);
+            if let Ok(msg) = decode_msg {
+                attempt_id = msg.attempt_id.clone();
+                message = Left(msg);
+            } else {
+                return Err(format!("failed to decode job: {:?}", decode_msg));
+            }
         }

-        let message: BuildLogMsg = decode.unwrap();
-        Ok(LogMessage {
+        return Ok(LogMessage {
             from: LogFrom {
                 routing_key: deliver.routing_key.clone(),
-                attempt_id: message.attempt_id.clone(),
+                attempt_id: attempt_id,
             },
-            message: message,
-        })
+            message: message
+        });
     }

     fn consumer(&mut self, job: &LogMessage) -> worker::Actions {
-        let handle = self.handle_for(&job.from).unwrap();
-        handle.write_to_line((job.message.line_number - 1) as usize, &job.message.output);
+        match job.message {
+            Left(ref start) => {
+                self.write_metadata(&job.from, &start).expect("failed to write metadata");
+            },
+            Right(ref message) => {
+                let handle = self.handle_for(&job.from).unwrap();
+                handle.write_to_line((message.line_number - 1) as usize,
+                                     &message.output);
+            }
+        }
+
         return vec![worker::Action::Ack];
     }
@@ -194,12 +238,29 @@ mod tests {
     }

     #[test]
-    fn test_path_for() {
-        let p = TestScratch::new_dir("log-message-collector-path_for");
+    fn test_path_for_metadata() {
+        let p = TestScratch::new_dir("log-message-collector-path_for_metadata");
         let worker = make_worker(p.path());

         let path = worker
-            .path_for(&LogFrom {
+            .path_for_metadata(&LogFrom {
+                attempt_id: String::from("my-attempt-id"),
+                routing_key: String::from("my-routing-key"),
+            })
+            .expect("the path should be valid");
+        assert!(path.starts_with(p.path()));
+        assert!(path.as_os_str().to_string_lossy().ends_with("my-routing-key/my-attempt-id.metadata.json"));
+    }
+
+    #[test]
+    fn test_path_for_log() {
+        let p = TestScratch::new_dir("log-message-collector-path_for_log");
+        let worker = make_worker(p.path());
+
+        let path = worker
+            .path_for_log(&LogFrom {
                 attempt_id: String::from("my-attempt-id"),
                 routing_key: String::from("my-routing-key"),
             })
@@ -211,11 +272,11 @@ mod tests {
     }

     #[test]
-    fn test_path_for_malicious() {
+    fn test_path_for_log_malicious() {
         let p = TestScratch::new_dir("log-message-collector-for_malicious");
         let worker = make_worker(p.path());

-        let path = worker.path_for(&LogFrom {
+        let path = worker.path_for_log(&LogFrom {
             attempt_id: String::from("./../../"),
             routing_key: String::from("./../../foobar"),
         });
@@ -242,52 +303,75 @@ mod tests {
     #[test]
-    fn test_open_log() {
-        let p = TestScratch::new_dir("log-message-collector-open_log");
+    fn test_open_file() {
+        let p = TestScratch::new_dir("log-message-collector-open_file");
         let worker = make_worker(p.path());

         assert!(
             worker
-                .open_log(worker.path_for(&make_from("a")).unwrap())
+                .open_file(worker.path_for_log(&make_from("a")).unwrap())
                 .is_ok()
         );
         assert!(
             worker
-                .open_log(worker.path_for(&make_from("b.foo/123")).unwrap())
+                .open_file(worker.path_for_log(&make_from("b.foo/123")).unwrap())
                 .is_ok()
         );
     }

     #[test]
     pub fn test_logs_collect() {
-        let mut job = LogMessage {
-            from: make_from("foo"),
-            message: BuildLogMsg {
+        let mut logmsg = BuildLogMsg {
             attempt_id: String::from("my-attempt-id"),
             identity: String::from("my-identity"),
             system: String::from("foobar-x8664"),
             line_number: 1,
             output: String::from("line-1"),
-            },
+        };
+        let mut job = LogMessage {
+            from: make_from("foo"),
+            message: Right(logmsg.clone()),
         };

-        let p = TestScratch::new_dir("log-message-collector-path_for");
+        let p = TestScratch::new_dir("log-message-collector-path_for_log");

         {
             let mut worker = make_worker(p.path());
+            assert_eq!(vec![worker::Action::Ack],
+                       worker.consumer(&
+                           LogMessage {
+                               from: make_from("foo"),
+                               message: Left(BuildLogStart {
+                                   attempt_id: String::from("my-attempt-id"),
+                                   identity: String::from("my-identity"),
+                                   system: String::from("foobar-x8664"),
+                               })
+                           }
+                       )
+            );
+
             assert_eq!(vec![worker::Action::Ack], worker.consumer(&job));

-            job.message.line_number = 5;
-            job.message.output = String::from("line-5");
+            logmsg.line_number = 5;
+            logmsg.output = String::from("line-5");
+            job.message = Right(logmsg.clone());
             assert_eq!(vec![worker::Action::Ack], worker.consumer(&job));

             job.from.attempt_id = String::from("my-other-attempt");
-            job.message.attempt_id = String::from("my-other-attempt");
-            job.message.line_number = 3;
-            job.message.output = String::from("line-3");
+            logmsg.attempt_id = String::from("my-other-attempt");
+            logmsg.line_number = 3;
+            logmsg.output = String::from("line-3");
+            job.message = Right(logmsg.clone());
             assert_eq!(vec![worker::Action::Ack], worker.consumer(&job));
         }

+        let mut pr = p.path();
+        let mut s = String::new();
+        pr.push("routing-key-foo/attempt-id-foo.metadata.json");
+        File::open(pr).unwrap().read_to_string(&mut s).unwrap();
+        assert_eq!(&s, "{\"system\":\"foobar-x8664\",\"identity\":\"my-identity\",\"attempt_id\":\"my-attempt-id\"}");
+
         let mut pr = p.path();
         let mut s = String::new();
         pr.push("routing-key-foo/attempt-id-foo");

View file

@@ -10,10 +10,11 @@ use std::path::Path;
 use std::path::PathBuf;
 use ofborg::checkout;
 use ofborg::message::{massrebuildjob, buildjob};
-use ofborg::nix::Nix;
+use std::time::Instant;
+use ofborg::nix;
 use ofborg::acl::ACL;
 use ofborg::stats;
+use ofborg::stats::Event;
 use ofborg::worker;
 use ofborg::tagger::{StdenvTagger, RebuildTagger, PathsTagger, PkgsAddedRemovedTagger};
 use ofborg::outpathdiff::{OutPaths, OutPathDiff};
@@ -25,7 +26,7 @@ use hubcaps;
 pub struct MassRebuildWorker<E> {
     cloner: checkout::CachedCloner,
-    nix: Nix,
+    nix: nix::Nix,
     github: hubcaps::Github,
     acl: ACL,
     identity: String,
@@ -36,7 +37,7 @@ pub struct MassRebuildWorker<E> {
 impl<E: stats::SysEvents> MassRebuildWorker<E> {
     pub fn new(
         cloner: checkout::CachedCloner,
-        nix: Nix,
+        nix: nix::Nix,
         github: hubcaps::Github,
         acl: ACL,
         identity: String,
@@ -58,6 +59,20 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
         return massrebuildjob::Actions {};
     }

+    fn tag_from_title(&self, issue: &hubcaps::issues::IssueRef) {
+        let darwin = issue.get()
+            .map(|iss| iss.title.to_lowercase().contains("darwin"))
+            .unwrap_or(false);
+
+        if darwin {
+            update_labels(
+                &issue,
+                vec![String::from("6.topic: darwin")],
+                vec![],
+            );
+        }
+    }
+
     fn tag_from_paths(&self, issue: &hubcaps::issues::IssueRef, paths: Vec<String>) {
         let mut tagger = PathsTagger::new(self.tag_paths.clone());
@@ -73,7 +88,7 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
     }
 }

-impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
+impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E> {
     type J = massrebuildjob::MassRebuildJob;

     fn msg_to_job(
@@ -82,14 +97,14 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
         _: &BasicProperties,
         body: &Vec<u8>,
     ) -> Result<Self::J, String> {
-        self.events.tick("job-received");
+        self.events.notify(Event::JobReceived);
         return match massrebuildjob::from(body) {
             Ok(e) => {
-                self.events.tick("job-decode-success");
+                self.events.notify(Event::JobDecodeSuccess);
                 Ok(e)
             }
             Err(e) => {
-                self.events.tick("job-decode-failure");
+                self.events.notify(Event::JobDecodeFailure);
                 error!(
                     "Failed to decode message: {:?}, Err: {:?}",
                     String::from_utf8(body.clone()),
@@ -113,7 +128,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
         match issue.get() {
             Ok(iss) => {
                 if iss.state == "closed" {
-                    self.events.tick("issue-already-closed");
+                    self.events.notify(Event::IssueAlreadyClosed);
                     info!("Skipping {} because it is closed", job.pr.number);
                     return self.actions().skip(&job);
                 }
@@ -128,13 +143,15 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
                 }
             }
             Err(e) => {
-                self.events.tick("issue-fetch-failed");
+                self.events.notify(Event::IssueFetchFailed);
                 info!("Error fetching {}!", job.pr.number);
                 info!("E: {:?}", e);
                 return self.actions().skip(&job);
             }
         }

+        self.tag_from_title(&issue);
+
         let mut overall_status = CommitStatus::new(
             repo.statuses(),
             job.pr.head_sha.clone(),
@@ -185,6 +202,8 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             hubcaps::statuses::State::Pending,
         );

+        let target_branch_rebuild_sniff_start = Instant::now();
+
         if let Err(mut output) = rebuildsniff.find_before() {
             overall_status.set_url(make_gist(
                 &gists,
@@ -193,6 +212,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
                 file_to_str(&mut output),
             ));

+            self.events.notify(Event::TargetBranchFailsEvaluation(target_branch.clone()));
             overall_status.set_with_description(
                 format!("Target branch {} doesn't evaluate!", &target_branch).as_ref(),
                 hubcaps::statuses::State::Failure,
@@ -200,6 +220,17 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             return self.actions().skip(&job);
         }
+        self.events.notify(
+            Event::EvaluationDuration(
+                target_branch.clone(),
+                target_branch_rebuild_sniff_start.elapsed().as_secs(),
+            )
+        );
+        self.events.notify(
+            Event::EvaluationDurationCount(
+                target_branch.clone()
+            )
+        );

         overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending);
@@ -275,20 +306,17 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
         let eval_checks = vec![
             EvalChecker::new(
                 "package-list",
-                "nix-env",
+                nix::Operation::QueryPackagesJSON,
                 vec![
                     String::from("--file"),
                     String::from("."),
-                    String::from("--query"),
-                    String::from("--available"),
-                    String::from("--json"),
                 ],
                 self.nix.clone()
             ),

             EvalChecker::new(
                 "nixos-options",
-                "nix-instantiate",
+                nix::Operation::Instantiate,
                 vec![
                     String::from("./nixos/release.nix"),
                     String::from("-A"),
@@ -299,7 +327,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             EvalChecker::new(
                 "nixos-manual",
-                "nix-instantiate",
+                nix::Operation::Instantiate,
                 vec![
                     String::from("./nixos/release.nix"),
                     String::from("-A"),
@@ -310,7 +338,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             EvalChecker::new(
                 "nixpkgs-manual",
-                "nix-instantiate",
+                nix::Operation::Instantiate,
                 vec![
                     String::from("./pkgs/top-level/release.nix"),
                     String::from("-A"),
@@ -321,7 +349,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             EvalChecker::new(
                 "nixpkgs-tarball",
-                "nix-instantiate",
+                nix::Operation::Instantiate,
                 vec![
                     String::from("./pkgs/top-level/release.nix"),
                     String::from("-A"),
@@ -332,7 +360,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             EvalChecker::new(
                 "nixpkgs-unstable-jobset",
-                "nix-instantiate",
+                nix::Operation::Instantiate,
                 vec![
                     String::from("./pkgs/top-level/release.nix"),
                     String::from("-A"),
@@ -476,10 +504,11 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
         let mut rebuild_tags = RebuildTagger::new();

         if let Some(attrs) = rebuildsniff.calculate_rebuild() {
+            if attrs.len() > 0 {
                 let gist_url = make_gist(
                     &gists,
                     String::from("Changed Paths"),
-                    None,
+                    Some("".to_owned()),
                     attrs
                         .iter()
                         .map(|attr| format!("{}\t{}", &attr.architecture, &attr.package))
@@ -487,10 +516,12 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
                         .join("\n"),
                 );

-            rebuild_tags.parse_attrs(attrs);
                 overall_status.set_url(gist_url);
             }
+            rebuild_tags.parse_attrs(attrs);
+        }

         update_labels(
             &issue,
             rebuild_tags.tags_to_add(),
@@ -506,6 +537,8 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             );
         }

+        self.events.notify(Event::TaskEvaluationCheckComplete);
+
         return self.actions().done(&job, response);
     }
 }
@@ -523,7 +556,7 @@ pub enum System {
 #[derive(Debug, PartialEq)]
 struct Stdenvs {
-    nix: Nix,
+    nix: nix::Nix,
     co: PathBuf,

     linux_stdenv_before: Option<String>,
@@ -534,7 +567,7 @@ struct Stdenvs {
 }

 impl Stdenvs {
-    fn new(nix: Nix, co: PathBuf) -> Stdenvs {
+    fn new(nix: nix::Nix, co: PathBuf) -> Stdenvs {
         return Stdenvs {
             nix: nix,
             co: co,
@@ -596,7 +629,7 @@ impl Stdenvs {
     fn evalstdenv(&self, system: &str) -> Option<String> {
         let result = self.nix.with_system(system.to_owned()).safely(
-            "nix-instantiate",
+            nix::Operation::Instantiate,
             &self.co,
             vec![
                 String::from("."),
@@ -737,14 +770,23 @@ fn parse_commit_messages(messages: Vec<String>) -> Vec<String> {
 mod tests {
     use super::*;
+    use std::process::Command;

     #[test]
     fn stdenv_checking() {
-        let nix = Nix::new(String::from("x86_64-linux"), String::from("daemon"), 1200, None);
+        let output = Command::new("nix-instantiate")
+            .args(&["--eval", "-E", "<nixpkgs>"])
+            .output()
+            .expect("nix-instantiate required");
+        let nixpkgs = String::from_utf8(output.stdout)
+            .expect("nixpkgs required");
+
+        let nix = nix::Nix::new(String::from("x86_64-linux"), String::from("daemon"), 1200, None);
         let mut stdenv =
             Stdenvs::new(
                 nix.clone(),
-                PathBuf::from("/nix/var/nix/profiles/per-user/root/channels/nixos/nixpkgs"),
+                PathBuf::from(nixpkgs.trim_right()),
             );
         stdenv.identify(System::X8664Linux, StdenvFrom::Before);
         stdenv.identify(System::X8664Darwin, StdenvFrom::Before);

View file

@@ -3,4 +3,6 @@ pub mod build;
 pub mod massrebuilder;
 pub mod githubcommentfilter;
 pub mod githubcommentposter;
+pub mod statscollector;
 pub mod log_message_collector;
+pub mod evaluationfilter;

View file

@ -0,0 +1,70 @@
extern crate amqp;
extern crate env_logger;
use serde_json;
use ofborg::worker;
use ofborg::stats;
use amqp::protocol::basic::{Deliver, BasicProperties};
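// Receives stats events from the queue and records them with the metric collector.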
pub struct StatCollectorWorker<E> {
events: E,
collector: stats::MetricCollector,
}
impl<E: stats::SysEvents + 'static> StatCollectorWorker<E> {
pub fn new(events: E, collector: stats::MetricCollector) -> StatCollectorWorker<E> {
StatCollectorWorker {
events: events,
collector: collector,
}
}
}
impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorker<E> {
type J = stats::EventMessage;
fn msg_to_job(
&mut self,
_: &Deliver,
_: &BasicProperties,
body: &Vec<u8>,
) -> Result<Self::J, String> {
return match serde_json::from_slice(body) {
Ok(e) => Ok(e),
Err(_) => {
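                // Legacy events arrive as bare strings; wrap the body in quotes
                // so it parses as a JSON string and can still be deserialized.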
let mut modified_body: Vec<u8> = vec!["\"".as_bytes()[0]];
modified_body.append(&mut body.clone());
modified_body.push("\"".as_bytes()[0]);
match serde_json::from_slice(&modified_body) {
Ok(e) => {
self.events.notify(stats::Event::StatCollectorLegacyEvent(stats::event_metric_name(&e)));
Ok(stats::EventMessage {
sender: "".to_owned(),
events: vec![e],
})
},
Err(e) => {
self.events.notify(stats::Event::StatCollectorBogusEvent);
error!(
"Failed to decode message: {:?}, Err: {:?}",
String::from_utf8(body.clone()),
e
);
Err("Failed to decode message".to_owned())
}
}
}
};
}
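    // Record each event in the message against the instance that sent it.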
fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions {
let sender = job.sender.clone();
for event in job.events.iter() {
self.collector.record(sender.clone(), event.clone());
}
return vec![worker::Action::Ack];
}
}

View file

@@ -54,8 +54,8 @@
     });
 }

-pub trait SimpleWorker {
-    type J;
+pub trait SimpleWorker: Send + 'static {
+    type J: Send;

     fn consumer(&mut self, job: &Self::J) -> Actions;

View file

@ -0,0 +1,484 @@
{
"action": "edited",
"number": 33299,
"pull_request": {
"url": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299",
"id": 160662893,
"html_url": "https://github.com/NixOS/nixpkgs/pull/33299",
"diff_url": "https://github.com/NixOS/nixpkgs/pull/33299.diff",
"patch_url": "https://github.com/NixOS/nixpkgs/pull/33299.patch",
"issue_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/33299",
"number": 33299,
"state": "open",
"locked": false,
"title": "NixOS Tests: record an flv of the test",
"user": {
"login": "grahamc",
"id": 76716,
"avatar_url": "https://avatars3.githubusercontent.com/u/76716?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/grahamc",
"html_url": "https://github.com/grahamc",
"followers_url": "https://api.github.com/users/grahamc/followers",
"following_url": "https://api.github.com/users/grahamc/following{/other_user}",
"gists_url": "https://api.github.com/users/grahamc/gists{/gist_id}",
"starred_url": "https://api.github.com/users/grahamc/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/grahamc/subscriptions",
"organizations_url": "https://api.github.com/users/grahamc/orgs",
"repos_url": "https://api.github.com/users/grahamc/repos",
"events_url": "https://api.github.com/users/grahamc/events{/privacy}",
"received_events_url": "https://api.github.com/users/grahamc/received_events",
"type": "User",
"site_admin": false
},
"body": "###### Motivation for this change\r\n\r\nSometimes tests can be hard to debug. Maybe recording an FLV from the VNC could help with that? To start, enable the recording on the flaky keymap test.\r\n\r\n###### Things done\r\n\r\n<!-- Please check what applies. Note that these are not hard requirements but merely serve as information for reviewers. -->\r\n\r\n- [ ] Tested using sandboxing ([nix.useSandbox](http://nixos.org/nixos/manual/options.html#opt-nix.useSandbox) on NixOS, or option `build-use-sandbox` in [`nix.conf`](http://nixos.org/nix/manual/#sec-conf-file) on non-NixOS)\r\n- Built on platform(s)\r\n - [ ] NixOS\r\n - [ ] macOS\r\n - [ ] other Linux distributions\r\n- [ ] Tested via one or more NixOS test(s) if existing and applicable for the change (look inside [nixos/tests](https://github.com/NixOS/nixpkgs/blob/master/nixos/tests))\r\n- [ ] Tested compilation of all pkgs that depend on this change using `nix-shell -p nox --run \"nox-review wip\"`\r\n- [ ] Tested execution of all binary files (usually in `./result/bin/`)\r\n- [ ] Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md).\r\n\r\n---\r\n\r\n",
"created_at": "2018-01-01T22:39:24Z",
"updated_at": "2018-02-23T21:48:19Z",
"closed_at": null,
"merged_at": null,
"merge_commit_sha": "e145dffca8579ca8fac15497af5f166d1e1197a4",
"assignee": null,
"assignees": [],
"requested_reviewers": [],
"requested_teams": [],
"labels": [
{
"id": 737642262,
"url": "https://api.github.com/repos/NixOS/nixpkgs/labels/10.rebuild-darwin:%200",
"name": "10.rebuild-darwin: 0",
"color": "eeffee",
"default": false
},
{
"id": 737642408,
"url": "https://api.github.com/repos/NixOS/nixpkgs/labels/10.rebuild-linux:%200",
"name": "10.rebuild-linux: 0",
"color": "eeffee",
"default": false
}
],
"milestone": null,
"commits_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299/commits",
"review_comments_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299/comments",
"review_comment_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls/comments{/number}",
"comments_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/33299/comments",
"statuses_url": "https://api.github.com/repos/NixOS/nixpkgs/statuses/887e8b460a7d45ddb3bbdebe01447b251b3229e8",
"head": {
"label": "grahamc:flv-nixos-tests",
"ref": "flv-nixos-tests",
"sha": "887e8b460a7d45ddb3bbdebe01447b251b3229e8",
"user": {
"login": "grahamc",
"id": 76716,
"avatar_url": "https://avatars3.githubusercontent.com/u/76716?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/grahamc",
"html_url": "https://github.com/grahamc",
"followers_url": "https://api.github.com/users/grahamc/followers",
"following_url": "https://api.github.com/users/grahamc/following{/other_user}",
"gists_url": "https://api.github.com/users/grahamc/gists{/gist_id}",
"starred_url": "https://api.github.com/users/grahamc/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/grahamc/subscriptions",
"organizations_url": "https://api.github.com/users/grahamc/orgs",
"repos_url": "https://api.github.com/users/grahamc/repos",
"events_url": "https://api.github.com/users/grahamc/events{/privacy}",
"received_events_url": "https://api.github.com/users/grahamc/received_events",
"type": "User",
"site_admin": false
},
"repo": {
"id": 52226505,
"name": "nixpkgs",
"full_name": "grahamc/nixpkgs",
"owner": {
"login": "grahamc",
"id": 76716,
"avatar_url": "https://avatars3.githubusercontent.com/u/76716?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/grahamc",
"html_url": "https://github.com/grahamc",
"followers_url": "https://api.github.com/users/grahamc/followers",
"following_url": "https://api.github.com/users/grahamc/following{/other_user}",
"gists_url": "https://api.github.com/users/grahamc/gists{/gist_id}",
"starred_url": "https://api.github.com/users/grahamc/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/grahamc/subscriptions",
"organizations_url": "https://api.github.com/users/grahamc/orgs",
"repos_url": "https://api.github.com/users/grahamc/repos",
"events_url": "https://api.github.com/users/grahamc/events{/privacy}",
"received_events_url": "https://api.github.com/users/grahamc/received_events",
"type": "User",
"site_admin": false
},
"private": false,
"html_url": "https://github.com/grahamc/nixpkgs",
"description": "Nix Packages collection",
"fork": true,
"url": "https://api.github.com/repos/grahamc/nixpkgs",
"forks_url": "https://api.github.com/repos/grahamc/nixpkgs/forks",
"keys_url": "https://api.github.com/repos/grahamc/nixpkgs/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/grahamc/nixpkgs/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/grahamc/nixpkgs/teams",
"hooks_url": "https://api.github.com/repos/grahamc/nixpkgs/hooks",
"issue_events_url": "https://api.github.com/repos/grahamc/nixpkgs/issues/events{/number}",
"events_url": "https://api.github.com/repos/grahamc/nixpkgs/events",
"assignees_url": "https://api.github.com/repos/grahamc/nixpkgs/assignees{/user}",
"branches_url": "https://api.github.com/repos/grahamc/nixpkgs/branches{/branch}",
"tags_url": "https://api.github.com/repos/grahamc/nixpkgs/tags",
"blobs_url": "https://api.github.com/repos/grahamc/nixpkgs/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/grahamc/nixpkgs/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/grahamc/nixpkgs/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/grahamc/nixpkgs/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/grahamc/nixpkgs/statuses/{sha}",
"languages_url": "https://api.github.com/repos/grahamc/nixpkgs/languages",
"stargazers_url": "https://api.github.com/repos/grahamc/nixpkgs/stargazers",
"contributors_url": "https://api.github.com/repos/grahamc/nixpkgs/contributors",
"subscribers_url": "https://api.github.com/repos/grahamc/nixpkgs/subscribers",
"subscription_url": "https://api.github.com/repos/grahamc/nixpkgs/subscription",
"commits_url": "https://api.github.com/repos/grahamc/nixpkgs/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/grahamc/nixpkgs/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/grahamc/nixpkgs/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/grahamc/nixpkgs/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/grahamc/nixpkgs/contents/{+path}",
"compare_url": "https://api.github.com/repos/grahamc/nixpkgs/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/grahamc/nixpkgs/merges",
"archive_url": "https://api.github.com/repos/grahamc/nixpkgs/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/grahamc/nixpkgs/downloads",
"issues_url": "https://api.github.com/repos/grahamc/nixpkgs/issues{/number}",
"pulls_url": "https://api.github.com/repos/grahamc/nixpkgs/pulls{/number}",
"milestones_url": "https://api.github.com/repos/grahamc/nixpkgs/milestones{/number}",
"notifications_url": "https://api.github.com/repos/grahamc/nixpkgs/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/grahamc/nixpkgs/labels{/name}",
"releases_url": "https://api.github.com/repos/grahamc/nixpkgs/releases{/id}",
"deployments_url": "https://api.github.com/repos/grahamc/nixpkgs/deployments",
"created_at": "2016-02-21T20:31:54Z",
"updated_at": "2017-05-07T04:44:29Z",
"pushed_at": "2018-01-01T22:35:52Z",
"git_url": "git://github.com/grahamc/nixpkgs.git",
"ssh_url": "git@github.com:grahamc/nixpkgs.git",
"clone_url": "https://github.com/grahamc/nixpkgs.git",
"svn_url": "https://github.com/grahamc/nixpkgs",
"homepage": null,
"size": 627435,
"stargazers_count": 1,
"watchers_count": 1,
"language": "Nix",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": false,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"open_issues_count": 1,
"license": {
"key": "other",
"name": "Other",
"spdx_id": null,
"url": null
},
"forks": 0,
"open_issues": 1,
"watchers": 1,
"default_branch": "master"
}
},
"base": {
"label": "NixOS:staging",
"ref": "staging",
"sha": "19784ca4c9ac378539bdc535b02ae673ba6ba0b0",
"user": {
"login": "NixOS",
"id": 487568,
"avatar_url": "https://avatars3.githubusercontent.com/u/487568?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/NixOS",
"html_url": "https://github.com/NixOS",
"followers_url": "https://api.github.com/users/NixOS/followers",
"following_url": "https://api.github.com/users/NixOS/following{/other_user}",
"gists_url": "https://api.github.com/users/NixOS/gists{/gist_id}",
"starred_url": "https://api.github.com/users/NixOS/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/NixOS/subscriptions",
"organizations_url": "https://api.github.com/users/NixOS/orgs",
"repos_url": "https://api.github.com/users/NixOS/repos",
"events_url": "https://api.github.com/users/NixOS/events{/privacy}",
"received_events_url": "https://api.github.com/users/NixOS/received_events",
"type": "Organization",
"site_admin": false
},
"repo": {
"id": 4542716,
"name": "nixpkgs",
"full_name": "NixOS/nixpkgs",
"owner": {
"login": "NixOS",
"id": 487568,
"avatar_url": "https://avatars3.githubusercontent.com/u/487568?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/NixOS",
"html_url": "https://github.com/NixOS",
"followers_url": "https://api.github.com/users/NixOS/followers",
"following_url": "https://api.github.com/users/NixOS/following{/other_user}",
"gists_url": "https://api.github.com/users/NixOS/gists{/gist_id}",
"starred_url": "https://api.github.com/users/NixOS/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/NixOS/subscriptions",
"organizations_url": "https://api.github.com/users/NixOS/orgs",
"repos_url": "https://api.github.com/users/NixOS/repos",
"events_url": "https://api.github.com/users/NixOS/events{/privacy}",
"received_events_url": "https://api.github.com/users/NixOS/received_events",
"type": "Organization",
"site_admin": false
},
"private": false,
"html_url": "https://github.com/NixOS/nixpkgs",
"description": "Nix Packages collection",
"fork": false,
"url": "https://api.github.com/repos/NixOS/nixpkgs",
"forks_url": "https://api.github.com/repos/NixOS/nixpkgs/forks",
"keys_url": "https://api.github.com/repos/NixOS/nixpkgs/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/NixOS/nixpkgs/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/NixOS/nixpkgs/teams",
"hooks_url": "https://api.github.com/repos/NixOS/nixpkgs/hooks",
"issue_events_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/events{/number}",
"events_url": "https://api.github.com/repos/NixOS/nixpkgs/events",
"assignees_url": "https://api.github.com/repos/NixOS/nixpkgs/assignees{/user}",
"branches_url": "https://api.github.com/repos/NixOS/nixpkgs/branches{/branch}",
"tags_url": "https://api.github.com/repos/NixOS/nixpkgs/tags",
"blobs_url": "https://api.github.com/repos/NixOS/nixpkgs/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/NixOS/nixpkgs/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/NixOS/nixpkgs/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/NixOS/nixpkgs/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/NixOS/nixpkgs/statuses/{sha}",
"languages_url": "https://api.github.com/repos/NixOS/nixpkgs/languages",
"stargazers_url": "https://api.github.com/repos/NixOS/nixpkgs/stargazers",
"contributors_url": "https://api.github.com/repos/NixOS/nixpkgs/contributors",
"subscribers_url": "https://api.github.com/repos/NixOS/nixpkgs/subscribers",
"subscription_url": "https://api.github.com/repos/NixOS/nixpkgs/subscription",
"commits_url": "https://api.github.com/repos/NixOS/nixpkgs/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/NixOS/nixpkgs/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/NixOS/nixpkgs/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/NixOS/nixpkgs/contents/{+path}",
"compare_url": "https://api.github.com/repos/NixOS/nixpkgs/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/NixOS/nixpkgs/merges",
"archive_url": "https://api.github.com/repos/NixOS/nixpkgs/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/NixOS/nixpkgs/downloads",
"issues_url": "https://api.github.com/repos/NixOS/nixpkgs/issues{/number}",
"pulls_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls{/number}",
"milestones_url": "https://api.github.com/repos/NixOS/nixpkgs/milestones{/number}",
"notifications_url": "https://api.github.com/repos/NixOS/nixpkgs/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/NixOS/nixpkgs/labels{/name}",
"releases_url": "https://api.github.com/repos/NixOS/nixpkgs/releases{/id}",
"deployments_url": "https://api.github.com/repos/NixOS/nixpkgs/deployments",
"created_at": "2012-06-04T02:49:46Z",
"updated_at": "2018-02-23T20:56:05Z",
"pushed_at": "2018-02-23T21:40:58Z",
"git_url": "git://github.com/NixOS/nixpkgs.git",
"ssh_url": "git@github.com:NixOS/nixpkgs.git",
"clone_url": "https://github.com/NixOS/nixpkgs.git",
"svn_url": "https://github.com/NixOS/nixpkgs",
"homepage": null,
"size": 724069,
"stargazers_count": 2239,
"watchers_count": 2239,
"language": "Nix",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": false,
"has_pages": false,
"forks_count": 2580,
"mirror_url": null,
"archived": false,
"open_issues_count": 2860,
"license": {
"key": "other",
"name": "Other",
"spdx_id": null,
"url": null
},
"forks": 2580,
"open_issues": 2860,
"watchers": 2239,
"default_branch": "master"
}
},
"_links": {
"self": {
"href": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299"
},
"html": {
"href": "https://github.com/NixOS/nixpkgs/pull/33299"
},
"issue": {
"href": "https://api.github.com/repos/NixOS/nixpkgs/issues/33299"
},
"comments": {
"href": "https://api.github.com/repos/NixOS/nixpkgs/issues/33299/comments"
},
"review_comments": {
"href": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299/comments"
},
"review_comment": {
"href": "https://api.github.com/repos/NixOS/nixpkgs/pulls/comments{/number}"
},
"commits": {
"href": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299/commits"
},
"statuses": {
"href": "https://api.github.com/repos/NixOS/nixpkgs/statuses/887e8b460a7d45ddb3bbdebe01447b251b3229e8"
}
},
"author_association": "MEMBER",
"merged": false,
"mergeable": null,
"rebaseable": null,
"mergeable_state": "unknown",
"merged_by": null,
"comments": 5,
"review_comments": 0,
"maintainer_can_modify": true,
"commits": 1,
"additions": 41,
"deletions": 4,
"changed_files": 4
},
"changes": {
"base": {
"ref": {
"from": "master"
},
"sha": {
"from": "a6664d8192038c4dc2ad44169dbb76556fe71ac1"
}
}
},
"repository": {
"id": 4542716,
"name": "nixpkgs",
"full_name": "NixOS/nixpkgs",
"owner": {
"login": "NixOS",
"id": 487568,
"avatar_url": "https://avatars3.githubusercontent.com/u/487568?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/NixOS",
"html_url": "https://github.com/NixOS",
"followers_url": "https://api.github.com/users/NixOS/followers",
"following_url": "https://api.github.com/users/NixOS/following{/other_user}",
"gists_url": "https://api.github.com/users/NixOS/gists{/gist_id}",
"starred_url": "https://api.github.com/users/NixOS/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/NixOS/subscriptions",
"organizations_url": "https://api.github.com/users/NixOS/orgs",
"repos_url": "https://api.github.com/users/NixOS/repos",
"events_url": "https://api.github.com/users/NixOS/events{/privacy}",
"received_events_url": "https://api.github.com/users/NixOS/received_events",
"type": "Organization",
"site_admin": false
},
"private": false,
"html_url": "https://github.com/NixOS/nixpkgs",
"description": "Nix Packages collection",
"fork": false,
"url": "https://api.github.com/repos/NixOS/nixpkgs",
"forks_url": "https://api.github.com/repos/NixOS/nixpkgs/forks",
"keys_url": "https://api.github.com/repos/NixOS/nixpkgs/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/NixOS/nixpkgs/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/NixOS/nixpkgs/teams",
"hooks_url": "https://api.github.com/repos/NixOS/nixpkgs/hooks",
"issue_events_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/events{/number}",
"events_url": "https://api.github.com/repos/NixOS/nixpkgs/events",
"assignees_url": "https://api.github.com/repos/NixOS/nixpkgs/assignees{/user}",
"branches_url": "https://api.github.com/repos/NixOS/nixpkgs/branches{/branch}",
"tags_url": "https://api.github.com/repos/NixOS/nixpkgs/tags",
"blobs_url": "https://api.github.com/repos/NixOS/nixpkgs/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/NixOS/nixpkgs/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/NixOS/nixpkgs/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/NixOS/nixpkgs/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/NixOS/nixpkgs/statuses/{sha}",
"languages_url": "https://api.github.com/repos/NixOS/nixpkgs/languages",
"stargazers_url": "https://api.github.com/repos/NixOS/nixpkgs/stargazers",
"contributors_url": "https://api.github.com/repos/NixOS/nixpkgs/contributors",
"subscribers_url": "https://api.github.com/repos/NixOS/nixpkgs/subscribers",
"subscription_url": "https://api.github.com/repos/NixOS/nixpkgs/subscription",
"commits_url": "https://api.github.com/repos/NixOS/nixpkgs/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/NixOS/nixpkgs/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/NixOS/nixpkgs/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/NixOS/nixpkgs/contents/{+path}",
"compare_url": "https://api.github.com/repos/NixOS/nixpkgs/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/NixOS/nixpkgs/merges",
"archive_url": "https://api.github.com/repos/NixOS/nixpkgs/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/NixOS/nixpkgs/downloads",
"issues_url": "https://api.github.com/repos/NixOS/nixpkgs/issues{/number}",
"pulls_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls{/number}",
"milestones_url": "https://api.github.com/repos/NixOS/nixpkgs/milestones{/number}",
"notifications_url": "https://api.github.com/repos/NixOS/nixpkgs/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/NixOS/nixpkgs/labels{/name}",
"releases_url": "https://api.github.com/repos/NixOS/nixpkgs/releases{/id}",
"deployments_url": "https://api.github.com/repos/NixOS/nixpkgs/deployments",
"created_at": "2012-06-04T02:49:46Z",
"updated_at": "2018-02-23T20:56:05Z",
"pushed_at": "2018-02-23T21:40:58Z",
"git_url": "git://github.com/NixOS/nixpkgs.git",
"ssh_url": "git@github.com:NixOS/nixpkgs.git",
"clone_url": "https://github.com/NixOS/nixpkgs.git",
"svn_url": "https://github.com/NixOS/nixpkgs",
"homepage": null,
"size": 724069,
"stargazers_count": 2239,
"watchers_count": 2239,
"language": "Nix",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": false,
"has_pages": false,
"forks_count": 2580,
"mirror_url": null,
"archived": false,
"open_issues_count": 2860,
"license": {
"key": "other",
"name": "Other",
"spdx_id": null,
"url": null
},
"forks": 2580,
"open_issues": 2860,
"watchers": 2239,
"default_branch": "master"
},
"organization": {
"login": "NixOS",
"id": 487568,
"url": "https://api.github.com/orgs/NixOS",
"repos_url": "https://api.github.com/orgs/NixOS/repos",
"events_url": "https://api.github.com/orgs/NixOS/events",
"hooks_url": "https://api.github.com/orgs/NixOS/hooks",
"issues_url": "https://api.github.com/orgs/NixOS/issues",
"members_url": "https://api.github.com/orgs/NixOS/members{/member}",
"public_members_url": "https://api.github.com/orgs/NixOS/public_members{/member}",
"avatar_url": "https://avatars3.githubusercontent.com/u/487568?v=4",
"description": ""
},
"sender": {
"login": "grahamc",
"id": 76716,
"avatar_url": "https://avatars3.githubusercontent.com/u/76716?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/grahamc",
"html_url": "https://github.com/grahamc",
"followers_url": "https://api.github.com/users/grahamc/followers",
"following_url": "https://api.github.com/users/grahamc/following{/other_user}",
"gists_url": "https://api.github.com/users/grahamc/gists{/gist_id}",
"starred_url": "https://api.github.com/users/grahamc/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/grahamc/subscriptions",
"organizations_url": "https://api.github.com/users/grahamc/orgs",
"repos_url": "https://api.github.com/users/grahamc/repos",
"events_url": "https://api.github.com/users/grahamc/events{/privacy}",
"received_events_url": "https://api.github.com/users/grahamc/received_events",
"type": "User",
"site_admin": false
}
}

View file

@@ -1,11 +1,5 @@
 {
     "require": {
-        "php-amqplib/php-amqplib": ">=2.6.1",
-        "knplabs/github-api": "^2.6@dev",
-        "php-http/guzzle6-adapter": "^1.2@dev"
-    },
-    "minimum-stability": "dev",
-    "autoload": {
-        "psr-4": {"GHE\\": "src/"}
+        "php-amqplib/php-amqplib": ">=2.6.1"
     }
 }

php/composer.lock (generated, 940 lines changed)
View file

@@ -4,321 +4,20 @@
         "Read more about it at https://getcomposer.org/doc/01-basic-usage.md#composer-lock-the-lock-file",
         "This file is @generated automatically"
     ],
-    "content-hash": "0ee26122485b777e3ea752a8d5da0c61",
+    "content-hash": "f0b42ac9169509834501cb7aa271b580",
     "packages": [
{
"name": "clue/stream-filter",
"version": "v1.4.0",
"source": {
"type": "git",
"url": "https://github.com/clue/php-stream-filter.git",
"reference": "d80fdee9b3a7e0d16fc330a22f41f3ad0eeb09d0"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/clue/php-stream-filter/zipball/d80fdee9b3a7e0d16fc330a22f41f3ad0eeb09d0",
"reference": "d80fdee9b3a7e0d16fc330a22f41f3ad0eeb09d0",
"shasum": ""
},
"require": {
"php": ">=5.3"
},
"require-dev": {
"phpunit/phpunit": "^5.0 || ^4.8"
},
"type": "library",
"autoload": {
"psr-4": {
"Clue\\StreamFilter\\": "src/"
},
"files": [
"src/functions.php"
]
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Christian Lück",
"email": "christian@lueck.tv"
}
],
"description": "A simple and modern approach to stream filtering in PHP",
"homepage": "https://github.com/clue/php-stream-filter",
"keywords": [
"bucket brigade",
"callback",
"filter",
"php_user_filter",
"stream",
"stream_filter_append",
"stream_filter_register"
],
"time": "2017-08-18T09:54:01+00:00"
},
{
"name": "guzzlehttp/guzzle",
"version": "6.3.0",
"source": {
"type": "git",
"url": "https://github.com/guzzle/guzzle.git",
"reference": "f4db5a78a5ea468d4831de7f0bf9d9415e348699"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/guzzle/guzzle/zipball/f4db5a78a5ea468d4831de7f0bf9d9415e348699",
"reference": "f4db5a78a5ea468d4831de7f0bf9d9415e348699",
"shasum": ""
},
"require": {
"guzzlehttp/promises": "^1.0",
"guzzlehttp/psr7": "^1.4",
"php": ">=5.5"
},
"require-dev": {
"ext-curl": "*",
"phpunit/phpunit": "^4.0 || ^5.0",
"psr/log": "^1.0"
},
"suggest": {
"psr/log": "Required for using the Log middleware"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "6.2-dev"
}
},
"autoload": {
"files": [
"src/functions_include.php"
],
"psr-4": {
"GuzzleHttp\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Michael Dowling",
"email": "mtdowling@gmail.com",
"homepage": "https://github.com/mtdowling"
}
],
"description": "Guzzle is a PHP HTTP client library",
"homepage": "http://guzzlephp.org/",
"keywords": [
"client",
"curl",
"framework",
"http",
"http client",
"rest",
"web service"
],
"time": "2017-06-22T18:50:49+00:00"
},
{
"name": "guzzlehttp/promises",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/guzzle/promises.git",
"reference": "2e48ae638dc0bf0849772f5590835fcd700a2e1d"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/guzzle/promises/zipball/2e48ae638dc0bf0849772f5590835fcd700a2e1d",
"reference": "2e48ae638dc0bf0849772f5590835fcd700a2e1d",
"shasum": ""
},
"require": {
"php": ">=5.5.0"
},
"require-dev": {
"phpunit/phpunit": "^4.8.36"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.4-dev"
}
},
"autoload": {
"psr-4": {
"GuzzleHttp\\Promise\\": "src/"
},
"files": [
"src/functions_include.php"
]
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Michael Dowling",
"email": "mtdowling@gmail.com",
"homepage": "https://github.com/mtdowling"
}
],
"description": "Guzzle promises library",
"keywords": [
"promise"
],
"time": "2017-12-07T21:04:15+00:00"
},
{
"name": "guzzlehttp/psr7",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/guzzle/psr7.git",
"reference": "d2537c86fa8b004c29e9b9f5e10028f0a29df101"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/guzzle/psr7/zipball/d2537c86fa8b004c29e9b9f5e10028f0a29df101",
"reference": "d2537c86fa8b004c29e9b9f5e10028f0a29df101",
"shasum": ""
},
"require": {
"php": ">=5.4.0",
"psr/http-message": "~1.0"
},
"provide": {
"psr/http-message-implementation": "1.0"
},
"require-dev": {
"phpunit/phpunit": "~4.0"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.4-dev"
}
},
"autoload": {
"psr-4": {
"GuzzleHttp\\Psr7\\": "src/"
},
"files": [
"src/functions_include.php"
]
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Michael Dowling",
"email": "mtdowling@gmail.com",
"homepage": "https://github.com/mtdowling"
},
{
"name": "Tobias Schultze",
"homepage": "https://github.com/Tobion"
}
],
"description": "PSR-7 message implementation that also provides common utility methods",
"keywords": [
"http",
"message",
"request",
"response",
"stream",
"uri",
"url"
],
"time": "2017-10-07T03:19:56+00:00"
},
{
"name": "knplabs/github-api",
"version": "2.7.0",
"source": {
"type": "git",
"url": "https://github.com/KnpLabs/php-github-api.git",
"reference": "d445f1eec4788763315c3c96a214db4e149f9deb"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/KnpLabs/php-github-api/zipball/d445f1eec4788763315c3c96a214db4e149f9deb",
"reference": "d445f1eec4788763315c3c96a214db4e149f9deb",
"shasum": ""
},
"require": {
"php": "^5.6 || ^7.0",
"php-http/cache-plugin": "^1.4",
"php-http/client-common": "^1.3",
"php-http/client-implementation": "^1.0",
"php-http/discovery": "^1.0",
"php-http/httplug": "^1.1",
"psr/cache": "^1.0",
"psr/http-message": "^1.0"
},
"require-dev": {
"cache/array-adapter": "^0.4",
"guzzlehttp/psr7": "^1.2",
"php-http/guzzle6-adapter": "^1.0",
"php-http/mock-client": "^1.0",
"phpunit/phpunit": "^5.5 || ^6.0",
"sllh/php-cs-fixer-styleci-bridge": "^1.3"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "2.6.x-dev"
}
},
"autoload": {
"psr-4": {
"Github\\": "lib/Github/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Thibault Duplessis",
"email": "thibault.duplessis@gmail.com",
"homepage": "http://ornicar.github.com"
},
{
"name": "KnpLabs Team",
"homepage": "http://knplabs.com"
}
],
"description": "GitHub API v3 client",
"homepage": "https://github.com/KnpLabs/php-github-api",
"keywords": [
"api",
"gh",
"gist",
"github"
],
"time": "2017-12-12T20:14:04+00:00"
},
         {
             "name": "php-amqplib/php-amqplib",
-            "version": "dev-master",
+            "version": "v2.7.2",
             "source": {
                 "type": "git",
                 "url": "https://github.com/php-amqplib/php-amqplib.git",
-                "reference": "0f90b3d8bc50403458f0eefbcba7d1e2329dd0f6"
+                "reference": "dfd3694a86f1a7394d3693485259d4074a6ec79b"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/php-amqplib/php-amqplib/zipball/0f90b3d8bc50403458f0eefbcba7d1e2329dd0f6",
-                "reference": "0f90b3d8bc50403458f0eefbcba7d1e2329dd0f6",
+                "url": "https://api.github.com/repos/php-amqplib/php-amqplib/zipball/dfd3694a86f1a7394d3693485259d4074a6ec79b",
+                "reference": "dfd3694a86f1a7394d3693485259d4074a6ec79b",
                 "shasum": ""
             },
             "require": {
@@ -330,6 +29,7 @@
                 "videlalvaro/php-amqplib": "self.version"
             },
             "require-dev": {
+                "phpdocumentor/phpdocumentor": "^2.9",
                 "phpunit/phpunit": "^4.8",
                 "scrutinizer/ocular": "^1.1",
                 "squizlabs/php_codesniffer": "^2.5"
@@ -350,7 +50,7 @@
             },
             "notification-url": "https://packagist.org/downloads/",
             "license": [
-                "LGPL-2.1"
+                "LGPL-2.1-or-later"
             ],
             "authors": [
                 {
@@ -375,633 +75,13 @@
                 "queue",
                 "rabbitmq"
             ],
-            "time": "2017-09-26T05:30:15+00:00"
+            "time": "2018-02-11T19:28:00+00:00"
},
{
"name": "php-http/cache-plugin",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-http/cache-plugin.git",
"reference": "c573ac6ea9b4e33fad567f875b844229d18000b9"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-http/cache-plugin/zipball/c573ac6ea9b4e33fad567f875b844229d18000b9",
"reference": "c573ac6ea9b4e33fad567f875b844229d18000b9",
"shasum": ""
},
"require": {
"php": "^5.4 || ^7.0",
"php-http/client-common": "^1.1",
"php-http/message-factory": "^1.0",
"psr/cache": "^1.0",
"symfony/options-resolver": "^2.6 || ^3.0 || ^4.0"
},
"require-dev": {
"henrikbjorn/phpspec-code-coverage": "^1.0",
"phpspec/phpspec": "^2.5"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.5-dev"
}
},
"autoload": {
"psr-4": {
"Http\\Client\\Common\\Plugin\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com"
}
],
"description": "PSR-6 Cache plugin for HTTPlug",
"homepage": "http://httplug.io",
"keywords": [
"cache",
"http",
"httplug",
"plugin"
],
"time": "2017-11-29T20:45:41+00:00"
},
{
"name": "php-http/client-common",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-http/client-common.git",
"reference": "9accb4a082eb06403747c0ffd444112eda41a0fd"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-http/client-common/zipball/9accb4a082eb06403747c0ffd444112eda41a0fd",
"reference": "9accb4a082eb06403747c0ffd444112eda41a0fd",
"shasum": ""
},
"require": {
"php": "^5.4 || ^7.0",
"php-http/httplug": "^1.1",
"php-http/message": "^1.6",
"php-http/message-factory": "^1.0",
"symfony/options-resolver": "^2.6 || ^3.0 || ^4.0"
},
"require-dev": {
"guzzlehttp/psr7": "^1.4",
"phpspec/phpspec": "^2.5 || ^3.4 || ^4.2"
},
"suggest": {
"php-http/cache-plugin": "PSR-6 Cache plugin",
"php-http/logger-plugin": "PSR-3 Logger plugin",
"php-http/stopwatch-plugin": "Symfony Stopwatch plugin"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.7-dev"
}
},
"autoload": {
"psr-4": {
"Http\\Client\\Common\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com"
}
],
"description": "Common HTTP Client implementations and tools for HTTPlug",
"homepage": "http://httplug.io",
"keywords": [
"client",
"common",
"http",
"httplug"
],
"time": "2017-11-30T11:06:59+00:00"
},
{
"name": "php-http/discovery",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-http/discovery.git",
"reference": "0ecc08360e6011a4454dc60077db6e9f412be94c"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-http/discovery/zipball/0ecc08360e6011a4454dc60077db6e9f412be94c",
"reference": "0ecc08360e6011a4454dc60077db6e9f412be94c",
"shasum": ""
},
"require": {
"php": "^5.5 || ^7.0"
},
"require-dev": {
"henrikbjorn/phpspec-code-coverage": "^2.0.2",
"php-http/httplug": "^1.0",
"php-http/message-factory": "^1.0",
"phpspec/phpspec": "^2.4",
"puli/composer-plugin": "1.0.0-beta10"
},
"suggest": {
"php-http/message": "Allow to use Guzzle, Diactoros or Slim Framework factories",
"puli/composer-plugin": "Sets up Puli which is recommended for Discovery to work. Check http://docs.php-http.org/en/latest/discovery.html for more details."
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.3-dev"
}
},
"autoload": {
"psr-4": {
"Http\\Discovery\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com"
}
],
"description": "Finds installed HTTPlug implementations and PSR-7 message factories",
"homepage": "http://php-http.org",
"keywords": [
"adapter",
"client",
"discovery",
"factory",
"http",
"message",
"psr7"
],
"time": "2017-11-22T21:17:04+00:00"
},
{
"name": "php-http/guzzle6-adapter",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-http/guzzle6-adapter.git",
"reference": "54181ff8455a4c2e1706a53e0d98060b93030321"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-http/guzzle6-adapter/zipball/54181ff8455a4c2e1706a53e0d98060b93030321",
"reference": "54181ff8455a4c2e1706a53e0d98060b93030321",
"shasum": ""
},
"require": {
"guzzlehttp/guzzle": "^6.0",
"php": "^5.5 || ^7.0",
"php-http/httplug": "^1.0"
},
"provide": {
"php-http/async-client-implementation": "1.0",
"php-http/client-implementation": "1.0"
},
"require-dev": {
"ext-curl": "*",
"php-http/client-integration-tests": "^0.6"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.2-dev"
}
},
"autoload": {
"psr-4": {
"Http\\Adapter\\Guzzle6\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com"
},
{
"name": "David de Boer",
"email": "david@ddeboer.nl"
}
],
"description": "Guzzle 6 HTTP Adapter",
"homepage": "http://httplug.io",
"keywords": [
"Guzzle",
"http"
],
"time": "2017-05-29T15:06:15+00:00"
},
{
"name": "php-http/httplug",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-http/httplug.git",
"reference": "afa7b216322f8157e21025f04f72eda0ee12f89d"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-http/httplug/zipball/afa7b216322f8157e21025f04f72eda0ee12f89d",
"reference": "afa7b216322f8157e21025f04f72eda0ee12f89d",
"shasum": ""
},
"require": {
"php": ">=5.4",
"php-http/promise": "^1.0",
"psr/http-message": "^1.0"
},
"require-dev": {
"henrikbjorn/phpspec-code-coverage": "^1.0",
"phpspec/phpspec": "^2.4"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.2-dev"
}
},
"autoload": {
"psr-4": {
"Http\\Client\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Eric GELOEN",
"email": "geloen.eric@gmail.com"
},
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com"
}
],
"description": "HTTPlug, the HTTP client abstraction for PHP",
"homepage": "http://httplug.io",
"keywords": [
"client",
"http"
],
"time": "2017-12-18T08:01:36+00:00"
},
{
"name": "php-http/message",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-http/message.git",
"reference": "977edb516e3c0419d3477610b4b718c8a9da1575"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-http/message/zipball/977edb516e3c0419d3477610b4b718c8a9da1575",
"reference": "977edb516e3c0419d3477610b4b718c8a9da1575",
"shasum": ""
},
"require": {
"clue/stream-filter": "^1.4",
"php": ">=5.4",
"php-http/message-factory": "^1.0.2",
"psr/http-message": "^1.0"
},
"provide": {
"php-http/message-factory-implementation": "1.0"
},
"require-dev": {
"akeneo/phpspec-skip-example-extension": "^1.0",
"coduo/phpspec-data-provider-extension": "^1.0",
"ext-zlib": "*",
"guzzlehttp/psr7": "^1.0",
"henrikbjorn/phpspec-code-coverage": "^1.0",
"phpspec/phpspec": "^2.4",
"slim/slim": "^3.0",
"zendframework/zend-diactoros": "^1.0"
},
"suggest": {
"ext-zlib": "Used with compressor/decompressor streams",
"guzzlehttp/psr7": "Used with Guzzle PSR-7 Factories",
"slim/slim": "Used with Slim Framework PSR-7 implementation",
"zendframework/zend-diactoros": "Used with Diactoros Factories"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.6-dev"
}
},
"autoload": {
"psr-4": {
"Http\\Message\\": "src/"
},
"files": [
"src/filters.php"
]
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com"
}
],
"description": "HTTP Message related tools",
"homepage": "http://php-http.org",
"keywords": [
"http",
"message",
"psr-7"
],
"time": "2017-11-25T06:38:46+00:00"
},
{
"name": "php-http/message-factory",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-http/message-factory.git",
"reference": "a2809d4fe294ebe8879aec8d4d5bf21faa029344"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-http/message-factory/zipball/a2809d4fe294ebe8879aec8d4d5bf21faa029344",
"reference": "a2809d4fe294ebe8879aec8d4d5bf21faa029344",
"shasum": ""
},
"require": {
"php": ">=5.4",
"psr/http-message": "^1.0"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.0-dev"
}
},
"autoload": {
"psr-4": {
"Http\\Message\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com"
}
],
"description": "Factory interfaces for PSR-7 HTTP Message",
"homepage": "http://php-http.org",
"keywords": [
"factory",
"http",
"message",
"stream",
"uri"
],
"time": "2016-02-03T08:16:31+00:00"
},
{
"name": "php-http/promise",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-http/promise.git",
"reference": "1cc44dc01402d407fc6da922591deebe4659826f"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-http/promise/zipball/1cc44dc01402d407fc6da922591deebe4659826f",
"reference": "1cc44dc01402d407fc6da922591deebe4659826f",
"shasum": ""
},
"require-dev": {
"henrikbjorn/phpspec-code-coverage": "^1.0",
"phpspec/phpspec": "^2.4"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.0-dev"
}
},
"autoload": {
"psr-4": {
"Http\\Promise\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com"
},
{
"name": "Joel Wurtz",
"email": "joel.wurtz@gmail.com"
}
],
"description": "Promise used for asynchronous HTTP requests",
"homepage": "http://httplug.io",
"keywords": [
"promise"
],
"time": "2017-11-22T21:24:54+00:00"
},
{
"name": "psr/cache",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-fig/cache.git",
"reference": "78c5a01ddbf11cf731f1338a4f5aba23b14d5b47"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-fig/cache/zipball/78c5a01ddbf11cf731f1338a4f5aba23b14d5b47",
"reference": "78c5a01ddbf11cf731f1338a4f5aba23b14d5b47",
"shasum": ""
},
"require": {
"php": ">=5.3.0"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.0.x-dev"
}
},
"autoload": {
"psr-4": {
"Psr\\Cache\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "PHP-FIG",
"homepage": "http://www.php-fig.org/"
}
],
"description": "Common interface for caching libraries",
"keywords": [
"cache",
"psr",
"psr-6"
],
"time": "2016-10-13T14:48:10+00:00"
},
{
"name": "psr/http-message",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/php-fig/http-message.git",
"reference": "f6561bf28d520154e4b0ec72be95418abe6d9363"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-fig/http-message/zipball/f6561bf28d520154e4b0ec72be95418abe6d9363",
"reference": "f6561bf28d520154e4b0ec72be95418abe6d9363",
"shasum": ""
},
"require": {
"php": ">=5.3.0"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.0.x-dev"
}
},
"autoload": {
"psr-4": {
"Psr\\Http\\Message\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "PHP-FIG",
"homepage": "http://www.php-fig.org/"
}
],
"description": "Common interface for HTTP messages",
"homepage": "https://github.com/php-fig/http-message",
"keywords": [
"http",
"http-message",
"psr",
"psr-7",
"request",
"response"
],
"time": "2016-08-06T14:39:51+00:00"
},
{
"name": "symfony/options-resolver",
"version": "dev-master",
"source": {
"type": "git",
"url": "https://github.com/symfony/options-resolver.git",
"reference": "95a16ad04c0ca3404c9286eca3b4a0c36cc46f7d"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/symfony/options-resolver/zipball/95a16ad04c0ca3404c9286eca3b4a0c36cc46f7d",
"reference": "95a16ad04c0ca3404c9286eca3b4a0c36cc46f7d",
"shasum": ""
},
"require": {
"php": "^7.1.3"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "4.1-dev"
}
},
"autoload": {
"psr-4": {
"Symfony\\Component\\OptionsResolver\\": ""
},
"exclude-from-classmap": [
"/Tests/"
]
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Fabien Potencier",
"email": "fabien@symfony.com"
},
{
"name": "Symfony Community",
"homepage": "https://symfony.com/contributors"
}
],
"description": "Symfony OptionsResolver Component",
"homepage": "https://symfony.com",
"keywords": [
"config",
"configuration",
"options"
],
"time": "2017-12-14T19:50:39+00:00"
} }
],
"packages-dev": [],
"aliases": [],
-   "minimum-stability": "dev",
-   "stability-flags": {
-       "knplabs/github-api": 20,
-       "php-http/guzzle6-adapter": 20
-   },
+   "minimum-stability": "stable",
+   "stability-flags": [],
"prefer-stable": false,
"prefer-lowest": false,
"platform": [],


@ -1,113 +0,0 @@
<?php

require __DIR__ . '/config.php';

use PhpAmqpLib\Message\AMQPMessage;

# define('AMQP_DEBUG', true);

$connection = rabbitmq_conn();
$channel = $connection->channel();
$channel->basic_qos(null, 1, true);

$channel->queue_declare('mass-rebuild-check-jobs',
    false, true, false, false);

list($queueName, , ) = $channel->queue_declare('mass-rebuild-check-inputs',
    false, true, false, false);
$channel->queue_bind($queueName, 'github-events', 'pull_request.nixos/nixpkgs');

echo "hi\n";

function outrunner($msg) {
    try {
        runner($msg);
    } catch (\PhpAmqpLib\Exception\AMQPProtocolChannelException $e) {
        echo "Channel exception:\n";
        var_dump($e);
    }
}

function runner($msg) {
    echo "Msg Sha: " . md5($msg->body) . "\n";
    $in = json_decode($msg->body);

    if (!\GHE\ACL::isRepoEligible($in->repository->full_name)) {
        echo "Repo not authorized (" . $in->repository->full_name . ")\n";
        $msg->delivery_info['channel']->basic_ack($msg->delivery_info['delivery_tag']);
        return true;
    }

    if ($in->pull_request->state != "open") {
        echo "PR isn't open in the event\n";
        $msg->delivery_info['channel']->basic_ack($msg->delivery_info['delivery_tag']);
        return true;
    }

    $client = gh_client();
    $status = $client->api('pull_request')->show(
        $in->repository->owner->login,
        $in->repository->name,
        $in->number);

    if ($status['mergeable'] === false) {
        echo "github says the PR isn't able to be merged\n";
        $msg->delivery_info['channel']->basic_ack($msg->delivery_info['delivery_tag']);
        return true;
    }

    if ($status['state'] !== 'open') {
        echo "github says the PR isn't open\n";
        $msg->delivery_info['channel']->basic_ack($msg->delivery_info['delivery_tag']);
        return true;
    }

    $ok_events = [
        'opened',
        'created',
        'synchronize',
        'reopened',
    ];
    if (!in_array($in->action, $ok_events)) {
        echo "Uninteresting event " . $in->action . "\n";
        $msg->delivery_info['channel']->basic_ack($msg->delivery_info['delivery_tag']);
        return true;
    } else {
        echo "so-called interesting event on #" . $in->number . ": " . $in->action . "\n";
    }

    $forward = [
        'original_payload' => $in,
        'repo' => [
            'owner' => $in->repository->owner->login,
            'name' => $in->repository->name,
            'full_name' => $in->repository->full_name,
            'clone_url' => $in->repository->clone_url,
        ],
        'pr' => [
            'number' => $in->number,
            'target_branch' => $in->pull_request->base->ref,
            'patch_url' => $in->pull_request->patch_url,
            'head_sha' => $in->pull_request->head->sha,
        ],
    ];

    echo "forwarding to mass-rebuild-check-jobs :)\n";
    $message = new AMQPMessage(json_encode($forward),
        array(
            'content_type' => 'application/json',
            'delivery_mode' => AMQPMessage::DELIVERY_MODE_PERSISTENT,
        ));
    $msg->delivery_info['channel']->basic_publish($message, '', 'mass-rebuild-check-jobs');
    $msg->delivery_info['channel']->basic_ack($msg->delivery_info['delivery_tag']);

    return true;
}

$consumerTag = 'massrebuildcheckfilter' . getmypid();
$channel->basic_consume($queueName, $consumerTag, false, false, false, false, 'outrunner');

while (count($channel->callbacks)) {
    $channel->wait();
}

echo "Bye\n";


@ -1,16 +0,0 @@
<?php

namespace GHE;

class ACL {
    static public function getRepos() {
        return [
            'grahamc/elm-stuff',
            'nixos/nixpkgs',
            'nixos/nixpkgs-channels',
        ];
    }

    static public function isRepoEligible($repo) {
        return in_array(strtolower($repo), self::getRepos());
    }
}


@ -142,7 +142,26 @@ try {
$connection = retry_rabbitmq_conn();
$channel = $connection->channel();
-   $dec = $channel->exchange_declare('github-events', 'topic', false, true, false);
+   $dec = $channel->exchange_declare(
+       'github-events',
+       'topic',
+       false, // passive
+       true, // durable
+       false // auto_delete
+   );
+   $channel->queue_declare(
+       'github-events-unknown',
+       false, // passive
+       true, // durable
+       false, // exclusive
+       false // auto-delete
+   );
+   $channel->queue_bind(
+       'github-events-unknown',
+       'github-events',
+       'unknown.*'
+   );
$message = new AMQPMessage(json_encode($input),
    array(


@ -1,4 +0,0 @@
#!/usr/bin/env nix-shell
#!nix-shell -p bash -p jq -p curl -i bash
jq -s '.[0] * .[1] * .[2]' ./config.public.json ./config.known-users.json ./config.private.json > ./config.prod.json


@ -33,3 +33,5 @@ done
jq -s '{ "runner": { "known_users": .[0]}}' "$accumulator" > "$dest"
rm -f "$result" "$scratch" "$accumulator"
+   jq -s '.[0] * .[1] * .[2]' ./config.public.json ./config.known-users.json ./config.private.json > ./config.prod.json